Mirror of https://github.com/QwenLM/qwen-code.git (synced 2025-12-21 09:17:53 +00:00)
Commit: pre-release commit
packages/cli/src/config/auth.test.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { AuthType } from '@qwen/qwen-code-core';
import { vi } from 'vitest';
import { validateAuthMethod } from './auth.js';

vi.mock('./settings.js', () => ({
  loadEnvironment: vi.fn(),
}));

describe('validateAuthMethod', () => {
  const originalEnv = process.env;

  beforeEach(() => {
    vi.resetModules();
    process.env = {};
  });

  afterEach(() => {
    process.env = originalEnv;
  });

  it('should return null for LOGIN_WITH_GOOGLE', () => {
    expect(validateAuthMethod(AuthType.LOGIN_WITH_GOOGLE)).toBeNull();
  });

  it('should return null for CLOUD_SHELL', () => {
    expect(validateAuthMethod(AuthType.CLOUD_SHELL)).toBeNull();
  });

  describe('USE_GEMINI', () => {
    it('should return null if GEMINI_API_KEY is set', () => {
      process.env.GEMINI_API_KEY = 'test-key';
      expect(validateAuthMethod(AuthType.USE_GEMINI)).toBeNull();
    });

    it('should return an error message if GEMINI_API_KEY is not set', () => {
      expect(validateAuthMethod(AuthType.USE_GEMINI)).toBe(
        'GEMINI_API_KEY environment variable not found. Add that to your environment and try again (no reload needed if using .env)!',
      );
    });
  });

  describe('USE_VERTEX_AI', () => {
    it('should return null if GOOGLE_CLOUD_PROJECT and GOOGLE_CLOUD_LOCATION are set', () => {
      process.env.GOOGLE_CLOUD_PROJECT = 'test-project';
      process.env.GOOGLE_CLOUD_LOCATION = 'test-location';
      expect(validateAuthMethod(AuthType.USE_VERTEX_AI)).toBeNull();
    });

    it('should return null if GOOGLE_API_KEY is set', () => {
      process.env.GOOGLE_API_KEY = 'test-api-key';
      expect(validateAuthMethod(AuthType.USE_VERTEX_AI)).toBeNull();
    });

    it('should return an error message if no required environment variables are set', () => {
      expect(validateAuthMethod(AuthType.USE_VERTEX_AI)).toBe(
        'When using Vertex AI, you must specify either:\n' +
          '• GOOGLE_CLOUD_PROJECT and GOOGLE_CLOUD_LOCATION environment variables.\n' +
          '• GOOGLE_API_KEY environment variable (if using express mode).\n' +
          'Update your environment and try again (no reload needed if using .env)!',
      );
    });
  });

  it('should return an error message for an invalid auth method', () => {
    expect(validateAuthMethod('invalid-method')).toBe(
      'Invalid auth method selected.',
    );
  });
});
packages/cli/src/config/auth.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { AuthType } from '@qwen/qwen-code-core';
import { loadEnvironment } from './settings.js';

export const validateAuthMethod = (authMethod: string): string | null => {
  loadEnvironment();
  if (
    authMethod === AuthType.LOGIN_WITH_GOOGLE ||
    authMethod === AuthType.CLOUD_SHELL
  ) {
    return null;
  }

  if (authMethod === AuthType.USE_GEMINI) {
    if (!process.env.GEMINI_API_KEY) {
      return 'GEMINI_API_KEY environment variable not found. Add that to your environment and try again (no reload needed if using .env)!';
    }
    return null;
  }

  if (authMethod === AuthType.USE_VERTEX_AI) {
    const hasVertexProjectLocationConfig =
      !!process.env.GOOGLE_CLOUD_PROJECT && !!process.env.GOOGLE_CLOUD_LOCATION;
    const hasGoogleApiKey = !!process.env.GOOGLE_API_KEY;
    if (!hasVertexProjectLocationConfig && !hasGoogleApiKey) {
      return (
        'When using Vertex AI, you must specify either:\n' +
        '• GOOGLE_CLOUD_PROJECT and GOOGLE_CLOUD_LOCATION environment variables.\n' +
        '• GOOGLE_API_KEY environment variable (if using express mode).\n' +
        'Update your environment and try again (no reload needed if using .env)!'
      );
    }
    return null;
  }

  if (authMethod === AuthType.USE_OPENAI) {
    if (!process.env.OPENAI_API_KEY) {
      return 'OPENAI_API_KEY environment variable not found. You can enter it interactively or add it to your .env file.';
    }
    return null;
  }

  return 'Invalid auth method selected.';
};

export const setOpenAIApiKey = (apiKey: string): void => {
  process.env.OPENAI_API_KEY = apiKey;
};

export const setOpenAIBaseUrl = (baseUrl: string): void => {
  process.env.OPENAI_BASE_URL = baseUrl;
};

export const setOpenAIModel = (model: string): void => {
  process.env.OPENAI_MODEL = model;
};
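For orientation, a minimal sketch of how the validator and the setOpenAI* helpers above might be used by a caller; the surrounding prompt logic is an illustrative assumption, not code from this commit:

import { AuthType } from '@qwen/qwen-code-core';
import { setOpenAIApiKey, validateAuthMethod } from './auth.js';

// Hypothetical caller: export an interactively entered key, then validate the
// selected method and surface the returned error text (null means valid).
function ensureAuth(selectedMethod: string, enteredKey?: string): boolean {
  if (selectedMethod === AuthType.USE_OPENAI && enteredKey) {
    setOpenAIApiKey(enteredKey); // helper simply writes the key into process.env
  }
  const error = validateAuthMethod(selectedMethod);
  if (error !== null) {
    console.error(error); // e.g. "OPENAI_API_KEY environment variable not found. ..."
    return false;
  }
  return true;
}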
packages/cli/src/config/config.integration.test.ts (new file, 243 lines)
@@ -0,0 +1,243 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
import * as fs from 'fs';
import * as path from 'path';
import { tmpdir } from 'os';
import {
  Config,
  ConfigParameters,
  ContentGeneratorConfig,
} from '@qwen/qwen-code-core';

const TEST_CONTENT_GENERATOR_CONFIG: ContentGeneratorConfig = {
  apiKey: 'test-key',
  model: 'test-model',
  userAgent: 'test-agent',
};

// Mock file discovery service and tool registry
vi.mock('@qwen/qwen-code-core', async () => {
  const actual = await vi.importActual('@qwen/qwen-code-core');
  return {
    ...actual,
    FileDiscoveryService: vi.fn().mockImplementation(() => ({
      initialize: vi.fn(),
    })),
    createToolRegistry: vi.fn().mockResolvedValue({}),
  };
});

describe('Configuration Integration Tests', () => {
  let tempDir: string;
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    tempDir = fs.mkdtempSync(path.join(tmpdir(), 'gemini-cli-test-'));
    originalEnv = { ...process.env };
    process.env.GEMINI_API_KEY = 'test-api-key';
    vi.clearAllMocks();
  });

  afterEach(() => {
    process.env = originalEnv;
    if (fs.existsSync(tempDir)) {
      fs.rmSync(tempDir, { recursive: true });
    }
  });

  describe('File Filtering Configuration', () => {
    it('should load default file filtering settings', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        fileFilteringRespectGitIgnore: undefined, // Should default to true
      };

      const config = new Config(configParams);

      expect(config.getFileFilteringRespectGitIgnore()).toBe(true);
    });

    it('should load custom file filtering settings from configuration', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        fileFiltering: {
          respectGitIgnore: false,
        },
      };

      const config = new Config(configParams);

      expect(config.getFileFilteringRespectGitIgnore()).toBe(false);
    });

    it('should merge user and workspace file filtering settings', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        fileFilteringRespectGitIgnore: true,
      };

      const config = new Config(configParams);

      expect(config.getFileFilteringRespectGitIgnore()).toBe(true);
    });
  });

  describe('Configuration Integration', () => {
    it('should handle partial configuration objects gracefully', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        fileFiltering: {
          respectGitIgnore: false,
        },
      };

      const config = new Config(configParams);

      // Specified settings should be applied
      expect(config.getFileFilteringRespectGitIgnore()).toBe(false);
    });

    it('should handle empty configuration objects gracefully', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        fileFilteringRespectGitIgnore: undefined,
      };

      const config = new Config(configParams);

      // All settings should use defaults
      expect(config.getFileFilteringRespectGitIgnore()).toBe(true);
    });

    it('should handle missing configuration sections gracefully', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        // Missing fileFiltering configuration
      };

      const config = new Config(configParams);

      // All git-aware settings should use defaults
      expect(config.getFileFilteringRespectGitIgnore()).toBe(true);
    });
  });

  describe('Real-world Configuration Scenarios', () => {
    it('should handle a security-focused configuration', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        fileFilteringRespectGitIgnore: true,
      };

      const config = new Config(configParams);

      expect(config.getFileFilteringRespectGitIgnore()).toBe(true);
    });

    it('should handle a CI/CD environment configuration', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        fileFiltering: {
          respectGitIgnore: false,
        }, // CI might need to see all files
      };

      const config = new Config(configParams);

      expect(config.getFileFilteringRespectGitIgnore()).toBe(false);
    });
  });

  describe('Checkpointing Configuration', () => {
    it('should enable checkpointing when the setting is true', async () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        checkpointing: true,
      };

      const config = new Config(configParams);

      expect(config.getCheckpointingEnabled()).toBe(true);
    });
  });

  describe('Extension Context Files', () => {
    it('should have an empty array for extension context files by default', () => {
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
      };
      const config = new Config(configParams);
      expect(config.getExtensionContextFilePaths()).toEqual([]);
    });

    it('should correctly store and return extension context file paths', () => {
      const contextFiles = ['/path/to/file1.txt', '/path/to/file2.js'];
      const configParams: ConfigParameters = {
        cwd: '/tmp',
        contentGeneratorConfig: TEST_CONTENT_GENERATOR_CONFIG,
        embeddingModel: 'test-embedding-model',
        sandbox: false,
        targetDir: tempDir,
        debugMode: false,
        extensionContextFilePaths: contextFiles,
      };
      const config = new Config(configParams);
      expect(config.getExtensionContextFilePaths()).toEqual(contextFiles);
    });
  });
});
packages/cli/src/config/config.test.ts (new file, 1009 lines)
File diff suppressed because it is too large.
packages/cli/src/config/config.ts (new file, 429 lines)
@@ -0,0 +1,429 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import yargs from 'yargs/yargs';
import { hideBin } from 'yargs/helpers';
import process from 'node:process';
import {
  Config,
  loadServerHierarchicalMemory,
  setGeminiMdFilename as setServerGeminiMdFilename,
  getCurrentGeminiMdFilename,
  ApprovalMode,
  DEFAULT_GEMINI_MODEL,
  DEFAULT_GEMINI_EMBEDDING_MODEL,
  FileDiscoveryService,
  TelemetryTarget,
  MCPServerConfig,
} from '@qwen/qwen-code-core';
import { Settings } from './settings.js';

import { Extension, filterActiveExtensions } from './extension.js';
import { getCliVersion } from '../utils/version.js';
import { loadSandboxConfig } from './sandboxConfig.js';

// Simple console logger for now - replace with actual logger if available
const logger = {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  debug: (...args: any[]) => console.debug('[DEBUG]', ...args),
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  warn: (...args: any[]) => console.warn('[WARN]', ...args),
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  error: (...args: any[]) => console.error('[ERROR]', ...args),
};

export interface CliArgs {
  model: string | undefined;
  sandbox: boolean | string | undefined;
  sandboxImage: string | undefined;
  debug: boolean | undefined;
  prompt: string | undefined;
  promptInteractive: string | undefined;
  allFiles: boolean | undefined;
  all_files: boolean | undefined;
  showMemoryUsage: boolean | undefined;
  show_memory_usage: boolean | undefined;
  yolo: boolean | undefined;
  telemetry: boolean | undefined;
  checkpointing: boolean | undefined;
  telemetryTarget: string | undefined;
  telemetryOtlpEndpoint: string | undefined;
  telemetryLogPrompts: boolean | undefined;
  allowedMcpServerNames: string[] | undefined;
  extensions: string[] | undefined;
  listExtensions: boolean | undefined;
  ideMode: boolean | undefined;
  openaiLogging: boolean | undefined;
  openaiApiKey: string | undefined;
  openaiBaseUrl: string | undefined;
}

export async function parseArguments(): Promise<CliArgs> {
  const yargsInstance = yargs(hideBin(process.argv))
    .scriptName('qwen')
    .usage(
      '$0 [options]',
      'Qwen Code - Launch an interactive CLI, use -p/--prompt for non-interactive mode',
    )
    .option('model', {
      alias: 'm',
      type: 'string',
      description: `Model`,
      default: process.env.GEMINI_MODEL || DEFAULT_GEMINI_MODEL,
    })
    .option('prompt', {
      alias: 'p',
      type: 'string',
      description: 'Prompt. Appended to input on stdin (if any).',
    })
    .option('prompt-interactive', {
      alias: 'i',
      type: 'string',
      description:
        'Execute the provided prompt and continue in interactive mode',
    })
    .option('sandbox', {
      alias: 's',
      type: 'boolean',
      description: 'Run in sandbox?',
    })
    .option('sandbox-image', {
      type: 'string',
      description: 'Sandbox image URI.',
    })
    .option('debug', {
      alias: 'd',
      type: 'boolean',
      description: 'Run in debug mode?',
      default: false,
    })
    .option('all-files', {
      alias: ['a'],
      type: 'boolean',
      description: 'Include ALL files in context?',
      default: false,
    })
    .option('all_files', {
      type: 'boolean',
      description: 'Include ALL files in context?',
      default: false,
    })
    .deprecateOption(
      'all_files',
      'Use --all-files instead. We will be removing --all_files in the coming weeks.',
    )
    .option('show-memory-usage', {
      type: 'boolean',
      description: 'Show memory usage in status bar',
      default: false,
    })
    .option('show_memory_usage', {
      type: 'boolean',
      description: 'Show memory usage in status bar',
      default: false,
    })
    .deprecateOption(
      'show_memory_usage',
      'Use --show-memory-usage instead. We will be removing --show_memory_usage in the coming weeks.',
    )
    .option('yolo', {
      alias: 'y',
      type: 'boolean',
      description:
        'Automatically accept all actions (aka YOLO mode, see https://www.youtube.com/watch?v=xvFZjo5PgG0 for more details)?',
      default: false,
    })
    .option('telemetry', {
      type: 'boolean',
      description:
        'Enable telemetry? This flag specifically controls if telemetry is sent. Other --telemetry-* flags set specific values but do not enable telemetry on their own.',
    })
    .option('telemetry-target', {
      type: 'string',
      choices: ['local', 'gcp'],
      description:
        'Set the telemetry target (local or gcp). Overrides settings files.',
    })
    .option('telemetry-otlp-endpoint', {
      type: 'string',
      description:
        'Set the OTLP endpoint for telemetry. Overrides environment variables and settings files.',
    })
    .option('telemetry-log-prompts', {
      type: 'boolean',
      description:
        'Enable or disable logging of user prompts for telemetry. Overrides settings files.',
    })
    .option('checkpointing', {
      alias: 'c',
      type: 'boolean',
      description: 'Enables checkpointing of file edits',
      default: false,
    })
    .option('allowed-mcp-server-names', {
      type: 'array',
      string: true,
      description: 'Allowed MCP server names',
    })
    .option('extensions', {
      alias: 'e',
      type: 'array',
      string: true,
      description:
        'A list of extensions to use. If not provided, all extensions are used.',
    })
    .option('list-extensions', {
      alias: 'l',
      type: 'boolean',
      description: 'List all available extensions and exit.',
    })
    .option('ide-mode', {
      type: 'boolean',
      description: 'Run in IDE mode?',
    })
    .option('openai-logging', {
      type: 'boolean',
      description:
        'Enable logging of OpenAI API calls for debugging and analysis',
    })
    .option('openai-api-key', {
      type: 'string',
      description: 'OpenAI API key to use for authentication',
    })
    .option('openai-base-url', {
      type: 'string',
      description: 'OpenAI base URL (for custom endpoints)',
    })

    .version(await getCliVersion()) // This will enable the --version flag based on package.json
    .alias('v', 'version')
    .help()
    .alias('h', 'help')
    .strict()
    .check((argv) => {
      if (argv.prompt && argv.promptInteractive) {
        throw new Error(
          'Cannot use both --prompt (-p) and --prompt-interactive (-i) together',
        );
      }
      return true;
    });

  yargsInstance.wrap(yargsInstance.terminalWidth());
  return yargsInstance.argv;
}

// This function is now a thin wrapper around the server's implementation.
// It's kept in the CLI for now as App.tsx directly calls it for memory refresh.
// TODO: Consider if App.tsx should get memory via a server call or if Config should refresh itself.
export async function loadHierarchicalGeminiMemory(
  currentWorkingDirectory: string,
  debugMode: boolean,
  fileService: FileDiscoveryService,
  extensionContextFilePaths: string[] = [],
): Promise<{ memoryContent: string; fileCount: number }> {
  if (debugMode) {
    logger.debug(
      `CLI: Delegating hierarchical memory load to server for CWD: ${currentWorkingDirectory}`,
    );
  }
  // Directly call the server function.
  // The server function will use its own homedir() for the global path.
  return loadServerHierarchicalMemory(
    currentWorkingDirectory,
    debugMode,
    fileService,
    extensionContextFilePaths,
  );
}

export async function loadCliConfig(
  settings: Settings,
  extensions: Extension[],
  sessionId: string,
  argv: CliArgs,
): Promise<Config> {
  const debugMode =
    argv.debug ||
    [process.env.DEBUG, process.env.DEBUG_MODE].some(
      (v) => v === 'true' || v === '1',
    );

  const ideMode =
    (argv.ideMode ?? settings.ideMode ?? false) &&
    process.env.TERM_PROGRAM === 'vscode' &&
    !process.env.SANDBOX;

  const activeExtensions = filterActiveExtensions(
    extensions,
    argv.extensions || [],
  );

  // Handle OpenAI API key from command line
  if (argv.openaiApiKey) {
    process.env.OPENAI_API_KEY = argv.openaiApiKey;
  }

  // Handle OpenAI base URL from command line
  if (argv.openaiBaseUrl) {
    process.env.OPENAI_BASE_URL = argv.openaiBaseUrl;
  }

  // Set the context filename in the server's memoryTool module BEFORE loading memory
  // TODO(b/343434939): This is a bit of a hack. The contextFileName should ideally be passed
  // directly to the Config constructor in core, and have core handle setGeminiMdFilename.
  // However, loadHierarchicalGeminiMemory is called *before* createServerConfig.
  if (settings.contextFileName) {
    setServerGeminiMdFilename(settings.contextFileName);
  } else {
    // Reset to default if not provided in settings.
    setServerGeminiMdFilename(getCurrentGeminiMdFilename());
  }

  const extensionContextFilePaths = activeExtensions.flatMap(
    (e) => e.contextFiles,
  );

  const fileService = new FileDiscoveryService(process.cwd());
  // Call the (now wrapper) loadHierarchicalGeminiMemory which calls the server's version
  const { memoryContent, fileCount } = await loadHierarchicalGeminiMemory(
    process.cwd(),
    debugMode,
    fileService,
    extensionContextFilePaths,
  );

  let mcpServers = mergeMcpServers(settings, activeExtensions);
  const excludeTools = mergeExcludeTools(settings, activeExtensions);

  if (argv.allowedMcpServerNames) {
    const allowedNames = new Set(argv.allowedMcpServerNames.filter(Boolean));
    if (allowedNames.size > 0) {
      mcpServers = Object.fromEntries(
        Object.entries(mcpServers).filter(([key]) => allowedNames.has(key)),
      );
    } else {
      mcpServers = {};
    }
  }

  if (ideMode) {
    mcpServers['_ide_server'] = new MCPServerConfig(
      undefined, // command
      undefined, // args
      undefined, // env
      undefined, // cwd
      undefined, // url
      'http://localhost:3000/mcp', // httpUrl
      undefined, // headers
      undefined, // tcp
      undefined, // timeout
      false, // trust
      'IDE connection', // description
      undefined, // includeTools
      undefined, // excludeTools
    );
  }

  const sandboxConfig = await loadSandboxConfig(settings, argv);

  return new Config({
    sessionId,
    embeddingModel: DEFAULT_GEMINI_EMBEDDING_MODEL,
    sandbox: sandboxConfig,
    targetDir: process.cwd(),
    debugMode,
    question: argv.promptInteractive || argv.prompt || '',
    fullContext: argv.allFiles || argv.all_files || false,
    coreTools: settings.coreTools || undefined,
    excludeTools,
    toolDiscoveryCommand: settings.toolDiscoveryCommand,
    toolCallCommand: settings.toolCallCommand,
    mcpServerCommand: settings.mcpServerCommand,
    mcpServers,
    userMemory: memoryContent,
    geminiMdFileCount: fileCount,
    approvalMode: argv.yolo || false ? ApprovalMode.YOLO : ApprovalMode.DEFAULT,
    showMemoryUsage:
      argv.showMemoryUsage ||
      argv.show_memory_usage ||
      settings.showMemoryUsage ||
      false,
    accessibility: settings.accessibility,
    telemetry: {
      enabled: argv.telemetry ?? settings.telemetry?.enabled,
      target: (argv.telemetryTarget ??
        settings.telemetry?.target) as TelemetryTarget,
      otlpEndpoint:
        argv.telemetryOtlpEndpoint ??
        process.env.OTEL_EXPORTER_OTLP_ENDPOINT ??
        settings.telemetry?.otlpEndpoint,
      logPrompts: argv.telemetryLogPrompts ?? settings.telemetry?.logPrompts,
    },
    usageStatisticsEnabled: settings.usageStatisticsEnabled ?? true,
    // Git-aware file filtering settings
    fileFiltering: {
      respectGitIgnore: settings.fileFiltering?.respectGitIgnore,
      enableRecursiveFileSearch:
        settings.fileFiltering?.enableRecursiveFileSearch,
    },
    checkpointing: argv.checkpointing || settings.checkpointing?.enabled,
    proxy:
      process.env.HTTPS_PROXY ||
      process.env.https_proxy ||
      process.env.HTTP_PROXY ||
      process.env.http_proxy,
    cwd: process.cwd(),
    fileDiscoveryService: fileService,
    bugCommand: settings.bugCommand,
    model: argv.model!,
    extensionContextFilePaths,
    maxSessionTurns: settings.maxSessionTurns ?? -1,
    listExtensions: argv.listExtensions || false,
    activeExtensions: activeExtensions.map((e) => ({
      name: e.config.name,
      version: e.config.version,
    })),
    noBrowser: !!process.env.NO_BROWSER,
    ideMode,
    enableOpenAILogging:
      (typeof argv.openaiLogging === 'undefined'
        ? settings.enableOpenAILogging
        : argv.openaiLogging) ?? false,
    sampling_params: settings.sampling_params,
  });
}

function mergeMcpServers(settings: Settings, extensions: Extension[]) {
  const mcpServers = { ...(settings.mcpServers || {}) };
  for (const extension of extensions) {
    Object.entries(extension.config.mcpServers || {}).forEach(
      ([key, server]) => {
        if (mcpServers[key]) {
          logger.warn(
            `Skipping extension MCP config for server with key "${key}" as it already exists.`,
          );
          return;
        }
        mcpServers[key] = server;
      },
    );
  }
  return mcpServers;
}

function mergeExcludeTools(
  settings: Settings,
  extensions: Extension[],
): string[] {
  const allExcludeTools = new Set(settings.excludeTools || []);
  for (const extension of extensions) {
    for (const tool of extension.config.excludeTools || []) {
      allExcludeTools.add(tool);
    }
  }
  return [...allExcludeTools];
}
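A rough sketch of how the pieces in this file might be wired together at startup: parse the CLI flags, then build the Config. The call site and the exact shape returned by loadSettings are assumptions based on the imports above, not code from this commit:

import { loadCliConfig, parseArguments } from './config.js';
import { loadSettings } from './settings.js';
import { loadExtensions } from './extension.js';

async function startup(sessionId: string) {
  const argv = await parseArguments();                  // yargs result (CliArgs)
  const settings = loadSettings(process.cwd()).merged;  // merged system/user/workspace settings (assumed shape)
  const extensions = loadExtensions(process.cwd());     // workspace + home extensions
  // loadCliConfig resolves debug mode, IDE mode, MCP servers, sandbox, telemetry, etc.
  return loadCliConfig(settings, extensions, sessionId, argv);
}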
packages/cli/src/config/extension.test.ts (new file, 151 lines)
@@ -0,0 +1,151 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { vi } from 'vitest';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import {
  EXTENSIONS_CONFIG_FILENAME,
  EXTENSIONS_DIRECTORY_NAME,
  filterActiveExtensions,
  loadExtensions,
} from './extension.js';

vi.mock('os', async (importOriginal) => {
  const os = await importOriginal<typeof import('os')>();
  return {
    ...os,
    homedir: vi.fn(),
  };
});

describe('loadExtensions', () => {
  let tempWorkspaceDir: string;
  let tempHomeDir: string;

  beforeEach(() => {
    tempWorkspaceDir = fs.mkdtempSync(
      path.join(os.tmpdir(), 'gemini-cli-test-workspace-'),
    );
    tempHomeDir = fs.mkdtempSync(
      path.join(os.tmpdir(), 'gemini-cli-test-home-'),
    );
    vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
  });

  afterEach(() => {
    fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
    fs.rmSync(tempHomeDir, { recursive: true, force: true });
  });

  it('should load context file path when GEMINI.md is present', () => {
    const workspaceExtensionsDir = path.join(
      tempWorkspaceDir,
      EXTENSIONS_DIRECTORY_NAME,
    );
    fs.mkdirSync(workspaceExtensionsDir, { recursive: true });
    createExtension(workspaceExtensionsDir, 'ext1', '1.0.0', true);
    createExtension(workspaceExtensionsDir, 'ext2', '2.0.0');

    const extensions = loadExtensions(tempWorkspaceDir);

    expect(extensions).toHaveLength(2);
    const ext1 = extensions.find((e) => e.config.name === 'ext1');
    const ext2 = extensions.find((e) => e.config.name === 'ext2');
    expect(ext1?.contextFiles).toEqual([
      path.join(workspaceExtensionsDir, 'ext1', 'QWEN.md'),
    ]);
    expect(ext2?.contextFiles).toEqual([]);
  });

  it('should load context file path from the extension config', () => {
    const workspaceExtensionsDir = path.join(
      tempWorkspaceDir,
      EXTENSIONS_DIRECTORY_NAME,
    );
    fs.mkdirSync(workspaceExtensionsDir, { recursive: true });
    createExtension(
      workspaceExtensionsDir,
      'ext1',
      '1.0.0',
      false,
      'my-context-file.md',
    );

    const extensions = loadExtensions(tempWorkspaceDir);

    expect(extensions).toHaveLength(1);
    const ext1 = extensions.find((e) => e.config.name === 'ext1');
    expect(ext1?.contextFiles).toEqual([
      path.join(workspaceExtensionsDir, 'ext1', 'my-context-file.md'),
    ]);
  });
});

describe('filterActiveExtensions', () => {
  const extensions = [
    { config: { name: 'ext1', version: '1.0.0' }, contextFiles: [] },
    { config: { name: 'ext2', version: '1.0.0' }, contextFiles: [] },
    { config: { name: 'ext3', version: '1.0.0' }, contextFiles: [] },
  ];

  it('should return all extensions if no enabled extensions are provided', () => {
    const activeExtensions = filterActiveExtensions(extensions, []);
    expect(activeExtensions).toHaveLength(3);
  });

  it('should return only the enabled extensions', () => {
    const activeExtensions = filterActiveExtensions(extensions, [
      'ext1',
      'ext3',
    ]);
    expect(activeExtensions).toHaveLength(2);
    expect(activeExtensions.some((e) => e.config.name === 'ext1')).toBe(true);
    expect(activeExtensions.some((e) => e.config.name === 'ext3')).toBe(true);
  });

  it('should return no extensions when "none" is provided', () => {
    const activeExtensions = filterActiveExtensions(extensions, ['none']);
    expect(activeExtensions).toHaveLength(0);
  });

  it('should handle case-insensitivity', () => {
    const activeExtensions = filterActiveExtensions(extensions, ['EXT1']);
    expect(activeExtensions).toHaveLength(1);
    expect(activeExtensions[0].config.name).toBe('ext1');
  });

  it('should log an error for unknown extensions', () => {
    const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
    filterActiveExtensions(extensions, ['ext4']);
    expect(consoleSpy).toHaveBeenCalledWith('Extension not found: ext4');
    consoleSpy.mockRestore();
  });
});

function createExtension(
  extensionsDir: string,
  name: string,
  version: string,
  addContextFile = false,
  contextFileName?: string,
): void {
  const extDir = path.join(extensionsDir, name);
  fs.mkdirSync(extDir);
  fs.writeFileSync(
    path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
    JSON.stringify({ name, version, contextFileName }),
  );

  if (addContextFile) {
    fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
  }

  if (contextFileName) {
    fs.writeFileSync(path.join(extDir, contextFileName), 'context');
  }
}
packages/cli/src/config/extension.ts (new file, 159 lines)
@@ -0,0 +1,159 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { MCPServerConfig } from '@qwen/qwen-code-core';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';

export const EXTENSIONS_DIRECTORY_NAME = path.join('.qwen', 'extensions');
export const EXTENSIONS_CONFIG_FILENAME = 'gemini-extension.json';

export interface Extension {
  config: ExtensionConfig;
  contextFiles: string[];
}

export interface ExtensionConfig {
  name: string;
  version: string;
  mcpServers?: Record<string, MCPServerConfig>;
  contextFileName?: string | string[];
  excludeTools?: string[];
}

export function loadExtensions(workspaceDir: string): Extension[] {
  const allExtensions = [
    ...loadExtensionsFromDir(workspaceDir),
    ...loadExtensionsFromDir(os.homedir()),
  ];

  const uniqueExtensions = new Map<string, Extension>();
  for (const extension of allExtensions) {
    if (!uniqueExtensions.has(extension.config.name)) {
      console.log(
        `Loading extension: ${extension.config.name} (version: ${extension.config.version})`,
      );
      uniqueExtensions.set(extension.config.name, extension);
    }
  }

  return Array.from(uniqueExtensions.values());
}

function loadExtensionsFromDir(dir: string): Extension[] {
  const extensionsDir = path.join(dir, EXTENSIONS_DIRECTORY_NAME);
  if (!fs.existsSync(extensionsDir)) {
    return [];
  }

  const extensions: Extension[] = [];
  for (const subdir of fs.readdirSync(extensionsDir)) {
    const extensionDir = path.join(extensionsDir, subdir);

    const extension = loadExtension(extensionDir);
    if (extension != null) {
      extensions.push(extension);
    }
  }
  return extensions;
}

function loadExtension(extensionDir: string): Extension | null {
  if (!fs.statSync(extensionDir).isDirectory()) {
    console.error(
      `Warning: unexpected file ${extensionDir} in extensions directory.`,
    );
    return null;
  }

  const configFilePath = path.join(extensionDir, EXTENSIONS_CONFIG_FILENAME);
  if (!fs.existsSync(configFilePath)) {
    console.error(
      `Warning: extension directory ${extensionDir} does not contain a config file ${configFilePath}.`,
    );
    return null;
  }

  try {
    const configContent = fs.readFileSync(configFilePath, 'utf-8');
    const config = JSON.parse(configContent) as ExtensionConfig;
    if (!config.name || !config.version) {
      console.error(
        `Invalid extension config in ${configFilePath}: missing name or version.`,
      );
      return null;
    }

    const contextFiles = getContextFileNames(config)
      .map((contextFileName) => path.join(extensionDir, contextFileName))
      .filter((contextFilePath) => fs.existsSync(contextFilePath));

    return {
      config,
      contextFiles,
    };
  } catch (e) {
    console.error(
      `Warning: error parsing extension config in ${configFilePath}: ${e}`,
    );
    return null;
  }
}

function getContextFileNames(config: ExtensionConfig): string[] {
  if (!config.contextFileName) {
    return ['QWEN.md'];
  } else if (!Array.isArray(config.contextFileName)) {
    return [config.contextFileName];
  }
  return config.contextFileName;
}

export function filterActiveExtensions(
  extensions: Extension[],
  enabledExtensionNames: string[],
): Extension[] {
  if (enabledExtensionNames.length === 0) {
    return extensions;
  }

  const lowerCaseEnabledExtensions = new Set(
    enabledExtensionNames.map((e) => e.trim().toLowerCase()),
  );

  if (
    lowerCaseEnabledExtensions.size === 1 &&
    lowerCaseEnabledExtensions.has('none')
  ) {
    if (extensions.length > 0) {
      console.log('All extensions are disabled.');
    }
    return [];
  }

  const activeExtensions: Extension[] = [];
  const notFoundNames = new Set(lowerCaseEnabledExtensions);

  for (const extension of extensions) {
    const lowerCaseName = extension.config.name.toLowerCase();
    if (lowerCaseEnabledExtensions.has(lowerCaseName)) {
      console.log(
        `Activated extension: ${extension.config.name} (version: ${extension.config.version})`,
      );
      activeExtensions.push(extension);
      notFoundNames.delete(lowerCaseName);
    } else {
      console.log(`Disabled extension: ${extension.config.name}`);
    }
  }

  for (const requestedName of notFoundNames) {
    console.log(`Extension not found: ${requestedName}`);
  }

  return activeExtensions;
}
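To illustrate the ExtensionConfig shape that loadExtension parses out of gemini-extension.json, here is a hypothetical example expressed as the parsed object; the extension name, file names, and tool name are illustrative assumptions only:

import { ExtensionConfig } from './extension.js';

// What <workspace>/.qwen/extensions/my-tools/gemini-extension.json might contain,
// shown as the parsed ExtensionConfig (example values only).
const exampleConfig: ExtensionConfig = {
  name: 'my-tools',            // required; missing name or version makes loadExtension return null
  version: '0.1.0',            // required
  contextFileName: 'QWEN.md',  // optional; string or string[]; defaults to 'QWEN.md' when omitted
  excludeTools: ['run_shell_command'], // merged into the CLI's excludeTools by config.ts
  // mcpServers?: Record<string, MCPServerConfig>  merged into settings.mcpServers by config.ts
};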
packages/cli/src/config/sandboxConfig.ts (new file, 107 lines)
@@ -0,0 +1,107 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { SandboxConfig } from '@qwen/qwen-code-core';
import commandExists from 'command-exists';
import * as os from 'node:os';
import { getPackageJson } from '../utils/package.js';
import { Settings } from './settings.js';

// This is a stripped-down version of the CliArgs interface from config.ts
// to avoid circular dependencies.
interface SandboxCliArgs {
  sandbox?: boolean | string;
  sandboxImage?: string;
}

const VALID_SANDBOX_COMMANDS: ReadonlyArray<SandboxConfig['command']> = [
  'docker',
  'podman',
  'sandbox-exec',
];

function isSandboxCommand(value: string): value is SandboxConfig['command'] {
  return (VALID_SANDBOX_COMMANDS as readonly string[]).includes(value);
}

function getSandboxCommand(
  sandbox?: boolean | string,
): SandboxConfig['command'] | '' {
  // If the SANDBOX env var is set, we're already inside the sandbox.
  if (process.env.SANDBOX) {
    return '';
  }

  // note environment variable takes precedence over argument (from command line or settings)
  const environmentConfiguredSandbox =
    process.env.GEMINI_SANDBOX?.toLowerCase().trim() ?? '';
  sandbox =
    environmentConfiguredSandbox?.length > 0
      ? environmentConfiguredSandbox
      : sandbox;
  if (sandbox === '1' || sandbox === 'true') sandbox = true;
  else if (sandbox === '0' || sandbox === 'false' || !sandbox) sandbox = false;

  if (sandbox === false) {
    return '';
  }

  if (typeof sandbox === 'string' && sandbox) {
    if (!isSandboxCommand(sandbox)) {
      console.error(
        `ERROR: invalid sandbox command '${sandbox}'. Must be one of ${VALID_SANDBOX_COMMANDS.join(
          ', ',
        )}`,
      );
      process.exit(1);
    }
    // confirm that specified command exists
    if (commandExists.sync(sandbox)) {
      return sandbox;
    }
    console.error(
      `ERROR: missing sandbox command '${sandbox}' (from GEMINI_SANDBOX)`,
    );
    process.exit(1);
  }

  // look for seatbelt, docker, or podman, in that order
  // for container-based sandboxing, require sandbox to be enabled explicitly
  if (os.platform() === 'darwin' && commandExists.sync('sandbox-exec')) {
    return 'sandbox-exec';
  } else if (commandExists.sync('docker') && sandbox === true) {
    return 'docker';
  } else if (commandExists.sync('podman') && sandbox === true) {
    return 'podman';
  }

  // throw an error if user requested sandbox but no command was found
  if (sandbox === true) {
    console.error(
      'ERROR: GEMINI_SANDBOX is true but failed to determine command for sandbox; ' +
        'install docker or podman or specify command in GEMINI_SANDBOX',
    );
    process.exit(1);
  }

  return '';
}

export async function loadSandboxConfig(
  settings: Settings,
  argv: SandboxCliArgs,
): Promise<SandboxConfig | undefined> {
  const sandboxOption = argv.sandbox ?? settings.sandbox;
  const command = getSandboxCommand(sandboxOption);

  const packageJson = await getPackageJson();
  const image =
    argv.sandboxImage ??
    process.env.GEMINI_SANDBOX_IMAGE ??
    packageJson?.config?.sandboxImageUri;

  return command && image ? { command, image } : undefined;
}
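A small sketch of the resolution order implemented above (GEMINI_SANDBOX overrides the CLI flag or settings value; the image comes from --sandbox-image, GEMINI_SANDBOX_IMAGE, or package.json); the call site and example values are assumptions, not code from this commit:

import { loadSandboxConfig } from './sandboxConfig.js';
import { Settings } from './settings.js';

// Hypothetical example: the user passes `--sandbox docker`. If GEMINI_SANDBOX is set,
// it takes precedence over this argument; GEMINI_SANDBOX_IMAGE overrides the settings image.
async function resolveSandbox(settings: Settings) {
  const config = await loadSandboxConfig(settings, { sandbox: 'docker' });
  // => { command: 'docker', image: '...' } when docker exists and an image is configured,
  //    otherwise undefined (sandboxing disabled).
  return config;
}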
packages/cli/src/config/settings.test.ts (new file, 795 lines)
@@ -0,0 +1,795 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/// <reference types="vitest/globals" />
|
||||
|
||||
// Mock 'os' first.
|
||||
import * as osActual from 'os'; // Import for type info for the mock factory
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const actualOs = await importOriginal<typeof osActual>();
|
||||
return {
|
||||
...actualOs,
|
||||
homedir: vi.fn(() => '/mock/home/user'),
|
||||
platform: vi.fn(() => 'linux'),
|
||||
};
|
||||
});
|
||||
|
||||
// Mock './settings.js' to ensure it uses the mocked 'os.homedir()' for its internal constants.
|
||||
vi.mock('./settings.js', async (importActual) => {
|
||||
const originalModule = await importActual<typeof import('./settings.js')>();
|
||||
return {
|
||||
__esModule: true, // Ensure correct module shape
|
||||
...originalModule, // Re-export all original members
|
||||
// We are relying on originalModule's USER_SETTINGS_PATH being constructed with mocked os.homedir()
|
||||
};
|
||||
});
|
||||
|
||||
// NOW import everything else, including the (now effectively re-exported) settings.js
|
||||
import * as pathActual from 'path'; // Restored for MOCK_WORKSPACE_SETTINGS_PATH
|
||||
import {
|
||||
describe,
|
||||
it,
|
||||
expect,
|
||||
vi,
|
||||
beforeEach,
|
||||
afterEach,
|
||||
type Mocked,
|
||||
type Mock,
|
||||
} from 'vitest';
|
||||
import * as fs from 'fs'; // fs will be mocked separately
|
||||
import stripJsonComments from 'strip-json-comments'; // Will be mocked separately
|
||||
|
||||
// These imports will get the versions from the vi.mock('./settings.js', ...) factory.
|
||||
import {
|
||||
loadSettings,
|
||||
USER_SETTINGS_PATH, // This IS the mocked path.
|
||||
SYSTEM_SETTINGS_PATH,
|
||||
SETTINGS_DIRECTORY_NAME, // This is from the original module, but used by the mock.
|
||||
SettingScope,
|
||||
} from './settings.js';
|
||||
|
||||
const MOCK_WORKSPACE_DIR = '/mock/workspace';
|
||||
// Use the (mocked) SETTINGS_DIRECTORY_NAME for consistency
|
||||
const MOCK_WORKSPACE_SETTINGS_PATH = pathActual.join(
|
||||
MOCK_WORKSPACE_DIR,
|
||||
SETTINGS_DIRECTORY_NAME,
|
||||
'settings.json',
|
||||
);
|
||||
|
||||
vi.mock('fs');
|
||||
vi.mock('strip-json-comments', () => ({
|
||||
default: vi.fn((content) => content),
|
||||
}));
|
||||
|
||||
describe('Settings Loading and Merging', () => {
|
||||
let mockFsExistsSync: Mocked<typeof fs.existsSync>;
|
||||
let mockStripJsonComments: Mocked<typeof stripJsonComments>;
|
||||
let mockFsMkdirSync: Mocked<typeof fs.mkdirSync>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
|
||||
mockFsExistsSync = vi.mocked(fs.existsSync);
|
||||
mockFsMkdirSync = vi.mocked(fs.mkdirSync);
|
||||
mockStripJsonComments = vi.mocked(stripJsonComments);
|
||||
|
||||
vi.mocked(osActual.homedir).mockReturnValue('/mock/home/user');
|
||||
(mockStripJsonComments as unknown as Mock).mockImplementation(
|
||||
(jsonString: string) => jsonString,
|
||||
);
|
||||
(mockFsExistsSync as Mock).mockReturnValue(false);
|
||||
(fs.readFileSync as Mock).mockReturnValue('{}'); // Return valid empty JSON
|
||||
(mockFsMkdirSync as Mock).mockImplementation(() => undefined);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('loadSettings', () => {
|
||||
it('should load empty settings if no files exist', () => {
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
expect(settings.system.settings).toEqual({});
|
||||
expect(settings.user.settings).toEqual({});
|
||||
expect(settings.workspace.settings).toEqual({});
|
||||
expect(settings.merged).toEqual({});
|
||||
expect(settings.errors.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should load system settings if only system file exists', () => {
|
||||
(mockFsExistsSync as Mock).mockImplementation(
|
||||
(p: fs.PathLike) => p === SYSTEM_SETTINGS_PATH,
|
||||
);
|
||||
const systemSettingsContent = {
|
||||
theme: 'system-default',
|
||||
sandbox: false,
|
||||
};
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === SYSTEM_SETTINGS_PATH)
|
||||
return JSON.stringify(systemSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(fs.readFileSync).toHaveBeenCalledWith(
|
||||
SYSTEM_SETTINGS_PATH,
|
||||
'utf-8',
|
||||
);
|
||||
expect(settings.system.settings).toEqual(systemSettingsContent);
|
||||
expect(settings.user.settings).toEqual({});
|
||||
expect(settings.workspace.settings).toEqual({});
|
||||
expect(settings.merged).toEqual(systemSettingsContent);
|
||||
});
|
||||
|
||||
it('should load user settings if only user file exists', () => {
|
||||
const expectedUserSettingsPath = USER_SETTINGS_PATH; // Use the path actually resolved by the (mocked) module
|
||||
|
||||
(mockFsExistsSync as Mock).mockImplementation(
|
||||
(p: fs.PathLike) => p === expectedUserSettingsPath,
|
||||
);
|
||||
const userSettingsContent = {
|
||||
theme: 'dark',
|
||||
contextFileName: 'USER_CONTEXT.md',
|
||||
};
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === expectedUserSettingsPath)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(fs.readFileSync).toHaveBeenCalledWith(
|
||||
expectedUserSettingsPath,
|
||||
'utf-8',
|
||||
);
|
||||
expect(settings.user.settings).toEqual(userSettingsContent);
|
||||
expect(settings.workspace.settings).toEqual({});
|
||||
expect(settings.merged).toEqual(userSettingsContent);
|
||||
});
|
||||
|
||||
it('should load workspace settings if only workspace file exists', () => {
|
||||
(mockFsExistsSync as Mock).mockImplementation(
|
||||
(p: fs.PathLike) => p === MOCK_WORKSPACE_SETTINGS_PATH,
|
||||
);
|
||||
const workspaceSettingsContent = {
|
||||
sandbox: true,
|
||||
contextFileName: 'WORKSPACE_CONTEXT.md',
|
||||
};
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '';
|
||||
},
|
||||
);
|
||||
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(fs.readFileSync).toHaveBeenCalledWith(
|
||||
MOCK_WORKSPACE_SETTINGS_PATH,
|
||||
'utf-8',
|
||||
);
|
||||
expect(settings.user.settings).toEqual({});
|
||||
expect(settings.workspace.settings).toEqual(workspaceSettingsContent);
|
||||
expect(settings.merged).toEqual(workspaceSettingsContent);
|
||||
});
|
||||
|
||||
it('should merge user and workspace settings, with workspace taking precedence', () => {
|
||||
(mockFsExistsSync as Mock).mockReturnValue(true);
|
||||
const userSettingsContent = {
|
||||
theme: 'dark',
|
||||
sandbox: false,
|
||||
contextFileName: 'USER_CONTEXT.md',
|
||||
};
|
||||
const workspaceSettingsContent = {
|
||||
sandbox: true,
|
||||
coreTools: ['tool1'],
|
||||
contextFileName: 'WORKSPACE_CONTEXT.md',
|
||||
};
|
||||
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '';
|
||||
},
|
||||
);
|
||||
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(settings.user.settings).toEqual(userSettingsContent);
|
||||
expect(settings.workspace.settings).toEqual(workspaceSettingsContent);
|
||||
expect(settings.merged).toEqual({
|
||||
theme: 'dark',
|
||||
sandbox: true,
|
||||
coreTools: ['tool1'],
|
||||
contextFileName: 'WORKSPACE_CONTEXT.md',
|
||||
});
|
||||
});
|
||||
|
||||
it('should merge system, user and workspace settings, with system taking precedence over workspace, and workspace over user', () => {
|
||||
(mockFsExistsSync as Mock).mockReturnValue(true);
|
||||
const systemSettingsContent = {
|
||||
theme: 'system-theme',
|
||||
sandbox: false,
|
||||
telemetry: { enabled: false },
|
||||
};
|
||||
const userSettingsContent = {
|
||||
theme: 'dark',
|
||||
sandbox: true,
|
||||
contextFileName: 'USER_CONTEXT.md',
|
||||
};
|
||||
const workspaceSettingsContent = {
|
||||
sandbox: false,
|
||||
coreTools: ['tool1'],
|
||||
contextFileName: 'WORKSPACE_CONTEXT.md',
|
||||
};
|
||||
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === SYSTEM_SETTINGS_PATH)
|
||||
return JSON.stringify(systemSettingsContent);
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '';
|
||||
},
|
||||
);
|
||||
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(settings.system.settings).toEqual(systemSettingsContent);
|
||||
expect(settings.user.settings).toEqual(userSettingsContent);
|
||||
expect(settings.workspace.settings).toEqual(workspaceSettingsContent);
|
||||
expect(settings.merged).toEqual({
|
||||
theme: 'system-theme',
|
||||
sandbox: false,
|
||||
telemetry: { enabled: false },
|
||||
coreTools: ['tool1'],
|
||||
contextFileName: 'WORKSPACE_CONTEXT.md',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle contextFileName correctly when only in user settings', () => {
|
||||
(mockFsExistsSync as Mock).mockImplementation(
|
||||
(p: fs.PathLike) => p === USER_SETTINGS_PATH,
|
||||
);
|
||||
const userSettingsContent = { contextFileName: 'CUSTOM.md' };
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
return '';
|
||||
},
|
||||
);
|
||||
|
||||
      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.merged.contextFileName).toBe('CUSTOM.md');
    });

    it('should handle contextFileName correctly when only in workspace settings', () => {
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === MOCK_WORKSPACE_SETTINGS_PATH,
      );
      const workspaceSettingsContent = {
        contextFileName: 'PROJECT_SPECIFIC.md',
      };
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === MOCK_WORKSPACE_SETTINGS_PATH)
            return JSON.stringify(workspaceSettingsContent);
          return '';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.merged.contextFileName).toBe('PROJECT_SPECIFIC.md');
    });

    it('should default contextFileName to undefined if not in any settings file', () => {
      (mockFsExistsSync as Mock).mockReturnValue(true);
      const userSettingsContent = { theme: 'dark' };
      const workspaceSettingsContent = { sandbox: true };
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          if (p === MOCK_WORKSPACE_SETTINGS_PATH)
            return JSON.stringify(workspaceSettingsContent);
          return '';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.merged.contextFileName).toBeUndefined();
    });

    it('should load telemetry setting from user settings', () => {
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === USER_SETTINGS_PATH,
      );
      const userSettingsContent = { telemetry: true };
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          return '{}';
        },
      );
      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.merged.telemetry).toBe(true);
    });

    it('should load telemetry setting from workspace settings', () => {
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === MOCK_WORKSPACE_SETTINGS_PATH,
      );
      const workspaceSettingsContent = { telemetry: false };
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === MOCK_WORKSPACE_SETTINGS_PATH)
            return JSON.stringify(workspaceSettingsContent);
          return '{}';
        },
      );
      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.merged.telemetry).toBe(false);
    });

    it('should prioritize workspace telemetry setting over user setting', () => {
      (mockFsExistsSync as Mock).mockReturnValue(true);
      const userSettingsContent = { telemetry: true };
      const workspaceSettingsContent = { telemetry: false };
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          if (p === MOCK_WORKSPACE_SETTINGS_PATH)
            return JSON.stringify(workspaceSettingsContent);
          return '{}';
        },
      );
      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.merged.telemetry).toBe(false);
    });

    it('should have telemetry as undefined if not in any settings file', () => {
      (mockFsExistsSync as Mock).mockReturnValue(false); // No settings files exist
      (fs.readFileSync as Mock).mockReturnValue('{}');
      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.merged.telemetry).toBeUndefined();
    });

    it('should handle JSON parsing errors gracefully', () => {
      (mockFsExistsSync as Mock).mockReturnValue(true); // Both files "exist"
      const invalidJsonContent = 'invalid json';
      const userReadError = new SyntaxError(
        "Expected ',' or '}' after property value in JSON at position 10",
      );
      const workspaceReadError = new SyntaxError(
        'Unexpected token i in JSON at position 0',
      );

      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH) {
            // Simulate JSON.parse throwing for user settings
            vi.spyOn(JSON, 'parse').mockImplementationOnce(() => {
              throw userReadError;
            });
            return invalidJsonContent; // Content that would cause JSON.parse to throw
          }
          if (p === MOCK_WORKSPACE_SETTINGS_PATH) {
            // Simulate JSON.parse throwing for workspace settings
            vi.spyOn(JSON, 'parse').mockImplementationOnce(() => {
              throw workspaceReadError;
            });
            return invalidJsonContent;
          }
          return '{}'; // Default for other reads
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);

      // Check that settings are empty due to parsing errors
      expect(settings.user.settings).toEqual({});
      expect(settings.workspace.settings).toEqual({});
      expect(settings.merged).toEqual({});

      // Check that error objects are populated in settings.errors
      expect(settings.errors).toBeDefined();
      // Assuming both user and workspace files cause errors and are added in order
      expect(settings.errors.length).toEqual(2);

      const userError = settings.errors.find(
        (e) => e.path === USER_SETTINGS_PATH,
      );
      expect(userError).toBeDefined();
      expect(userError?.message).toBe(userReadError.message);

      const workspaceError = settings.errors.find(
        (e) => e.path === MOCK_WORKSPACE_SETTINGS_PATH,
      );
      expect(workspaceError).toBeDefined();
      expect(workspaceError?.message).toBe(workspaceReadError.message);

      // Restore JSON.parse mock if it was spied on specifically for this test
      vi.restoreAllMocks(); // Or more targeted restore if needed
    });

    it('should resolve environment variables in user settings', () => {
      process.env.TEST_API_KEY = 'user_api_key_from_env';
      const userSettingsContent = {
        apiKey: '$TEST_API_KEY',
        someUrl: 'https://test.com/${TEST_API_KEY}',
      };
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === USER_SETTINGS_PATH,
      );
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          return '{}';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.user.settings.apiKey).toBe('user_api_key_from_env');
      expect(settings.user.settings.someUrl).toBe(
        'https://test.com/user_api_key_from_env',
      );
      expect(settings.merged.apiKey).toBe('user_api_key_from_env');
      delete process.env.TEST_API_KEY;
    });

    it('should resolve environment variables in workspace settings', () => {
      process.env.WORKSPACE_ENDPOINT = 'workspace_endpoint_from_env';
      const workspaceSettingsContent = {
        endpoint: '${WORKSPACE_ENDPOINT}/api',
        nested: { value: '$WORKSPACE_ENDPOINT' },
      };
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === MOCK_WORKSPACE_SETTINGS_PATH,
      );
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === MOCK_WORKSPACE_SETTINGS_PATH)
            return JSON.stringify(workspaceSettingsContent);
          return '{}';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.workspace.settings.endpoint).toBe(
        'workspace_endpoint_from_env/api',
      );
      expect(settings.workspace.settings.nested.value).toBe(
        'workspace_endpoint_from_env',
      );
      expect(settings.merged.endpoint).toBe('workspace_endpoint_from_env/api');
      delete process.env.WORKSPACE_ENDPOINT;
    });

    it('should prioritize user env variables over workspace env variables if keys clash after resolution', () => {
      const userSettingsContent = { configValue: '$SHARED_VAR' };
      const workspaceSettingsContent = { configValue: '$SHARED_VAR' };

      (mockFsExistsSync as Mock).mockReturnValue(true);
      const originalSharedVar = process.env.SHARED_VAR;
      // Temporarily delete to ensure a clean slate for the test's specific manipulations
      delete process.env.SHARED_VAR;

      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH) {
            process.env.SHARED_VAR = 'user_value_for_user_read'; // Set for user settings read
            return JSON.stringify(userSettingsContent);
          }
          if (p === MOCK_WORKSPACE_SETTINGS_PATH) {
            process.env.SHARED_VAR = 'workspace_value_for_workspace_read'; // Set for workspace settings read
            return JSON.stringify(workspaceSettingsContent);
          }
          return '{}';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);

      expect(settings.user.settings.configValue).toBe(
        'user_value_for_user_read',
      );
      expect(settings.workspace.settings.configValue).toBe(
        'workspace_value_for_workspace_read',
      );
      // Merged should take workspace's resolved value
      expect(settings.merged.configValue).toBe(
        'workspace_value_for_workspace_read',
      );

      // Restore original environment variable state
      if (originalSharedVar !== undefined) {
        process.env.SHARED_VAR = originalSharedVar;
      } else {
        delete process.env.SHARED_VAR; // Ensure it's deleted if it wasn't there before
      }
    });

    it('should prioritize workspace env variables over user env variables if keys clash after resolution', () => {
      const userSettingsContent = { configValue: '$SHARED_VAR' };
      const workspaceSettingsContent = { configValue: '$SHARED_VAR' };

      (mockFsExistsSync as Mock).mockReturnValue(true);
      const originalSharedVar = process.env.SHARED_VAR;
      // Temporarily delete to ensure a clean slate for the test's specific manipulations
      delete process.env.SHARED_VAR;

      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH) {
            process.env.SHARED_VAR = 'user_value_for_user_read'; // Set for user settings read
            return JSON.stringify(userSettingsContent);
          }
          if (p === MOCK_WORKSPACE_SETTINGS_PATH) {
            process.env.SHARED_VAR = 'workspace_value_for_workspace_read'; // Set for workspace settings read
            return JSON.stringify(workspaceSettingsContent);
          }
          return '{}';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);

      expect(settings.user.settings.configValue).toBe(
        'user_value_for_user_read',
      );
      expect(settings.workspace.settings.configValue).toBe(
        'workspace_value_for_workspace_read',
      );
      // Merged should take workspace's resolved value
      expect(settings.merged.configValue).toBe(
        'workspace_value_for_workspace_read',
      );

      // Restore original environment variable state
      if (originalSharedVar !== undefined) {
        process.env.SHARED_VAR = originalSharedVar;
      } else {
        delete process.env.SHARED_VAR; // Ensure it's deleted if it wasn't there before
      }
    });

    it('should prioritize system env variables over workspace env variables if keys clash after resolution', () => {
      const workspaceSettingsContent = { configValue: '$SHARED_VAR' };
      const systemSettingsContent = { configValue: '$SHARED_VAR' };

      (mockFsExistsSync as Mock).mockReturnValue(true);
      const originalSharedVar = process.env.SHARED_VAR;
      // Temporarily delete to ensure a clean slate for the test's specific manipulations
      delete process.env.SHARED_VAR;

      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === SYSTEM_SETTINGS_PATH) {
            process.env.SHARED_VAR = 'system_value_for_system_read'; // Set for system settings read
            return JSON.stringify(systemSettingsContent);
          }
          if (p === MOCK_WORKSPACE_SETTINGS_PATH) {
            process.env.SHARED_VAR = 'workspace_value_for_workspace_read'; // Set for workspace settings read
            return JSON.stringify(workspaceSettingsContent);
          }
          return '{}';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);

      expect(settings.system.settings.configValue).toBe(
        'system_value_for_system_read',
      );
      expect(settings.workspace.settings.configValue).toBe(
        'workspace_value_for_workspace_read',
      );
      // Merged should take the system's resolved value
      expect(settings.merged.configValue).toBe('system_value_for_system_read');

      // Restore original environment variable state
      if (originalSharedVar !== undefined) {
        process.env.SHARED_VAR = originalSharedVar;
      } else {
        delete process.env.SHARED_VAR; // Ensure it's deleted if it wasn't there before
      }
    });

    it('should leave unresolved environment variables as is', () => {
      const userSettingsContent = { apiKey: '$UNDEFINED_VAR' };
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === USER_SETTINGS_PATH,
      );
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          return '{}';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.user.settings.apiKey).toBe('$UNDEFINED_VAR');
      expect(settings.merged.apiKey).toBe('$UNDEFINED_VAR');
    });

    it('should resolve multiple environment variables in a single string', () => {
      process.env.VAR_A = 'valueA';
      process.env.VAR_B = 'valueB';
      const userSettingsContent = { path: '/path/$VAR_A/${VAR_B}/end' };
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === USER_SETTINGS_PATH,
      );
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          return '{}';
        },
      );
      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.user.settings.path).toBe('/path/valueA/valueB/end');
      delete process.env.VAR_A;
      delete process.env.VAR_B;
    });

    it('should resolve environment variables in arrays', () => {
      process.env.ITEM_1 = 'item1_env';
      process.env.ITEM_2 = 'item2_env';
      const userSettingsContent = { list: ['$ITEM_1', '${ITEM_2}', 'literal'] };
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === USER_SETTINGS_PATH,
      );
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          return '{}';
        },
      );
      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.user.settings.list).toEqual([
        'item1_env',
        'item2_env',
        'literal',
      ]);
      delete process.env.ITEM_1;
      delete process.env.ITEM_2;
    });

    it('should correctly pass through null, boolean, and number types, and handle undefined properties', () => {
      process.env.MY_ENV_STRING = 'env_string_value';
      process.env.MY_ENV_STRING_NESTED = 'env_string_nested_value';

      const userSettingsContent = {
        nullVal: null,
        trueVal: true,
        falseVal: false,
        numberVal: 123.45,
        stringVal: '$MY_ENV_STRING',
        nestedObj: {
          nestedNull: null,
          nestedBool: true,
          nestedNum: 0,
          nestedString: 'literal',
          anotherEnv: '${MY_ENV_STRING_NESTED}',
        },
      };

      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === USER_SETTINGS_PATH,
      );
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          return '{}';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);

      expect(settings.user.settings.nullVal).toBeNull();
      expect(settings.user.settings.trueVal).toBe(true);
      expect(settings.user.settings.falseVal).toBe(false);
      expect(settings.user.settings.numberVal).toBe(123.45);
      expect(settings.user.settings.stringVal).toBe('env_string_value');
      expect(settings.user.settings.undefinedVal).toBeUndefined();

      expect(settings.user.settings.nestedObj.nestedNull).toBeNull();
      expect(settings.user.settings.nestedObj.nestedBool).toBe(true);
      expect(settings.user.settings.nestedObj.nestedNum).toBe(0);
      expect(settings.user.settings.nestedObj.nestedString).toBe('literal');
      expect(settings.user.settings.nestedObj.anotherEnv).toBe(
        'env_string_nested_value',
      );

      delete process.env.MY_ENV_STRING;
      delete process.env.MY_ENV_STRING_NESTED;
    });

    it('should resolve multiple concatenated environment variables in a single string value', () => {
      process.env.TEST_HOST = 'myhost';
      process.env.TEST_PORT = '9090';
      const userSettingsContent = {
        serverAddress: '${TEST_HOST}:${TEST_PORT}/api',
      };
      (mockFsExistsSync as Mock).mockImplementation(
        (p: fs.PathLike) => p === USER_SETTINGS_PATH,
      );
      (fs.readFileSync as Mock).mockImplementation(
        (p: fs.PathOrFileDescriptor) => {
          if (p === USER_SETTINGS_PATH)
            return JSON.stringify(userSettingsContent);
          return '{}';
        },
      );

      const settings = loadSettings(MOCK_WORKSPACE_DIR);
      expect(settings.user.settings.serverAddress).toBe('myhost:9090/api');

      delete process.env.TEST_HOST;
      delete process.env.TEST_PORT;
    });
  });

  describe('LoadedSettings class', () => {
    it('setValue should update the correct scope and recompute merged settings', () => {
      (mockFsExistsSync as Mock).mockReturnValue(false);
      const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);

      vi.mocked(fs.writeFileSync).mockImplementation(() => {});
      // mkdirSync is mocked in beforeEach to return undefined, which is fine for void usage

      loadedSettings.setValue(SettingScope.User, 'theme', 'matrix');
      expect(loadedSettings.user.settings.theme).toBe('matrix');
      expect(loadedSettings.merged.theme).toBe('matrix');
      expect(fs.writeFileSync).toHaveBeenCalledWith(
        USER_SETTINGS_PATH,
        JSON.stringify({ theme: 'matrix' }, null, 2),
        'utf-8',
      );

      loadedSettings.setValue(
        SettingScope.Workspace,
        'contextFileName',
        'MY_AGENTS.md',
      );
      expect(loadedSettings.workspace.settings.contextFileName).toBe(
        'MY_AGENTS.md',
      );
      expect(loadedSettings.merged.contextFileName).toBe('MY_AGENTS.md');
      expect(loadedSettings.merged.theme).toBe('matrix'); // User setting should still be there
      expect(fs.writeFileSync).toHaveBeenCalledWith(
        MOCK_WORKSPACE_SETTINGS_PATH,
        JSON.stringify({ contextFileName: 'MY_AGENTS.md' }, null, 2),
        'utf-8',
      );

      // System theme overrides user and workspace themes
      loadedSettings.setValue(SettingScope.System, 'theme', 'ocean');

      expect(loadedSettings.system.settings.theme).toBe('ocean');
      expect(loadedSettings.merged.theme).toBe('ocean');
    });
  });
});
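The assertions above pin down the precedence that computeMergedSettings() in settings.ts (below) produces by spreading the three scopes in order, so system overrides workspace, which overrides user. A minimal sketch of that merge order, using illustrative scope contents that are not part of this commit:

// Later spreads win: system > workspace > user.
const user = { theme: 'dark', telemetry: true };
const workspace = { telemetry: false };
const system = { theme: 'ocean' };
const merged = { ...user, ...workspace, ...system };
// merged.telemetry === false  (workspace overrides user)
// merged.theme === 'ocean'    (system overrides both)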
388
packages/cli/src/config/settings.ts
Normal file
388
packages/cli/src/config/settings.ts
Normal file
@@ -0,0 +1,388 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import * as fs from 'fs';
import * as path from 'path';
import { homedir, platform } from 'os';
import * as dotenv from 'dotenv';
import {
  MCPServerConfig,
  GEMINI_CONFIG_DIR as GEMINI_DIR,
  getErrorMessage,
  BugCommandSettings,
  TelemetrySettings,
  AuthType,
} from '@qwen/qwen-code-core';
import stripJsonComments from 'strip-json-comments';
import { DefaultLight } from '../ui/themes/default-light.js';
import { DefaultDark } from '../ui/themes/default.js';

export const SETTINGS_DIRECTORY_NAME = '.qwen';
export const USER_SETTINGS_DIR = path.join(homedir(), SETTINGS_DIRECTORY_NAME);
export const USER_SETTINGS_PATH = path.join(USER_SETTINGS_DIR, 'settings.json');

function getSystemSettingsPath(): string {
  if (platform() === 'darwin') {
    return '/Library/Application Support/QwenCode/settings.json';
  } else if (platform() === 'win32') {
    return 'C:\\ProgramData\\qwen-code\\settings.json';
  } else {
    return '/etc/qwen-code/settings.json';
  }
}

export const SYSTEM_SETTINGS_PATH = getSystemSettingsPath();

export enum SettingScope {
  User = 'User',
  Workspace = 'Workspace',
  System = 'System',
}

export interface CheckpointingSettings {
  enabled?: boolean;
}

export interface AccessibilitySettings {
  disableLoadingPhrases?: boolean;
}

export interface Settings {
  theme?: string;
  selectedAuthType?: AuthType;
  sandbox?: boolean | string;
  coreTools?: string[];
  excludeTools?: string[];
  toolDiscoveryCommand?: string;
  toolCallCommand?: string;
  mcpServerCommand?: string;
  mcpServers?: Record<string, MCPServerConfig>;
  showMemoryUsage?: boolean;
  contextFileName?: string | string[];
  accessibility?: AccessibilitySettings;
  telemetry?: TelemetrySettings;
  usageStatisticsEnabled?: boolean;
  preferredEditor?: string;
  bugCommand?: BugCommandSettings;
  checkpointing?: CheckpointingSettings;
  autoConfigureMaxOldSpaceSize?: boolean;
  enableOpenAILogging?: boolean;

  // Git-aware file filtering settings
  fileFiltering?: {
    respectGitIgnore?: boolean;
    enableRecursiveFileSearch?: boolean;
  };

  // UI setting. Does not display the ANSI-controlled terminal title.
  hideWindowTitle?: boolean;
  hideTips?: boolean;
  hideBanner?: boolean;

  // Maximum number of user/model/tool turns allowed in a session.
  maxSessionTurns?: number;

  // Sampling parameters for content generation
  sampling_params?: {
    top_p?: number;
    top_k?: number;
    repetition_penalty?: number;
    presence_penalty?: number;
    frequency_penalty?: number;
    temperature?: number;
    max_tokens?: number;
  };

  // Add other settings here.
  ideMode?: boolean;
}

export interface SettingsError {
  message: string;
  path: string;
}

export interface SettingsFile {
  settings: Settings;
  path: string;
}

export class LoadedSettings {
  constructor(
    system: SettingsFile,
    user: SettingsFile,
    workspace: SettingsFile,
    errors: SettingsError[],
  ) {
    this.system = system;
    this.user = user;
    this.workspace = workspace;
    this.errors = errors;
    this._merged = this.computeMergedSettings();
  }

  readonly system: SettingsFile;
  readonly user: SettingsFile;
  readonly workspace: SettingsFile;
  readonly errors: SettingsError[];

  private _merged: Settings;

  get merged(): Settings {
    return this._merged;
  }

  private computeMergedSettings(): Settings {
    return {
      ...this.user.settings,
      ...this.workspace.settings,
      ...this.system.settings,
    };
  }

  forScope(scope: SettingScope): SettingsFile {
    switch (scope) {
      case SettingScope.User:
        return this.user;
      case SettingScope.Workspace:
        return this.workspace;
      case SettingScope.System:
        return this.system;
      default:
        throw new Error(`Invalid scope: ${scope}`);
    }
  }

  setValue(
    scope: SettingScope,
    key: keyof Settings,
    value: string | Record<string, MCPServerConfig> | undefined,
  ): void {
    const settingsFile = this.forScope(scope);
    // @ts-expect-error - value can be string | Record<string, MCPServerConfig>
    settingsFile.settings[key] = value;
    this._merged = this.computeMergedSettings();
    saveSettings(settingsFile);
  }
}

function resolveEnvVarsInString(value: string): string {
  const envVarRegex = /\$(?:(\w+)|{([^}]+)})/g; // Find $VAR_NAME or ${VAR_NAME}
  return value.replace(envVarRegex, (match, varName1, varName2) => {
    const varName = varName1 || varName2;
    if (process && process.env && typeof process.env[varName] === 'string') {
      return process.env[varName]!;
    }
    return match;
  });
}

function resolveEnvVarsInObject<T>(obj: T): T {
  if (
    obj === null ||
    obj === undefined ||
    typeof obj === 'boolean' ||
    typeof obj === 'number'
  ) {
    return obj;
  }

  if (typeof obj === 'string') {
    return resolveEnvVarsInString(obj) as unknown as T;
  }

  if (Array.isArray(obj)) {
    return obj.map((item) => resolveEnvVarsInObject(item)) as unknown as T;
  }

  if (typeof obj === 'object') {
    const newObj = { ...obj } as T;
    for (const key in newObj) {
      if (Object.prototype.hasOwnProperty.call(newObj, key)) {
        newObj[key] = resolveEnvVarsInObject(newObj[key]);
      }
    }
    return newObj;
  }

  return obj;
}

function findEnvFile(startDir: string): string | null {
  let currentDir = path.resolve(startDir);
  while (true) {
    // prefer gemini-specific .env under GEMINI_DIR
    const geminiEnvPath = path.join(currentDir, GEMINI_DIR, '.env');
    if (fs.existsSync(geminiEnvPath)) {
      return geminiEnvPath;
    }
    const envPath = path.join(currentDir, '.env');
    if (fs.existsSync(envPath)) {
      return envPath;
    }
    const parentDir = path.dirname(currentDir);
    if (parentDir === currentDir || !parentDir) {
      // check .env under home as fallback, again preferring gemini-specific .env
      const homeGeminiEnvPath = path.join(homedir(), GEMINI_DIR, '.env');
      if (fs.existsSync(homeGeminiEnvPath)) {
        return homeGeminiEnvPath;
      }
      const homeEnvPath = path.join(homedir(), '.env');
      if (fs.existsSync(homeEnvPath)) {
        return homeEnvPath;
      }
      return null;
    }
    currentDir = parentDir;
  }
}

export function setUpCloudShellEnvironment(envFilePath: string | null): void {
  // Special handling for GOOGLE_CLOUD_PROJECT in Cloud Shell:
  // Because GOOGLE_CLOUD_PROJECT in Cloud Shell tracks the project
  // set by the user using "gcloud config set project" we do not want to
  // use its value. So, unless the user overrides GOOGLE_CLOUD_PROJECT in
  // one of the .env files, we set the Cloud Shell-specific default here.
  if (envFilePath && fs.existsSync(envFilePath)) {
    const envFileContent = fs.readFileSync(envFilePath);
    const parsedEnv = dotenv.parse(envFileContent);
    if (parsedEnv.GOOGLE_CLOUD_PROJECT) {
      // .env file takes precedence in Cloud Shell
      process.env.GOOGLE_CLOUD_PROJECT = parsedEnv.GOOGLE_CLOUD_PROJECT;
    } else {
      // If not in .env, set to default and override global
      process.env.GOOGLE_CLOUD_PROJECT = 'cloudshell-gca';
    }
  } else {
    // If no .env file, set to default and override global
    process.env.GOOGLE_CLOUD_PROJECT = 'cloudshell-gca';
  }
}

export function loadEnvironment(): void {
  const envFilePath = findEnvFile(process.cwd());

  if (process.env.CLOUD_SHELL === 'true') {
    setUpCloudShellEnvironment(envFilePath);
  }

  if (envFilePath) {
    dotenv.config({ path: envFilePath, quiet: true });
  }
}

/**
 * Loads settings from the system, user, and workspace directories.
 * Workspace settings override user settings, and system settings override both.
 */
export function loadSettings(workspaceDir: string): LoadedSettings {
  loadEnvironment();
  let systemSettings: Settings = {};
  let userSettings: Settings = {};
  let workspaceSettings: Settings = {};
  const settingsErrors: SettingsError[] = [];

  // Load system settings
  try {
    if (fs.existsSync(SYSTEM_SETTINGS_PATH)) {
      const systemContent = fs.readFileSync(SYSTEM_SETTINGS_PATH, 'utf-8');
      const parsedSystemSettings = JSON.parse(
        stripJsonComments(systemContent),
      ) as Settings;
      systemSettings = resolveEnvVarsInObject(parsedSystemSettings);
    }
  } catch (error: unknown) {
    settingsErrors.push({
      message: getErrorMessage(error),
      path: SYSTEM_SETTINGS_PATH,
    });
  }

  // Load user settings
  try {
    if (fs.existsSync(USER_SETTINGS_PATH)) {
      const userContent = fs.readFileSync(USER_SETTINGS_PATH, 'utf-8');
      const parsedUserSettings = JSON.parse(
        stripJsonComments(userContent),
      ) as Settings;
      userSettings = resolveEnvVarsInObject(parsedUserSettings);
      // Support legacy theme names
      if (userSettings.theme && userSettings.theme === 'VS') {
        userSettings.theme = DefaultLight.name;
      } else if (userSettings.theme && userSettings.theme === 'VS2015') {
        userSettings.theme = DefaultDark.name;
      }
    }
  } catch (error: unknown) {
    settingsErrors.push({
      message: getErrorMessage(error),
      path: USER_SETTINGS_PATH,
    });
  }

  const workspaceSettingsPath = path.join(
    workspaceDir,
    SETTINGS_DIRECTORY_NAME,
    'settings.json',
  );

  // Load workspace settings
  try {
    if (fs.existsSync(workspaceSettingsPath)) {
      const projectContent = fs.readFileSync(workspaceSettingsPath, 'utf-8');
      const parsedWorkspaceSettings = JSON.parse(
        stripJsonComments(projectContent),
      ) as Settings;
      workspaceSettings = resolveEnvVarsInObject(parsedWorkspaceSettings);
      if (workspaceSettings.theme && workspaceSettings.theme === 'VS') {
        workspaceSettings.theme = DefaultLight.name;
      } else if (
        workspaceSettings.theme &&
        workspaceSettings.theme === 'VS2015'
      ) {
        workspaceSettings.theme = DefaultDark.name;
      }
    }
  } catch (error: unknown) {
    settingsErrors.push({
      message: getErrorMessage(error),
      path: workspaceSettingsPath,
    });
  }

  return new LoadedSettings(
    {
      path: SYSTEM_SETTINGS_PATH,
      settings: systemSettings,
    },
    {
      path: USER_SETTINGS_PATH,
      settings: userSettings,
    },
    {
      path: workspaceSettingsPath,
      settings: workspaceSettings,
    },
    settingsErrors,
  );
}

export function saveSettings(settingsFile: SettingsFile): void {
  try {
    // Ensure the directory exists
    const dirPath = path.dirname(settingsFile.path);
    if (!fs.existsSync(dirPath)) {
      fs.mkdirSync(dirPath, { recursive: true });
    }

    fs.writeFileSync(
      settingsFile.path,
      JSON.stringify(settingsFile.settings, null, 2),
      'utf-8',
    );
  } catch (error) {
    console.error('Error saving user settings file:', error);
  }
}
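For orientation, a minimal usage sketch of the API defined above (not part of this commit); the working directory and theme name are placeholders:

import { loadSettings, SettingScope } from './settings.js';

// Load system, user, and workspace settings for the current directory.
const settings = loadSettings(process.cwd());
for (const err of settings.errors) {
  console.error(`Failed to parse ${err.path}: ${err.message}`);
}

// setValue() writes into one scope, recomputes the merged view, and
// persists that scope's settings.json via saveSettings().
settings.setValue(SettingScope.User, 'theme', 'GitHub');
console.log(settings.merged.theme);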