Mirror of https://github.com/QwenLM/qwen-code.git (synced 2025-12-20 16:57:46 +00:00)
Commit: Sync upstream Gemini-CLI v0.8.2 (#838)
packages/cli/src/utils/commands.test.ts (new file, 140 lines)
@@ -0,0 +1,140 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect } from 'vitest';
import { parseSlashCommand } from './commands.js';
import { CommandKind, type SlashCommand } from '../ui/commands/types.js';

// Mock command structure for testing
const mockCommands: readonly SlashCommand[] = [
  {
    name: 'help',
    description: 'Show help',
    action: async () => {},
    kind: CommandKind.BUILT_IN,
  },
  {
    name: 'commit',
    description: 'Commit changes',
    action: async () => {},
    kind: CommandKind.FILE,
  },
  {
    name: 'memory',
    description: 'Manage memory',
    altNames: ['mem'],
    subCommands: [
      {
        name: 'add',
        description: 'Add to memory',
        action: async () => {},
        kind: CommandKind.BUILT_IN,
      },
      {
        name: 'clear',
        description: 'Clear memory',
        altNames: ['c'],
        action: async () => {},
        kind: CommandKind.BUILT_IN,
      },
    ],
    kind: CommandKind.BUILT_IN,
  },
];

describe('parseSlashCommand', () => {
  it('should parse a simple command without arguments', () => {
    const result = parseSlashCommand('/help', mockCommands);
    expect(result.commandToExecute?.name).toBe('help');
    expect(result.args).toBe('');
    expect(result.canonicalPath).toEqual(['help']);
  });

  it('should parse a simple command with arguments', () => {
    const result = parseSlashCommand(
      '/commit -m "Initial commit"',
      mockCommands,
    );
    expect(result.commandToExecute?.name).toBe('commit');
    expect(result.args).toBe('-m "Initial commit"');
    expect(result.canonicalPath).toEqual(['commit']);
  });

  it('should parse a subcommand', () => {
    const result = parseSlashCommand('/memory add', mockCommands);
    expect(result.commandToExecute?.name).toBe('add');
    expect(result.args).toBe('');
    expect(result.canonicalPath).toEqual(['memory', 'add']);
  });

  it('should parse a subcommand with arguments', () => {
    const result = parseSlashCommand(
      '/memory add some important data',
      mockCommands,
    );
    expect(result.commandToExecute?.name).toBe('add');
    expect(result.args).toBe('some important data');
    expect(result.canonicalPath).toEqual(['memory', 'add']);
  });

  it('should handle a command alias', () => {
    const result = parseSlashCommand('/mem add some data', mockCommands);
    expect(result.commandToExecute?.name).toBe('add');
    expect(result.args).toBe('some data');
    expect(result.canonicalPath).toEqual(['memory', 'add']);
  });

  it('should handle a subcommand alias', () => {
    const result = parseSlashCommand('/memory c', mockCommands);
    expect(result.commandToExecute?.name).toBe('clear');
    expect(result.args).toBe('');
    expect(result.canonicalPath).toEqual(['memory', 'clear']);
  });

  it('should return undefined for an unknown command', () => {
    const result = parseSlashCommand('/unknown', mockCommands);
    expect(result.commandToExecute).toBeUndefined();
    expect(result.args).toBe('unknown');
    expect(result.canonicalPath).toEqual([]);
  });

  it('should return the parent command if subcommand is unknown', () => {
    const result = parseSlashCommand(
      '/memory unknownsub some args',
      mockCommands,
    );
    expect(result.commandToExecute?.name).toBe('memory');
    expect(result.args).toBe('unknownsub some args');
    expect(result.canonicalPath).toEqual(['memory']);
  });

  it('should handle extra whitespace', () => {
    const result = parseSlashCommand(
      ' /memory add some data ',
      mockCommands,
    );
    expect(result.commandToExecute?.name).toBe('add');
    expect(result.args).toBe('some data');
    expect(result.canonicalPath).toEqual(['memory', 'add']);
  });

  it('should return undefined if query does not start with a slash', () => {
    const result = parseSlashCommand('help', mockCommands);
    expect(result.commandToExecute).toBeUndefined();
  });

  it('should handle an empty query', () => {
    const result = parseSlashCommand('', mockCommands);
    expect(result.commandToExecute).toBeUndefined();
  });

  it('should handle a query with only a slash', () => {
    const result = parseSlashCommand('/', mockCommands);
    expect(result.commandToExecute).toBeUndefined();
    expect(result.args).toBe('');
    expect(result.canonicalPath).toEqual([]);
  });
});
packages/cli/src/utils/commands.ts (new file, 71 lines)
@@ -0,0 +1,71 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { type SlashCommand } from '../ui/commands/types.js';

export type ParsedSlashCommand = {
  commandToExecute: SlashCommand | undefined;
  args: string;
  canonicalPath: string[];
};

/**
 * Parses a raw slash command string into its command, arguments, and canonical path.
 * If no valid command is found, the `commandToExecute` property will be `undefined`.
 *
 * @param query The raw input string, e.g., "/memory add some data" or "/help".
 * @param commands The list of available top-level slash commands.
 * @returns An object containing the resolved command, its arguments, and its canonical path.
 */
export const parseSlashCommand = (
  query: string,
  commands: readonly SlashCommand[],
): ParsedSlashCommand => {
  const trimmed = query.trim();

  const parts = trimmed.substring(1).trim().split(/\s+/);
  const commandPath = parts.filter((p) => p); // The parts of the command, e.g., ['memory', 'add']

  let currentCommands = commands;
  let commandToExecute: SlashCommand | undefined;
  let pathIndex = 0;
  const canonicalPath: string[] = [];

  for (const part of commandPath) {
    // TODO: For better performance and architectural clarity, this two-pass
    // search could be replaced. A more optimal approach would be to
    // pre-compute a single lookup map in `CommandService.ts` that resolves
    // all name and alias conflicts during the initial loading phase. The
    // processor would then perform a single, fast lookup on that map.

    // First pass: check for an exact match on the primary command name.
    let foundCommand = currentCommands.find((cmd) => cmd.name === part);

    // Second pass: if no primary name matches, check for an alias.
    if (!foundCommand) {
      foundCommand = currentCommands.find((cmd) =>
        cmd.altNames?.includes(part),
      );
    }

    if (foundCommand) {
      commandToExecute = foundCommand;
      canonicalPath.push(foundCommand.name);
      pathIndex++;
      if (foundCommand.subCommands) {
        currentCommands = foundCommand.subCommands;
      } else {
        break;
      }
    } else {
      break;
    }
  }

  const args = parts.slice(pathIndex).join(' ');

  return { commandToExecute, args, canonicalPath };
};
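
For orientation, a short usage sketch of parseSlashCommand. The command tree below is an illustrative fixture shaped like the mocks in commands.test.ts above, not a real command registry; in the CLI the list would come from the command loading layer.

import { parseSlashCommand } from './commands.js';
import { CommandKind, type SlashCommand } from '../ui/commands/types.js';

// Illustrative fixture only; real commands are provided by the command service.
const commands: readonly SlashCommand[] = [
  {
    name: 'memory',
    description: 'Manage memory',
    altNames: ['mem'],
    kind: CommandKind.BUILT_IN,
    subCommands: [
      {
        name: 'add',
        description: 'Add to memory',
        kind: CommandKind.BUILT_IN,
        action: async () => {},
      },
    ],
  },
];

const { commandToExecute, args, canonicalPath } = parseSlashCommand(
  '/mem add remember to run the tests',
  commands,
);
// commandToExecute?.name === 'add'
// args === 'remember to run the tests'
// canonicalPath deep-equals ['memory', 'add']

The alias `/mem` resolves to the canonical `memory` name, which is why canonicalPath records 'memory' rather than the alias; the TODO in the loop above notes that this two-pass name/alias search could later be replaced by a precomputed lookup map.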
packages/cli/src/utils/commentJson.test.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { updateSettingsFilePreservingFormat } from './commentJson.js';

describe('commentJson', () => {
  let tempDir: string;
  let testFilePath: string;

  beforeEach(() => {
    // Create a temporary directory for test files
    tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'preserve-format-test-'));
    testFilePath = path.join(tempDir, 'settings.json');
  });

  afterEach(() => {
    // Clean up temporary directory
    if (fs.existsSync(tempDir)) {
      fs.rmSync(tempDir, { recursive: true, force: true });
    }
  });

  describe('updateSettingsFilePreservingFormat', () => {
    it('should preserve comments when updating settings', () => {
      const originalContent = `{
  // Model configuration
  "model": "gemini-2.5-pro",
  "ui": {
    // Theme setting
    "theme": "dark"
  }
}`;

      fs.writeFileSync(testFilePath, originalContent, 'utf-8');

      updateSettingsFilePreservingFormat(testFilePath, {
        model: 'gemini-2.5-flash',
      });

      const updatedContent = fs.readFileSync(testFilePath, 'utf-8');

      expect(updatedContent).toContain('// Model configuration');
      expect(updatedContent).toContain('// Theme setting');
      expect(updatedContent).toContain('"model": "gemini-2.5-flash"');
      expect(updatedContent).toContain('"theme": "dark"');
    });

    it('should handle nested object updates', () => {
      const originalContent = `{
  "ui": {
    "theme": "dark",
    "showLineNumbers": true
  }
}`;

      fs.writeFileSync(testFilePath, originalContent, 'utf-8');

      updateSettingsFilePreservingFormat(testFilePath, {
        ui: {
          theme: 'light',
          showLineNumbers: true,
        },
      });

      const updatedContent = fs.readFileSync(testFilePath, 'utf-8');
      expect(updatedContent).toContain('"theme": "light"');
      expect(updatedContent).toContain('"showLineNumbers": true');
    });

    it('should add new fields while preserving existing structure', () => {
      const originalContent = `{
  // Existing config
  "model": "gemini-2.5-pro"
}`;

      fs.writeFileSync(testFilePath, originalContent, 'utf-8');

      updateSettingsFilePreservingFormat(testFilePath, {
        model: 'gemini-2.5-pro',
        newField: 'newValue',
      });

      const updatedContent = fs.readFileSync(testFilePath, 'utf-8');
      expect(updatedContent).toContain('// Existing config');
      expect(updatedContent).toContain('"newField": "newValue"');
    });

    it('should create file if it does not exist', () => {
      updateSettingsFilePreservingFormat(testFilePath, {
        model: 'gemini-2.5-pro',
      });

      expect(fs.existsSync(testFilePath)).toBe(true);
      const content = fs.readFileSync(testFilePath, 'utf-8');
      expect(content).toContain('"model": "gemini-2.5-pro"');
    });

    it('should handle complex real-world scenario', () => {
      const complexContent = `{
  // Settings
  "model": "gemini-2.5-pro",
  "mcpServers": {
    // Active server
    "context7": {
      "headers": {
        "API_KEY": "test-key" // API key
      }
    }
  }
}`;

      fs.writeFileSync(testFilePath, complexContent, 'utf-8');

      updateSettingsFilePreservingFormat(testFilePath, {
        model: 'gemini-2.5-flash',
        mcpServers: {
          context7: {
            headers: {
              API_KEY: 'new-test-key',
            },
          },
        },
        newSection: {
          setting: 'value',
        },
      });

      const updatedContent = fs.readFileSync(testFilePath, 'utf-8');

      // Verify comments preserved
      expect(updatedContent).toContain('// Settings');
      expect(updatedContent).toContain('// Active server');
      expect(updatedContent).toContain('// API key');

      // Verify updates applied
      expect(updatedContent).toContain('"model": "gemini-2.5-flash"');
      expect(updatedContent).toContain('"newSection"');
      expect(updatedContent).toContain('"API_KEY": "new-test-key"');
    });

    it('should handle corrupted JSON files gracefully', () => {
      const corruptedContent = `{
  "model": "gemini-2.5-pro",
  "ui": {
    "theme": "dark"
  // Missing closing brace
`;

      fs.writeFileSync(testFilePath, corruptedContent, 'utf-8');

      const consoleSpy = vi
        .spyOn(console, 'error')
        .mockImplementation(() => {});

      expect(() => {
        updateSettingsFilePreservingFormat(testFilePath, {
          model: 'gemini-2.5-flash',
        });
      }).not.toThrow();

      expect(consoleSpy).toHaveBeenCalledWith(
        'Error parsing settings file:',
        expect.any(Error),
      );
      expect(consoleSpy).toHaveBeenCalledWith(
        'Settings file may be corrupted. Please check the JSON syntax.',
      );

      const unchangedContent = fs.readFileSync(testFilePath, 'utf-8');
      expect(unchangedContent).toBe(corruptedContent);

      consoleSpy.mockRestore();
    });
  });
});
packages/cli/src/utils/commentJson.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import * as fs from 'node:fs';
import { parse, stringify } from 'comment-json';

/**
 * Updates a JSON file while preserving comments and formatting.
 */
export function updateSettingsFilePreservingFormat(
  filePath: string,
  updates: Record<string, unknown>,
): void {
  if (!fs.existsSync(filePath)) {
    fs.writeFileSync(filePath, JSON.stringify(updates, null, 2), 'utf-8');
    return;
  }

  const originalContent = fs.readFileSync(filePath, 'utf-8');

  let parsed: Record<string, unknown>;
  try {
    parsed = parse(originalContent) as Record<string, unknown>;
  } catch (error) {
    console.error('Error parsing settings file:', error);
    console.error(
      'Settings file may be corrupted. Please check the JSON syntax.',
    );
    return;
  }

  const updatedStructure = applyUpdates(parsed, updates);
  const updatedContent = stringify(updatedStructure, null, 2);

  fs.writeFileSync(filePath, updatedContent, 'utf-8');
}

function applyUpdates(
  current: Record<string, unknown>,
  updates: Record<string, unknown>,
): Record<string, unknown> {
  const result = current;

  for (const key of Object.getOwnPropertyNames(updates)) {
    const value = updates[key];
    if (
      typeof value === 'object' &&
      value !== null &&
      !Array.isArray(value) &&
      typeof result[key] === 'object' &&
      result[key] !== null &&
      !Array.isArray(result[key])
    ) {
      result[key] = applyUpdates(
        result[key] as Record<string, unknown>,
        value as Record<string, unknown>,
      );
    } else {
      result[key] = value;
    }
  }

  return result;
}
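
A minimal usage sketch for updateSettingsFilePreservingFormat, mirroring what the tests above exercise. The settings path here is hypothetical; the real CLI resolves its own settings location.

import * as os from 'node:os';
import * as path from 'node:path';
import { updateSettingsFilePreservingFormat } from './commentJson.js';

// Hypothetical settings location, for illustration only.
const settingsPath = path.join(os.homedir(), '.qwen', 'settings.json');

// Only the listed keys are touched; comments and unrelated keys survive
// because the file is round-tripped through comment-json.
updateSettingsFilePreservingFormat(settingsPath, {
  model: 'gemini-2.5-flash',
  ui: { theme: 'light' },
});

Note that applyUpdates merges nested plain objects recursively, so `ui: { theme: 'light' }` updates only `ui.theme` rather than replacing the whole `ui` block.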
packages/cli/src/utils/deepMerge.test.ts (new file, 163 lines)
@@ -0,0 +1,163 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect } from 'vitest';
import { customDeepMerge } from './deepMerge.js';
import { MergeStrategy } from '../config/settingsSchema.js';

describe('customDeepMerge', () => {
  it('should merge simple objects', () => {
    const target = { a: 1, b: 2 };
    const source = { b: 3, c: 4 };
    const getMergeStrategy = () => undefined;
    const result = customDeepMerge(getMergeStrategy, target, source);
    expect(result).toEqual({ a: 1, b: 3, c: 4 });
  });

  it('should merge nested objects', () => {
    const target = { a: { x: 1 }, b: 2 };
    const source = { a: { y: 2 }, c: 3 };
    const getMergeStrategy = () => undefined;
    const result = customDeepMerge(getMergeStrategy, target, source);
    expect(result).toEqual({ a: { x: 1, y: 2 }, b: 2, c: 3 });
  });

  it('should replace arrays by default', () => {
    const target = { a: [1, 2] };
    const source = { a: [3, 4] };
    const getMergeStrategy = () => undefined;
    const result = customDeepMerge(getMergeStrategy, target, source);
    expect(result).toEqual({ a: [3, 4] });
  });

  it('should concatenate arrays with CONCAT strategy', () => {
    const target = { a: [1, 2] };
    const source = { a: [3, 4] };
    const getMergeStrategy = (path: string[]) =>
      path.join('.') === 'a' ? MergeStrategy.CONCAT : undefined;
    const result = customDeepMerge(getMergeStrategy, target, source);
    expect(result).toEqual({ a: [1, 2, 3, 4] });
  });

  it('should union arrays with UNION strategy', () => {
    const target = { a: [1, 2, 3] };
    const source = { a: [3, 4, 5] };
    const getMergeStrategy = (path: string[]) =>
      path.join('.') === 'a' ? MergeStrategy.UNION : undefined;
    const result = customDeepMerge(getMergeStrategy, target, source);
    expect(result).toEqual({ a: [1, 2, 3, 4, 5] });
  });

  it('should shallow merge objects with SHALLOW_MERGE strategy', () => {
    const target = { a: { x: 1, y: 1 } };
    const source = { a: { y: 2, z: 2 } };
    const getMergeStrategy = (path: string[]) =>
      path.join('.') === 'a' ? MergeStrategy.SHALLOW_MERGE : undefined;
    const result = customDeepMerge(getMergeStrategy, target, source);
    // This is still a deep merge, but the properties of the object are merged.
    expect(result).toEqual({ a: { x: 1, y: 2, z: 2 } });
  });

  it('should handle multiple source objects', () => {
    const target = { a: 1 };
    const source1 = { b: 2 };
    const source2 = { c: 3 };
    const getMergeStrategy = () => undefined;
    const result = customDeepMerge(getMergeStrategy, target, source1, source2);
    expect(result).toEqual({ a: 1, b: 2, c: 3 });
  });

  it('should return an empty object if no sources are provided', () => {
    const getMergeStrategy = () => undefined;
    const result = customDeepMerge(getMergeStrategy);
    expect(result).toEqual({});
  });

  it('should return a deep copy of the first source if only one is provided', () => {
    const target = { a: { b: 1 } };
    const getMergeStrategy = () => undefined;
    const result = customDeepMerge(getMergeStrategy, target);
    expect(result).toEqual(target);
    expect(result).not.toBe(target);
  });

  it('should not mutate the original source objects', () => {
    const target = { a: { x: 1 }, b: [1, 2] };
    const source = { a: { y: 2 }, b: [3, 4] };
    const originalTarget = JSON.parse(JSON.stringify(target));
    const originalSource = JSON.parse(JSON.stringify(source));
    const getMergeStrategy = () => undefined;

    customDeepMerge(getMergeStrategy, target, source);

    expect(target).toEqual(originalTarget);
    expect(source).toEqual(originalSource);
  });

  it('should not mutate sources when merging multiple levels deep', () => {
    const s1 = { data: { common: { val: 'from s1' }, s1_only: true } };
    const s2 = { data: { common: { val: 'from s2' }, s2_only: true } };
    const s1_original = JSON.parse(JSON.stringify(s1));
    const s2_original = JSON.parse(JSON.stringify(s2));

    const getMergeStrategy = () => undefined;
    const result = customDeepMerge(getMergeStrategy, s1, s2);

    expect(s1).toEqual(s1_original);
    expect(s2).toEqual(s2_original);
    expect(result).toEqual({
      data: {
        common: { val: 'from s2' },
        s1_only: true,
        s2_only: true,
      },
    });
  });

  it('should handle complex nested strategies', () => {
    const target = {
      level1: {
        arr1: [1, 2],
        arr2: [1, 2],
        obj1: { a: 1 },
      },
    };
    const source = {
      level1: {
        arr1: [3, 4],
        arr2: [2, 3],
        obj1: { b: 2 },
      },
    };
    const getMergeStrategy = (path: string[]) => {
      const p = path.join('.');
      if (p === 'level1.arr1') return MergeStrategy.CONCAT;
      if (p === 'level1.arr2') return MergeStrategy.UNION;
      if (p === 'level1.obj1') return MergeStrategy.SHALLOW_MERGE;
      return undefined;
    };

    const result = customDeepMerge(getMergeStrategy, target, source);

    expect(result).toEqual({
      level1: {
        arr1: [1, 2, 3, 4],
        arr2: [1, 2, 3],
        obj1: { a: 1, b: 2 },
      },
    });
  });

  it('should not pollute the prototype', () => {
    const maliciousSource = JSON.parse('{"__proto__": {"polluted": "true"}}');
    const getMergeStrategy = () => undefined;
    const result = customDeepMerge(getMergeStrategy, {}, maliciousSource);

    expect(result).toEqual({});
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    expect(({} as any).polluted).toBeUndefined();
  });
});
packages/cli/src/utils/deepMerge.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { MergeStrategy } from '../config/settingsSchema.js';

export type Mergeable =
  | string
  | number
  | boolean
  | null
  | undefined
  | object
  | Mergeable[];

export type MergeableObject = Record<string, Mergeable>;

function isPlainObject(item: unknown): item is MergeableObject {
  return !!item && typeof item === 'object' && !Array.isArray(item);
}

function mergeRecursively(
  target: MergeableObject,
  source: MergeableObject,
  getMergeStrategyForPath: (path: string[]) => MergeStrategy | undefined,
  path: string[] = [],
) {
  for (const key of Object.keys(source)) {
    if (key === '__proto__' || key === 'constructor' || key === 'prototype') {
      continue;
    }
    const newPath = [...path, key];
    const srcValue = source[key];
    const objValue = target[key];
    const mergeStrategy = getMergeStrategyForPath(newPath);

    if (mergeStrategy === MergeStrategy.SHALLOW_MERGE && objValue && srcValue) {
      const obj1 =
        typeof objValue === 'object' && objValue !== null ? objValue : {};
      const obj2 =
        typeof srcValue === 'object' && srcValue !== null ? srcValue : {};
      target[key] = { ...obj1, ...obj2 };
      continue;
    }

    if (Array.isArray(objValue)) {
      const srcArray = Array.isArray(srcValue) ? srcValue : [srcValue];
      if (mergeStrategy === MergeStrategy.CONCAT) {
        target[key] = objValue.concat(srcArray);
        continue;
      }
      if (mergeStrategy === MergeStrategy.UNION) {
        target[key] = [...new Set(objValue.concat(srcArray))];
        continue;
      }
    }

    if (isPlainObject(objValue) && isPlainObject(srcValue)) {
      mergeRecursively(objValue, srcValue, getMergeStrategyForPath, newPath);
    } else if (isPlainObject(srcValue)) {
      target[key] = {};
      mergeRecursively(
        target[key] as MergeableObject,
        srcValue,
        getMergeStrategyForPath,
        newPath,
      );
    } else {
      target[key] = srcValue;
    }
  }
  return target;
}

export function customDeepMerge(
  getMergeStrategyForPath: (path: string[]) => MergeStrategy | undefined,
  ...sources: MergeableObject[]
): MergeableObject {
  const result: MergeableObject = {};

  for (const source of sources) {
    if (source) {
      mergeRecursively(result, source, getMergeStrategyForPath);
    }
  }

  return result;
}
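
A small usage sketch of customDeepMerge, in the spirit of the tests above. The "tools.allowed" path and the two source objects are hypothetical; in the real CLI the strategy callback would consult the settings schema rather than a hard-coded path.

import { customDeepMerge } from './deepMerge.js';
import { MergeStrategy } from '../config/settingsSchema.js';

// Hypothetical path-based strategy: only "tools.allowed" arrays are unioned,
// everything else uses the default behavior (arrays replace, objects deep-merge).
const getMergeStrategy = (path: string[]) =>
  path.join('.') === 'tools.allowed' ? MergeStrategy.UNION : undefined;

const merged = customDeepMerge(
  getMergeStrategy,
  { tools: { allowed: ['read'] }, model: 'a' }, // e.g. user-level settings
  { tools: { allowed: ['read', 'write'] }, model: 'b' }, // e.g. workspace settings
);
// merged deep-equals { tools: { allowed: ['read', 'write'] }, model: 'b' }

Later sources win for scalar values, while the strategy callback decides how arrays and objects at specific paths are combined; the __proto__/constructor/prototype guard in mergeRecursively is what the prototype-pollution test exercises.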
packages/cli/src/utils/envVarResolver.test.ts (new file, 297 lines)
@@ -0,0 +1,297 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import {
  resolveEnvVarsInString,
  resolveEnvVarsInObject,
} from './envVarResolver.js';

describe('resolveEnvVarsInString', () => {
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    originalEnv = { ...process.env };
  });

  afterEach(() => {
    process.env = originalEnv;
  });

  it('should resolve $VAR_NAME format', () => {
    process.env['TEST_VAR'] = 'test-value';

    const result = resolveEnvVarsInString('Value is $TEST_VAR');

    expect(result).toBe('Value is test-value');
  });

  it('should resolve ${VAR_NAME} format', () => {
    process.env['TEST_VAR'] = 'test-value';

    const result = resolveEnvVarsInString('Value is ${TEST_VAR}');

    expect(result).toBe('Value is test-value');
  });

  it('should resolve multiple variables in the same string', () => {
    process.env['HOST'] = 'localhost';
    process.env['PORT'] = '3000';

    const result = resolveEnvVarsInString('URL: http://$HOST:${PORT}/api');

    expect(result).toBe('URL: http://localhost:3000/api');
  });

  it('should leave undefined variables unchanged', () => {
    const result = resolveEnvVarsInString('Value is $UNDEFINED_VAR');

    expect(result).toBe('Value is $UNDEFINED_VAR');
  });

  it('should leave undefined variables with braces unchanged', () => {
    const result = resolveEnvVarsInString('Value is ${UNDEFINED_VAR}');

    expect(result).toBe('Value is ${UNDEFINED_VAR}');
  });

  it('should handle empty string', () => {
    const result = resolveEnvVarsInString('');

    expect(result).toBe('');
  });

  it('should handle string without variables', () => {
    const result = resolveEnvVarsInString('No variables here');

    expect(result).toBe('No variables here');
  });

  it('should handle mixed defined and undefined variables', () => {
    process.env['DEFINED'] = 'value';

    const result = resolveEnvVarsInString('$DEFINED and $UNDEFINED mixed');

    expect(result).toBe('value and $UNDEFINED mixed');
  });
});

describe('resolveEnvVarsInObject', () => {
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    originalEnv = { ...process.env };
  });

  afterEach(() => {
    process.env = originalEnv;
  });

  it('should resolve variables in nested objects', () => {
    process.env['API_KEY'] = 'secret-123';
    process.env['DB_URL'] = 'postgresql://localhost/test';

    const config = {
      server: {
        auth: {
          key: '$API_KEY',
        },
        database: '${DB_URL}',
      },
      port: 3000,
    };

    const result = resolveEnvVarsInObject(config);

    expect(result).toEqual({
      server: {
        auth: {
          key: 'secret-123',
        },
        database: 'postgresql://localhost/test',
      },
      port: 3000,
    });
  });

  it('should resolve variables in arrays', () => {
    process.env['ENV'] = 'production';
    process.env['VERSION'] = '1.0.0';

    const config = {
      tags: ['$ENV', 'app', '${VERSION}'],
      metadata: {
        env: '$ENV',
      },
    };

    const result = resolveEnvVarsInObject(config);

    expect(result).toEqual({
      tags: ['production', 'app', '1.0.0'],
      metadata: {
        env: 'production',
      },
    });
  });

  it('should preserve non-string types', () => {
    const config = {
      enabled: true,
      count: 42,
      value: null,
      data: undefined,
      tags: ['item1', 'item2'],
    };

    const result = resolveEnvVarsInObject(config);

    expect(result).toEqual(config);
  });

  it('should handle MCP server config structure', () => {
    process.env['API_TOKEN'] = 'token-123';
    process.env['SERVER_PORT'] = '8080';

    const extensionConfig = {
      name: 'test-extension',
      version: '1.0.0',
      mcpServers: {
        'test-server': {
          command: 'node',
          args: ['server.js', '--port', '${SERVER_PORT}'],
          env: {
            API_KEY: '$API_TOKEN',
            STATIC_VALUE: 'unchanged',
          },
          timeout: 5000,
        },
      },
    };

    const result = resolveEnvVarsInObject(extensionConfig);

    expect(result).toEqual({
      name: 'test-extension',
      version: '1.0.0',
      mcpServers: {
        'test-server': {
          command: 'node',
          args: ['server.js', '--port', '8080'],
          env: {
            API_KEY: 'token-123',
            STATIC_VALUE: 'unchanged',
          },
          timeout: 5000,
        },
      },
    });
  });

  it('should handle empty and null values', () => {
    const config = {
      empty: '',
      nullValue: null,
      undefinedValue: undefined,
      zero: 0,
      false: false,
    };

    const result = resolveEnvVarsInObject(config);

    expect(result).toEqual(config);
  });

  it('should handle circular references in objects without infinite recursion', () => {
    process.env['TEST_VAR'] = 'resolved-value';

    type ConfigWithCircularRef = {
      name: string;
      value: number;
      self?: ConfigWithCircularRef;
    };

    const config: ConfigWithCircularRef = {
      name: '$TEST_VAR',
      value: 42,
    };
    // Create circular reference
    config.self = config;

    const result = resolveEnvVarsInObject(config);

    expect(result.name).toBe('resolved-value');
    expect(result.value).toBe(42);
    expect(result.self).toBeDefined();
    expect(result.self?.name).toBe('$TEST_VAR'); // Circular reference should be shallow copied
    expect(result.self?.value).toBe(42);
    // Verify it doesn't create infinite recursion by checking it's not the same object
    expect(result.self).not.toBe(result);
  });

  it('should handle circular references in arrays without infinite recursion', () => {
    process.env['ARRAY_VAR'] = 'array-value';

    type ArrayWithCircularRef = Array<string | number | ArrayWithCircularRef>;
    const arr: ArrayWithCircularRef = ['$ARRAY_VAR', 123];
    // Create circular reference
    arr.push(arr);

    const result = resolveEnvVarsInObject(arr) as ArrayWithCircularRef;

    expect(result[0]).toBe('array-value');
    expect(result[1]).toBe(123);
    expect(Array.isArray(result[2])).toBe(true);
    const subArray = result[2] as ArrayWithCircularRef;
    expect(subArray[0]).toBe('$ARRAY_VAR'); // Circular reference should be shallow copied
    expect(subArray[1]).toBe(123);
    // Verify it doesn't create infinite recursion
    expect(result[2]).not.toBe(result);
  });

  it('should handle complex nested circular references', () => {
    process.env['NESTED_VAR'] = 'nested-resolved';

    type ObjWithRef = {
      name: string;
      id: number;
      ref?: ObjWithRef;
    };

    const obj1: ObjWithRef = { name: '$NESTED_VAR', id: 1 };
    const obj2: ObjWithRef = { name: 'static', id: 2 };

    // Create cross-references
    obj1.ref = obj2;
    obj2.ref = obj1;

    const config = {
      primary: obj1,
      secondary: obj2,
      value: '$NESTED_VAR',
    };

    const result = resolveEnvVarsInObject(config);

    expect(result.value).toBe('nested-resolved');
    expect(result.primary.name).toBe('nested-resolved');
    expect(result.primary.id).toBe(1);
    expect(result.secondary.name).toBe('static');
    expect(result.secondary.id).toBe(2);

    // Check that circular references are handled (shallow copied)
    expect(result.primary.ref).toBeDefined();
    expect(result.secondary.ref).toBeDefined();
    expect(result.primary.ref?.name).toBe('static'); // Should be shallow copy
    expect(result.secondary.ref?.name).toBe('nested-resolved'); // The shallow copy still gets processed

    // Most importantly: verify no infinite recursion by checking objects are different
    expect(result.primary.ref).not.toBe(result.secondary);
    expect(result.secondary.ref).not.toBe(result.primary);
    expect(result.primary).not.toBe(obj1); // New object created
    expect(result.secondary).not.toBe(obj2); // New object created
  });
});
packages/cli/src/utils/envVarResolver.ts (new file, 112 lines)
@@ -0,0 +1,112 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * Resolves environment variables in a string.
 * Replaces $VAR_NAME and ${VAR_NAME} with their corresponding environment variable values.
 * If the environment variable is not defined, the original placeholder is preserved.
 *
 * @param value - The string that may contain environment variable placeholders
 * @returns The string with environment variables resolved
 *
 * @example
 * resolveEnvVarsInString("Token: $API_KEY") // Returns "Token: secret-123"
 * resolveEnvVarsInString("URL: ${BASE_URL}/api") // Returns "URL: https://api.example.com/api"
 * resolveEnvVarsInString("Missing: $UNDEFINED_VAR") // Returns "Missing: $UNDEFINED_VAR"
 */
export function resolveEnvVarsInString(value: string): string {
  const envVarRegex = /\$(?:(\w+)|{([^}]+)})/g; // Find $VAR_NAME or ${VAR_NAME}
  return value.replace(envVarRegex, (match, varName1, varName2) => {
    const varName = varName1 || varName2;
    if (process && process.env && typeof process.env[varName] === 'string') {
      return process.env[varName]!;
    }
    return match;
  });
}

/**
 * Recursively resolves environment variables in an object of any type.
 * Handles strings, arrays, nested objects, and preserves other primitive types.
 * Protected against circular references using a WeakSet to track visited objects.
 *
 * @param obj - The object to process for environment variable resolution
 * @returns A new object with environment variables resolved
 *
 * @example
 * const config = {
 *   server: {
 *     host: "$HOST",
 *     port: "${PORT}",
 *     enabled: true,
 *     tags: ["$ENV", "api"]
 *   }
 * };
 * const resolved = resolveEnvVarsInObject(config);
 */
export function resolveEnvVarsInObject<T>(obj: T): T {
  return resolveEnvVarsInObjectInternal(obj, new WeakSet());
}

/**
 * Internal implementation of resolveEnvVarsInObject with circular reference protection.
 *
 * @param obj - The object to process
 * @param visited - WeakSet to track visited objects and prevent circular references
 * @returns A new object with environment variables resolved
 */
function resolveEnvVarsInObjectInternal<T>(
  obj: T,
  visited: WeakSet<object>,
): T {
  if (
    obj === null ||
    obj === undefined ||
    typeof obj === 'boolean' ||
    typeof obj === 'number'
  ) {
    return obj;
  }

  if (typeof obj === 'string') {
    return resolveEnvVarsInString(obj) as unknown as T;
  }

  if (Array.isArray(obj)) {
    // Check for circular reference
    if (visited.has(obj)) {
      // Return a shallow copy to break the cycle
      return [...obj] as unknown as T;
    }

    visited.add(obj);
    const result = obj.map((item) =>
      resolveEnvVarsInObjectInternal(item, visited),
    ) as unknown as T;
    visited.delete(obj);
    return result;
  }

  if (typeof obj === 'object') {
    // Check for circular reference
    if (visited.has(obj as object)) {
      // Return a shallow copy to break the cycle
      return { ...obj } as T;
    }

    visited.add(obj as object);
    const newObj = { ...obj } as T;
    for (const key in newObj) {
      if (Object.prototype.hasOwnProperty.call(newObj, key)) {
        newObj[key] = resolveEnvVarsInObjectInternal(newObj[key], visited);
      }
    }
    visited.delete(obj as object);
    return newObj;
  }

  return obj;
}
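
A brief usage sketch of resolveEnvVarsInObject, modeled on the MCP-server case in the tests above. The config shape and the API_TOKEN variable are illustrative assumptions.

import { resolveEnvVarsInObject } from './envVarResolver.js';

// Set only so the sketch is deterministic; normally the variable would
// already be present in the process environment.
process.env['API_TOKEN'] = 'token-123';

const serverConfig = resolveEnvVarsInObject({
  command: 'node',
  args: ['server.js', '--port', '${PORT}'], // PORT unset, placeholder is kept
  env: { API_KEY: '$API_TOKEN' },
});
// serverConfig.env.API_KEY === 'token-123'
// serverConfig.args[2] === '${PORT}'

Unknown placeholders are deliberately left intact rather than replaced with an empty string, which keeps misconfigured entries visible instead of silently blanking them.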
packages/cli/src/utils/errors.test.ts (new file, 476 lines)
@@ -0,0 +1,476 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { vi, type MockInstance } from 'vitest';
import type { Config } from '@qwen-code/qwen-code-core';
import { OutputFormat, FatalInputError } from '@qwen-code/qwen-code-core';
import {
  getErrorMessage,
  handleError,
  handleToolError,
  handleCancellationError,
  handleMaxTurnsExceededError,
} from './errors.js';

// Mock the core modules
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
  const original =
    await importOriginal<typeof import('@qwen-code/qwen-code-core')>();

  return {
    ...original,
    parseAndFormatApiError: vi.fn((error: unknown) => {
      if (error instanceof Error) {
        return `API Error: ${error.message}`;
      }
      return `API Error: ${String(error)}`;
    }),
    JsonFormatter: vi.fn().mockImplementation(() => ({
      formatError: vi.fn((error: Error, code?: string | number) =>
        JSON.stringify(
          {
            error: {
              type: error.constructor.name,
              message: error.message,
              ...(code && { code }),
            },
          },
          null,
          2,
        ),
      ),
    })),
    FatalToolExecutionError: class extends Error {
      constructor(message: string) {
        super(message);
        this.name = 'FatalToolExecutionError';
        this.exitCode = 54;
      }
      exitCode: number;
    },
    FatalCancellationError: class extends Error {
      constructor(message: string) {
        super(message);
        this.name = 'FatalCancellationError';
        this.exitCode = 130;
      }
      exitCode: number;
    },
  };
});

describe('errors', () => {
  let mockConfig: Config;
  let processExitSpy: MockInstance;
  let consoleErrorSpy: MockInstance;

  beforeEach(() => {
    // Reset mocks
    vi.clearAllMocks();

    // Mock console.error
    consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});

    // Mock process.exit to throw instead of actually exiting
    processExitSpy = vi.spyOn(process, 'exit').mockImplementation((code) => {
      throw new Error(`process.exit called with code: ${code}`);
    });

    // Create mock config
    mockConfig = {
      getOutputFormat: vi.fn().mockReturnValue(OutputFormat.TEXT),
      getContentGeneratorConfig: vi.fn().mockReturnValue({ authType: 'test' }),
    } as unknown as Config;
  });

  afterEach(() => {
    consoleErrorSpy.mockRestore();
    processExitSpy.mockRestore();
  });

  describe('getErrorMessage', () => {
    it('should return error message for Error instances', () => {
      const error = new Error('Test error message');
      expect(getErrorMessage(error)).toBe('Test error message');
    });

    it('should convert non-Error values to strings', () => {
      expect(getErrorMessage('string error')).toBe('string error');
      expect(getErrorMessage(123)).toBe('123');
      expect(getErrorMessage(null)).toBe('null');
      expect(getErrorMessage(undefined)).toBe('undefined');
    });

    it('should handle objects', () => {
      const obj = { message: 'test' };
      expect(getErrorMessage(obj)).toBe('[object Object]');
    });
  });

  describe('handleError', () => {
    describe('in text mode', () => {
      beforeEach(() => {
        (
          mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
        ).mockReturnValue(OutputFormat.TEXT);
      });

      it('should log error message and re-throw', () => {
        const testError = new Error('Test error');

        expect(() => {
          handleError(testError, mockConfig);
        }).toThrow(testError);

        expect(consoleErrorSpy).toHaveBeenCalledWith('API Error: Test error');
      });

      it('should handle non-Error objects', () => {
        const testError = 'String error';

        expect(() => {
          handleError(testError, mockConfig);
        }).toThrow(testError);

        expect(consoleErrorSpy).toHaveBeenCalledWith('API Error: String error');
      });
    });

    describe('in JSON mode', () => {
      beforeEach(() => {
        (
          mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
        ).mockReturnValue(OutputFormat.JSON);
      });

      it('should format error as JSON and exit with default code', () => {
        const testError = new Error('Test error');

        expect(() => {
          handleError(testError, mockConfig);
        }).toThrow('process.exit called with code: 1');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'Error',
                message: 'Test error',
                code: 1,
              },
            },
            null,
            2,
          ),
        );
      });

      it('should use custom error code when provided', () => {
        const testError = new Error('Test error');

        expect(() => {
          handleError(testError, mockConfig, 42);
        }).toThrow('process.exit called with code: 42');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'Error',
                message: 'Test error',
                code: 42,
              },
            },
            null,
            2,
          ),
        );
      });

      it('should extract exitCode from FatalError instances', () => {
        const fatalError = new FatalInputError('Fatal error');

        expect(() => {
          handleError(fatalError, mockConfig);
        }).toThrow('process.exit called with code: 42');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'FatalInputError',
                message: 'Fatal error',
                code: 42,
              },
            },
            null,
            2,
          ),
        );
      });

      it('should handle error with code property', () => {
        const errorWithCode = new Error('Error with code') as Error & {
          code: number;
        };
        errorWithCode.code = 404;

        expect(() => {
          handleError(errorWithCode, mockConfig);
        }).toThrow('process.exit called with code: 404');
      });

      it('should handle error with status property', () => {
        const errorWithStatus = new Error('Error with status') as Error & {
          status: string;
        };
        errorWithStatus.status = 'TIMEOUT';

        expect(() => {
          handleError(errorWithStatus, mockConfig);
        }).toThrow('process.exit called with code: 1'); // string codes become 1

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'Error',
                message: 'Error with status',
                code: 'TIMEOUT',
              },
            },
            null,
            2,
          ),
        );
      });
    });
  });

  describe('handleToolError', () => {
    const toolName = 'test-tool';
    const toolError = new Error('Tool failed');

    describe('in text mode', () => {
      beforeEach(() => {
        (
          mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
        ).mockReturnValue(OutputFormat.TEXT);
      });

      it('should log error message to stderr', () => {
        handleToolError(toolName, toolError, mockConfig);

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          'Error executing tool test-tool: Tool failed',
        );
      });

      it('should use resultDisplay when provided', () => {
        handleToolError(
          toolName,
          toolError,
          mockConfig,
          'CUSTOM_ERROR',
          'Custom display message',
        );

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          'Error executing tool test-tool: Custom display message',
        );
      });
    });

    describe('in JSON mode', () => {
      beforeEach(() => {
        (
          mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
        ).mockReturnValue(OutputFormat.JSON);
      });

      it('should format error as JSON and exit with default code', () => {
        expect(() => {
          handleToolError(toolName, toolError, mockConfig);
        }).toThrow('process.exit called with code: 54');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'FatalToolExecutionError',
                message: 'Error executing tool test-tool: Tool failed',
                code: 54,
              },
            },
            null,
            2,
          ),
        );
      });

      it('should use custom error code', () => {
        expect(() => {
          handleToolError(toolName, toolError, mockConfig, 'CUSTOM_TOOL_ERROR');
        }).toThrow('process.exit called with code: 54');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'FatalToolExecutionError',
                message: 'Error executing tool test-tool: Tool failed',
                code: 'CUSTOM_TOOL_ERROR',
              },
            },
            null,
            2,
          ),
        );
      });

      it('should use numeric error code and exit with that code', () => {
        expect(() => {
          handleToolError(toolName, toolError, mockConfig, 500);
        }).toThrow('process.exit called with code: 500');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'FatalToolExecutionError',
                message: 'Error executing tool test-tool: Tool failed',
                code: 500,
              },
            },
            null,
            2,
          ),
        );
      });

      it('should prefer resultDisplay over error message', () => {
        expect(() => {
          handleToolError(
            toolName,
            toolError,
            mockConfig,
            'DISPLAY_ERROR',
            'Display message',
          );
        }).toThrow('process.exit called with code: 54');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'FatalToolExecutionError',
                message: 'Error executing tool test-tool: Display message',
                code: 'DISPLAY_ERROR',
              },
            },
            null,
            2,
          ),
        );
      });
    });
  });

  describe('handleCancellationError', () => {
    describe('in text mode', () => {
      beforeEach(() => {
        (
          mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
        ).mockReturnValue(OutputFormat.TEXT);
      });

      it('should log cancellation message and exit with 130', () => {
        expect(() => {
          handleCancellationError(mockConfig);
        }).toThrow('process.exit called with code: 130');

        expect(consoleErrorSpy).toHaveBeenCalledWith('Operation cancelled.');
      });
    });

    describe('in JSON mode', () => {
      beforeEach(() => {
        (
          mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
        ).mockReturnValue(OutputFormat.JSON);
      });

      it('should format cancellation as JSON and exit with 130', () => {
        expect(() => {
          handleCancellationError(mockConfig);
        }).toThrow('process.exit called with code: 130');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'FatalCancellationError',
                message: 'Operation cancelled.',
                code: 130,
              },
            },
            null,
            2,
          ),
        );
      });
    });
  });

  describe('handleMaxTurnsExceededError', () => {
    describe('in text mode', () => {
      beforeEach(() => {
        (
          mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
        ).mockReturnValue(OutputFormat.TEXT);
      });

      it('should log max turns message and exit with 53', () => {
        expect(() => {
          handleMaxTurnsExceededError(mockConfig);
        }).toThrow('process.exit called with code: 53');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          'Reached max session turns for this session. Increase the number of turns by specifying maxSessionTurns in settings.json.',
        );
      });
    });

    describe('in JSON mode', () => {
      beforeEach(() => {
        (
          mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
        ).mockReturnValue(OutputFormat.JSON);
      });

      it('should format max turns error as JSON and exit with 53', () => {
        expect(() => {
          handleMaxTurnsExceededError(mockConfig);
        }).toThrow('process.exit called with code: 53');

        expect(consoleErrorSpy).toHaveBeenCalledWith(
          JSON.stringify(
            {
              error: {
                type: 'FatalTurnLimitedError',
                message:
                  'Reached max session turns for this session. Increase the number of turns by specifying maxSessionTurns in settings.json.',
                code: 53,
              },
            },
            null,
            2,
          ),
        );
      });
    });
  });
});
packages/cli/src/utils/errors.ts (modified; filename inferred from the test file above, which imports './errors.js')
@@ -4,9 +4,159 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import type { Config } from '@qwen-code/qwen-code-core';
import {
  OutputFormat,
  JsonFormatter,
  parseAndFormatApiError,
  FatalTurnLimitedError,
  FatalToolExecutionError,
  FatalCancellationError,
} from '@qwen-code/qwen-code-core';

export function getErrorMessage(error: unknown): string {
  if (error instanceof Error) {
    return error.message;
  }
  return String(error);
}

interface ErrorWithCode extends Error {
  exitCode?: number;
  code?: string | number;
  status?: string | number;
}

/**
 * Extracts the appropriate error code from an error object.
 */
function extractErrorCode(error: unknown): string | number {
  const errorWithCode = error as ErrorWithCode;

  // Prioritize exitCode for FatalError types, fall back to other codes
  if (typeof errorWithCode.exitCode === 'number') {
    return errorWithCode.exitCode;
  }
  if (errorWithCode.code !== undefined) {
    return errorWithCode.code;
  }
  if (errorWithCode.status !== undefined) {
    return errorWithCode.status;
  }

  return 1; // Default exit code
}

/**
 * Converts an error code to a numeric exit code.
 */
function getNumericExitCode(errorCode: string | number): number {
  return typeof errorCode === 'number' ? errorCode : 1;
}

/**
 * Handles errors consistently for both JSON and text output formats.
 * In JSON mode, outputs formatted JSON error and exits.
 * In text mode, outputs error message and re-throws.
 */
export function handleError(
  error: unknown,
  config: Config,
  customErrorCode?: string | number,
): never {
  const errorMessage = parseAndFormatApiError(
    error,
    config.getContentGeneratorConfig()?.authType,
  );

  if (config.getOutputFormat() === OutputFormat.JSON) {
    const formatter = new JsonFormatter();
    const errorCode = customErrorCode ?? extractErrorCode(error);

    const formattedError = formatter.formatError(
      error instanceof Error ? error : new Error(getErrorMessage(error)),
      errorCode,
    );

    console.error(formattedError);
    process.exit(getNumericExitCode(errorCode));
  } else {
    console.error(errorMessage);
    throw error;
  }
}

/**
 * Handles tool execution errors specifically.
 * In JSON mode, outputs formatted JSON error and exits.
 * In text mode, outputs error message to stderr only.
 */
export function handleToolError(
  toolName: string,
  toolError: Error,
  config: Config,
  errorCode?: string | number,
  resultDisplay?: string,
): void {
  const errorMessage = `Error executing tool ${toolName}: ${resultDisplay || toolError.message}`;
  const toolExecutionError = new FatalToolExecutionError(errorMessage);

  if (config.getOutputFormat() === OutputFormat.JSON) {
    const formatter = new JsonFormatter();
    const formattedError = formatter.formatError(
      toolExecutionError,
      errorCode ?? toolExecutionError.exitCode,
    );

    console.error(formattedError);
    process.exit(
      typeof errorCode === 'number' ? errorCode : toolExecutionError.exitCode,
    );
  } else {
    console.error(errorMessage);
  }
}

/**
 * Handles cancellation/abort signals consistently.
 */
export function handleCancellationError(config: Config): never {
  const cancellationError = new FatalCancellationError('Operation cancelled.');

  if (config.getOutputFormat() === OutputFormat.JSON) {
    const formatter = new JsonFormatter();
    const formattedError = formatter.formatError(
      cancellationError,
      cancellationError.exitCode,
    );

    console.error(formattedError);
    process.exit(cancellationError.exitCode);
  } else {
    console.error(cancellationError.message);
    process.exit(cancellationError.exitCode);
  }
}

/**
 * Handles max session turns exceeded consistently.
 */
export function handleMaxTurnsExceededError(config: Config): never {
  const maxTurnsError = new FatalTurnLimitedError(
    'Reached max session turns for this session. Increase the number of turns by specifying maxSessionTurns in settings.json.',
  );

  if (config.getOutputFormat() === OutputFormat.JSON) {
    const formatter = new JsonFormatter();
    const formattedError = formatter.formatError(
      maxTurnsError,
      maxTurnsError.exitCode,
    );

    console.error(formattedError);
    process.exit(maxTurnsError.exitCode);
  } else {
    console.error(maxTurnsError.message);
    process.exit(maxTurnsError.exitCode);
  }
}
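
A sketch of how a non-interactive runner might use handleError. Only the handleError call and its signature come from the module above; runPrompt and executeTurn are hypothetical names introduced for illustration.

import type { Config } from '@qwen-code/qwen-code-core';
import { handleError } from './errors.js';

// Hypothetical stand-in for the real turn-execution logic.
async function executeTurn(_config: Config, prompt: string): Promise<string> {
  return `echo: ${prompt}\n`;
}

export async function runPrompt(config: Config, prompt: string): Promise<void> {
  try {
    const output = await executeTurn(config, prompt);
    process.stdout.write(output);
  } catch (error) {
    // JSON output mode: prints a structured error object and calls process.exit.
    // Text mode: prints the parsed API error to stderr and re-throws.
    handleError(error, config);
  }
}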
@@ -9,6 +9,7 @@ import { EventEmitter } from 'node:events';
export enum AppEvent {
  OpenDebugConsole = 'open-debug-console',
  LogError = 'log-error',
  OauthDisplayMessage = 'oauth-display-message',
}

export const appEvents = new EventEmitter();
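// A minimal usage sketch (illustrative, not part of this change) for the new
// OauthDisplayMessage event; the listener body is hypothetical.
appEvents.on(AppEvent.OauthDisplayMessage, (message: string) => {
  console.log(message);
});
appEvents.emit(AppEvent.OauthDisplayMessage, 'Visit the URL below to sign in.');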
15
packages/cli/src/utils/math.ts
Normal file
@@ -0,0 +1,15 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * Linearly interpolates between two values.
 *
 * @param start The start value.
 * @param end The end value.
 * @param t The interpolation amount (typically between 0 and 1).
 */
export const lerp = (start: number, end: number, t: number): number =>
  start + (end - start) * t;
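// Usage sketch (illustrative, not part of this change): lerp returns `start`
// at t = 0, `end` at t = 1, and the proportional point in between.
const midpoint = lerp(0, 100, 0.5); // 50
const quarter = lerp(10, 20, 0.25); // 12.5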
22
packages/cli/src/utils/processUtils.test.ts
Normal file
@@ -0,0 +1,22 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi } from 'vitest';
import { RELAUNCH_EXIT_CODE, relaunchApp } from './processUtils.js';
import * as cleanup from './cleanup.js';

describe('processUtils', () => {
  const processExit = vi
    .spyOn(process, 'exit')
    .mockReturnValue(undefined as never);
  const runExitCleanup = vi.spyOn(cleanup, 'runExitCleanup');

  it('should run cleanup and exit with the relaunch code', async () => {
    await relaunchApp();
    expect(runExitCleanup).toHaveBeenCalledTimes(1);
    expect(processExit).toHaveBeenCalledWith(RELAUNCH_EXIT_CODE);
  });
});
20
packages/cli/src/utils/processUtils.ts
Normal file
@@ -0,0 +1,20 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { runExitCleanup } from './cleanup.js';

/**
 * Exit code used to signal that the CLI should be relaunched.
 */
export const RELAUNCH_EXIT_CODE = 42;

/**
 * Exits the process with a special code to signal that the parent process should relaunch it.
 */
export async function relaunchApp(): Promise<void> {
  await runExitCleanup();
  process.exit(RELAUNCH_EXIT_CODE);
}
345
packages/cli/src/utils/relaunch.test.ts
Normal file
@@ -0,0 +1,345 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import {
  vi,
  describe,
  it,
  expect,
  beforeEach,
  afterEach,
  type MockInstance,
} from 'vitest';
import { EventEmitter } from 'node:events';
import { RELAUNCH_EXIT_CODE } from './processUtils.js';
import type { ChildProcess } from 'node:child_process';
import { spawn } from 'node:child_process';

vi.mock('node:child_process', async (importOriginal) => {
  const actual = await importOriginal<typeof import('node:child_process')>();
  return {
    ...actual,
    spawn: vi.fn(),
  };
});

const mockedSpawn = vi.mocked(spawn);

// Import the functions initially
import { relaunchAppInChildProcess, relaunchOnExitCode } from './relaunch.js';

describe('relaunchOnExitCode', () => {
  let processExitSpy: MockInstance;
  let consoleErrorSpy: MockInstance;
  let stdinResumeSpy: MockInstance;

  beforeEach(() => {
    processExitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {
      throw new Error('PROCESS_EXIT_CALLED');
    });
    consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
    stdinResumeSpy = vi
      .spyOn(process.stdin, 'resume')
      .mockImplementation(() => process.stdin);
    vi.clearAllMocks();
  });

  afterEach(() => {
    processExitSpy.mockRestore();
    consoleErrorSpy.mockRestore();
    stdinResumeSpy.mockRestore();
  });

  it('should exit with non-RELAUNCH_EXIT_CODE', async () => {
    const runner = vi.fn().mockResolvedValue(0);

    await expect(relaunchOnExitCode(runner)).rejects.toThrow(
      'PROCESS_EXIT_CALLED',
    );

    expect(runner).toHaveBeenCalledTimes(1);
    expect(processExitSpy).toHaveBeenCalledWith(0);
  });

  it('should continue running when RELAUNCH_EXIT_CODE is returned', async () => {
    let callCount = 0;
    const runner = vi.fn().mockImplementation(async () => {
      callCount++;
      if (callCount === 1) return RELAUNCH_EXIT_CODE;
      if (callCount === 2) return RELAUNCH_EXIT_CODE;
      return 0; // Exit on third call
    });

    await expect(relaunchOnExitCode(runner)).rejects.toThrow(
      'PROCESS_EXIT_CALLED',
    );

    expect(runner).toHaveBeenCalledTimes(3);
    expect(processExitSpy).toHaveBeenCalledWith(0);
  });

  it('should handle runner errors', async () => {
    const error = new Error('Runner failed');
    const runner = vi.fn().mockRejectedValue(error);

    await expect(relaunchOnExitCode(runner)).rejects.toThrow(
      'PROCESS_EXIT_CALLED',
    );

    expect(runner).toHaveBeenCalledTimes(1);
    expect(consoleErrorSpy).toHaveBeenCalledWith(
      'Fatal error: Failed to relaunch the CLI process.',
      error,
    );
    expect(stdinResumeSpy).toHaveBeenCalled();
    expect(processExitSpy).toHaveBeenCalledWith(1);
  });
});

describe('relaunchAppInChildProcess', () => {
  let processExitSpy: MockInstance;
  let consoleErrorSpy: MockInstance;
  let stdinPauseSpy: MockInstance;
  let stdinResumeSpy: MockInstance;

  // Store original values to restore later
  const originalEnv = { ...process.env };
  const originalExecArgv = [...process.execArgv];
  const originalArgv = [...process.argv];
  const originalExecPath = process.execPath;

  beforeEach(() => {
    vi.clearAllMocks();

    process.env = { ...originalEnv };
    delete process.env['GEMINI_CLI_NO_RELAUNCH'];

    process.execArgv = [...originalExecArgv];
    process.argv = [...originalArgv];
    process.execPath = '/usr/bin/node';

    processExitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {
      throw new Error('PROCESS_EXIT_CALLED');
    });
    consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
    stdinPauseSpy = vi
      .spyOn(process.stdin, 'pause')
      .mockImplementation(() => process.stdin);
    stdinResumeSpy = vi
      .spyOn(process.stdin, 'resume')
      .mockImplementation(() => process.stdin);
  });

  afterEach(() => {
    process.env = { ...originalEnv };
    process.execArgv = [...originalExecArgv];
    process.argv = [...originalArgv];
    process.execPath = originalExecPath;

    processExitSpy.mockRestore();
    consoleErrorSpy.mockRestore();
    stdinPauseSpy.mockRestore();
    stdinResumeSpy.mockRestore();
  });

  describe('when GEMINI_CLI_NO_RELAUNCH is set', () => {
    it('should return early without spawning a child process', async () => {
      process.env['GEMINI_CLI_NO_RELAUNCH'] = 'true';

      await relaunchAppInChildProcess(['--test'], ['--verbose']);

      expect(mockedSpawn).not.toHaveBeenCalled();
      expect(processExitSpy).not.toHaveBeenCalled();
    });
  });

  describe('when GEMINI_CLI_NO_RELAUNCH is not set', () => {
    beforeEach(() => {
      delete process.env['GEMINI_CLI_NO_RELAUNCH'];
    });

    it('should construct correct node arguments from execArgv, additionalNodeArgs, script, additionalScriptArgs, and argv', () => {
      // Test the argument construction logic directly by extracting it into a testable function
      // This tests the same logic that's used in relaunchAppInChildProcess

      // Setup test data to verify argument ordering
      const mockExecArgv = ['--inspect=9229', '--trace-warnings'];
      const mockArgv = [
        '/usr/bin/node',
        '/path/to/cli.js',
        'command',
        '--flag=value',
        '--verbose',
      ];
      const additionalNodeArgs = [
        '--max-old-space-size=4096',
        '--experimental-modules',
      ];
      const additionalScriptArgs = ['--model', 'gemini-1.5-pro', '--debug'];

      // Extract the argument construction logic from relaunchAppInChildProcess
      const script = mockArgv[1];
      const scriptArgs = mockArgv.slice(2);

      const nodeArgs = [
        ...mockExecArgv,
        ...additionalNodeArgs,
        script,
        ...additionalScriptArgs,
        ...scriptArgs,
      ];

      // Verify the argument construction follows the expected pattern:
      // [...process.execArgv, ...additionalNodeArgs, script, ...additionalScriptArgs, ...scriptArgs]
      const expectedArgs = [
        // Original node execution arguments
        '--inspect=9229',
        '--trace-warnings',
        // Additional node arguments passed to function
        '--max-old-space-size=4096',
        '--experimental-modules',
        // The script path
        '/path/to/cli.js',
        // Additional script arguments passed to function
        '--model',
        'gemini-1.5-pro',
        '--debug',
        // Original script arguments (everything after the script in process.argv)
        'command',
        '--flag=value',
        '--verbose',
      ];

      expect(nodeArgs).toEqual(expectedArgs);
    });

    it('should handle empty additional arguments correctly', () => {
      // Test edge cases with empty arrays
      const mockExecArgv = ['--trace-warnings'];
      const mockArgv = ['/usr/bin/node', '/app/cli.js', 'start'];
      const additionalNodeArgs: string[] = [];
      const additionalScriptArgs: string[] = [];

      // Extract the argument construction logic
      const script = mockArgv[1];
      const scriptArgs = mockArgv.slice(2);

      const nodeArgs = [
        ...mockExecArgv,
        ...additionalNodeArgs,
        script,
        ...additionalScriptArgs,
        ...scriptArgs,
      ];

      const expectedArgs = ['--trace-warnings', '/app/cli.js', 'start'];

      expect(nodeArgs).toEqual(expectedArgs);
    });

    it('should handle complex argument patterns', () => {
      // Test with various argument types including flags with values, boolean flags, etc.
      const mockExecArgv = ['--max-old-space-size=8192'];
      const mockArgv = [
        '/usr/bin/node',
        '/cli.js',
        '--config=/path/to/config.json',
        '--verbose',
        'subcommand',
        '--output',
        'file.txt',
      ];
      const additionalNodeArgs = ['--inspect-brk=9230'];
      const additionalScriptArgs = ['--model=gpt-4', '--temperature=0.7'];

      const script = mockArgv[1];
      const scriptArgs = mockArgv.slice(2);

      const nodeArgs = [
        ...mockExecArgv,
        ...additionalNodeArgs,
        script,
        ...additionalScriptArgs,
        ...scriptArgs,
      ];

      const expectedArgs = [
        '--max-old-space-size=8192',
        '--inspect-brk=9230',
        '/cli.js',
        '--model=gpt-4',
        '--temperature=0.7',
        '--config=/path/to/config.json',
        '--verbose',
        'subcommand',
        '--output',
        'file.txt',
      ];

      expect(nodeArgs).toEqual(expectedArgs);
    });

    // Note: Additional integration tests for spawn behavior are complex due to module mocking
    // limitations with ES modules. The core logic is tested in relaunchOnExitCode tests.

    it('should handle null exit code from child process', async () => {
      process.argv = ['/usr/bin/node', '/app/cli.js'];

      const mockChild = createMockChildProcess(0, false); // Don't auto-close
      mockedSpawn.mockImplementation(() => {
        // Emit close with null code immediately
        setImmediate(() => {
          mockChild.emit('close', null);
        });
        return mockChild;
      });

      // Start the relaunch process
      const promise = relaunchAppInChildProcess([], []);

      await expect(promise).rejects.toThrow('PROCESS_EXIT_CALLED');

      // Should default to exit code 1
      expect(processExitSpy).toHaveBeenCalledWith(1);
    });
  });
});

/**
 * Creates a mock child process that emits events asynchronously
 */
function createMockChildProcess(
  exitCode: number = 0,
  autoClose: boolean = false,
): ChildProcess {
  const mockChild = new EventEmitter() as ChildProcess;

  Object.assign(mockChild, {
    stdin: null,
    stdout: null,
    stderr: null,
    stdio: [null, null, null],
    pid: 12345,
    killed: false,
    exitCode: null,
    signalCode: null,
    spawnargs: [],
    spawnfile: '',
    kill: vi.fn(),
    send: vi.fn(),
    disconnect: vi.fn(),
    unref: vi.fn(),
    ref: vi.fn(),
  });

  if (autoClose) {
    setImmediate(() => {
      mockChild.emit('close', exitCode);
    });
  }

  return mockChild;
}
68
packages/cli/src/utils/relaunch.ts
Normal file
@@ -0,0 +1,68 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { spawn } from 'node:child_process';
import { RELAUNCH_EXIT_CODE } from './processUtils.js';

export async function relaunchOnExitCode(runner: () => Promise<number>) {
  while (true) {
    try {
      const exitCode = await runner();

      if (exitCode !== RELAUNCH_EXIT_CODE) {
        process.exit(exitCode);
      }
    } catch (error) {
      process.stdin.resume();
      console.error('Fatal error: Failed to relaunch the CLI process.', error);
      process.exit(1);
    }
  }
}

export async function relaunchAppInChildProcess(
  additionalNodeArgs: string[],
  additionalScriptArgs: string[],
) {
  if (process.env['GEMINI_CLI_NO_RELAUNCH']) {
    return;
  }

  const runner = () => {
    // process.argv is [node, script, ...args]
    // We want to construct [ ...nodeArgs, script, ...scriptArgs]
    const script = process.argv[1];
    const scriptArgs = process.argv.slice(2);

    const nodeArgs = [
      ...process.execArgv,
      ...additionalNodeArgs,
      script,
      ...additionalScriptArgs,
      ...scriptArgs,
    ];
    const newEnv = { ...process.env, GEMINI_CLI_NO_RELAUNCH: 'true' };

    // The parent process should not be reading from stdin while the child is running.
    process.stdin.pause();

    const child = spawn(process.execPath, nodeArgs, {
      stdio: 'inherit',
      env: newEnv,
    });

    return new Promise<number>((resolve, reject) => {
      child.on('error', reject);
      child.on('close', (code) => {
        // Resume stdin before the parent process exits.
        process.stdin.resume();
        resolve(code ?? 1);
      });
    });
  };

  await relaunchOnExitCode(runner);
}
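// A minimal wiring sketch (illustrative, not part of this change): an entry
// point that keeps respawning the CLI while each child exits with
// RELAUNCH_EXIT_CODE (42). The extra heap flag is only an example argument;
// the nested call inside the child returns early because GEMINI_CLI_NO_RELAUNCH
// is set in its environment.
async function startCli(): Promise<void> {
  await relaunchAppInChildProcess(['--max-old-space-size=8192'], []);
}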
@@ -19,6 +19,7 @@ import { promisify } from 'node:util';
import type { Config, SandboxConfig } from '@qwen-code/qwen-code-core';
import { FatalSandboxError } from '@qwen-code/qwen-code-core';
import { ConsolePatcher } from '../ui/utils/ConsolePatcher.js';
import { randomBytes } from 'node:crypto';

const execAsync = promisify(exec);

@@ -188,7 +189,7 @@ export async function start_sandbox(
  nodeArgs: string[] = [],
  cliConfig?: Config,
  cliArgs: string[] = [],
) {
): Promise<number> {
  const patcher = new ConsolePatcher({
    debugMode: cliConfig?.getDebugMode() || !!process.env['DEBUG'],
    stderr: true,
@@ -339,11 +340,17 @@ export async function start_sandbox(
      );
    }
    // spawn child and let it inherit stdio
    process.stdin.pause();
    sandboxProcess = spawn(config.command, args, {
      stdio: 'inherit',
    });
    await new Promise((resolve) => sandboxProcess?.on('close', resolve));
    return;
    return new Promise((resolve, reject) => {
      sandboxProcess?.on('error', reject);
      sandboxProcess?.on('close', (code) => {
        process.stdin.resume();
        resolve(code ?? 1);
      });
    });
  }

  console.error(`hopping into sandbox (command: ${config.command}) ...`);
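// Plausible wiring sketch (an assumption, not the exact call site in this
// commit): because start_sandbox now resolves with the sandbox child's exit
// code instead of returning void, a caller can feed it into relaunchOnExitCode
// from relaunch.ts so that exit code 42 restarts the sandbox. The first
// parameter's name and type are inferred from the imports above.
async function runSandboxWithRelaunch(
  sandboxConfig: SandboxConfig,
  cliConfig: Config,
  nodeArgs: string[],
  cliArgs: string[],
): Promise<void> {
  await relaunchOnExitCode(() =>
    start_sandbox(sandboxConfig, nodeArgs, cliConfig, cliArgs),
  );
}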
@@ -424,6 +431,9 @@ export async function start_sandbox(
      args.push('-t');
    }

    // allow access to host.docker.internal
    args.push('--add-host', 'host.docker.internal:host-gateway');

    // mount current directory as working directory in sandbox (set via --workdir)
    args.push('--volume', `${workdir}:${containerWorkdir}`);

@@ -549,20 +559,39 @@ export async function start_sandbox(
      }
    }

    // name container after image, plus numeric suffix to avoid conflicts
    // name container after image, plus random suffix to avoid conflicts
    const imageName = parseImageName(image);
    let index = 0;
    const containerNameCheck = execSync(
      `${config.command} ps -a --format "{{.Names}}"`,
    )
      .toString()
      .trim();
    while (containerNameCheck.includes(`${imageName}-${index}`)) {
      index++;
    const isIntegrationTest =
      process.env['GEMINI_CLI_INTEGRATION_TEST'] === 'true';
    let containerName;
    if (isIntegrationTest) {
      containerName = `gemini-cli-integration-test-${randomBytes(4).toString(
        'hex',
      )}`;
      console.log(`ContainerName: ${containerName}`);
    } else {
      let index = 0;
      const containerNameCheck = execSync(
        `${config.command} ps -a --format "{{.Names}}"`,
      )
        .toString()
        .trim();
      while (containerNameCheck.includes(`${imageName}-${index}`)) {
        index++;
      }
      containerName = `${imageName}-${index}`;
      console.log(`ContainerName (regular): ${containerName}`);
    }
    const containerName = `${imageName}-${index}`;
    args.push('--name', containerName, '--hostname', containerName);

    // copy GEMINI_CLI_TEST_VAR for integration tests
    if (process.env['GEMINI_CLI_TEST_VAR']) {
      args.push(
        '--env',
        `GEMINI_CLI_TEST_VAR=${process.env['GEMINI_CLI_TEST_VAR']}`,
      );
    }

    // copy GEMINI_API_KEY(s)
    if (process.env['GEMINI_API_KEY']) {
      args.push('--env', `GEMINI_API_KEY=${process.env['GEMINI_API_KEY']}`);
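// Illustrative sketch (an assumption, not code from this commit): the naming
// scheme above, pulled out as a standalone helper. Integration-test runs get a
// random hex suffix (e.g. gemini-cli-integration-test-1a2b3c4d) so parallel
// runs cannot collide, while regular runs keep the readable <image>-<index>
// names. `existingNames` stands for the `docker ps -a --format "{{.Names}}"`
// output used above.
function pickContainerName(imageName: string, existingNames: string): string {
  if (process.env['GEMINI_CLI_INTEGRATION_TEST'] === 'true') {
    return `gemini-cli-integration-test-${randomBytes(4).toString('hex')}`;
  }
  let index = 0;
  while (existingNames.includes(`${imageName}-${index}`)) {
    index++;
  }
  return `${imageName}-${index}`;
}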
@@ -805,22 +834,25 @@ export async function start_sandbox(
    }

    // spawn child and let it inherit stdio
    process.stdin.pause();
    sandboxProcess = spawn(config.command, args, {
      stdio: 'inherit',
    });

    sandboxProcess.on('error', (err) => {
      console.error('Sandbox process error:', err);
    });
    return new Promise<number>((resolve, reject) => {
      sandboxProcess.on('error', (err) => {
        console.error('Sandbox process error:', err);
        reject(err);
      });

    await new Promise<void>((resolve) => {
      sandboxProcess?.on('close', (code, signal) => {
        if (code !== 0) {
          process.stdin.resume();
        if (code !== 0 && code !== null) {
          console.log(
            `Sandbox process exited with code: ${code}, signal: ${signal}`,
          );
        }
        resolve();
        resolve(code ?? 1);
      });
    });
  } finally {
File diff suppressed because it is too large
@@ -12,18 +12,19 @@ import type {
import type {
  SettingDefinition,
  SettingsSchema,
  SettingsType,
  SettingsValue,
} from '../config/settingsSchema.js';
import { SETTINGS_SCHEMA } from '../config/settingsSchema.js';
import { getSettingsSchema } from '../config/settingsSchema.js';

// The schema is now nested, but many parts of the UI and logic work better
// with a flattened structure and dot-notation keys. This section flattens the
// schema into a map for easier lookups.

function flattenSchema(
  schema: SettingsSchema,
  prefix = '',
): Record<string, SettingDefinition & { key: string }> {
  let result: Record<string, SettingDefinition & { key: string }> = {};
type FlattenedSchema = Record<string, SettingDefinition & { key: string }>;

function flattenSchema(schema: SettingsSchema, prefix = ''): FlattenedSchema {
  let result: FlattenedSchema = {};
  for (const key in schema) {
    const newKey = prefix ? `${prefix}.${key}` : key;
    const definition = schema[key];
@@ -35,7 +36,19 @@ function flattenSchema(
  return result;
}

const FLATTENED_SCHEMA = flattenSchema(SETTINGS_SCHEMA);
let _FLATTENED_SCHEMA: FlattenedSchema | undefined;

/** Returns a flattened schema, the first call is memoized for future requests. */
export function getFlattenedSchema() {
  return (
    _FLATTENED_SCHEMA ??
    (_FLATTENED_SCHEMA = flattenSchema(getSettingsSchema()))
  );
}

function clearFlattenedSchema() {
  _FLATTENED_SCHEMA = undefined;
}

/**
 * Get all settings grouped by category
@@ -49,7 +62,7 @@ export function getSettingsByCategory(): Record<
  Array<SettingDefinition & { key: string }>
> = {};

  Object.values(FLATTENED_SCHEMA).forEach((definition) => {
  Object.values(getFlattenedSchema()).forEach((definition) => {
    const category = definition.category;
    if (!categories[category]) {
      categories[category] = [];
@@ -66,28 +79,28 @@ export function getSettingsByCategory(): Record<
export function getSettingDefinition(
  key: string,
): (SettingDefinition & { key: string }) | undefined {
  return FLATTENED_SCHEMA[key];
  return getFlattenedSchema()[key];
}

/**
 * Check if a setting requires restart
 */
export function requiresRestart(key: string): boolean {
  return FLATTENED_SCHEMA[key]?.requiresRestart ?? false;
  return getFlattenedSchema()[key]?.requiresRestart ?? false;
}

/**
 * Get the default value for a setting
 */
export function getDefaultValue(key: string): SettingDefinition['default'] {
  return FLATTENED_SCHEMA[key]?.default;
export function getDefaultValue(key: string): SettingsValue {
  return getFlattenedSchema()[key]?.default;
}

/**
 * Get all setting keys that require restart
 */
export function getRestartRequiredSettings(): string[] {
  return Object.values(FLATTENED_SCHEMA)
  return Object.values(getFlattenedSchema())
    .filter((definition) => definition.requiresRestart)
    .map((definition) => definition.key);
}
@@ -121,7 +134,7 @@ export function getEffectiveValue(
  key: string,
  settings: Settings,
  mergedSettings: Settings,
): SettingDefinition['default'] {
): SettingsValue {
  const definition = getSettingDefinition(key);
  if (!definition) {
    return undefined;
@@ -132,13 +145,13 @@ export function getEffectiveValue(
  // Check the current scope's settings first
  let value = getNestedValue(settings as Record<string, unknown>, path);
  if (value !== undefined) {
    return value as SettingDefinition['default'];
    return value as SettingsValue;
  }

  // Check the merged settings for an inherited value
  value = getNestedValue(mergedSettings as Record<string, unknown>, path);
  if (value !== undefined) {
    return value as SettingDefinition['default'];
    return value as SettingsValue;
  }

  // Return default value if no value is set anywhere
@@ -149,16 +162,16 @@
 * Get all setting keys from the schema
 */
export function getAllSettingKeys(): string[] {
  return Object.keys(FLATTENED_SCHEMA);
  return Object.keys(getFlattenedSchema());
}

/**
 * Get settings by type
 */
export function getSettingsByType(
  type: SettingDefinition['type'],
  type: SettingsType,
): Array<SettingDefinition & { key: string }> {
  return Object.values(FLATTENED_SCHEMA).filter(
  return Object.values(getFlattenedSchema()).filter(
    (definition) => definition.type === type,
  );
}
@@ -171,7 +184,7 @@ export function getSettingsRequiringRestart(): Array<
    key: string;
  }
> {
  return Object.values(FLATTENED_SCHEMA).filter(
  return Object.values(getFlattenedSchema()).filter(
    (definition) => definition.requiresRestart,
  );
}
@@ -180,21 +193,21 @@ export function getSettingsRequiringRestart(): Array<
 * Validate if a setting key exists in the schema
 */
export function isValidSettingKey(key: string): boolean {
  return key in FLATTENED_SCHEMA;
  return key in getFlattenedSchema();
}

/**
 * Get the category for a setting
 */
export function getSettingCategory(key: string): string | undefined {
  return FLATTENED_SCHEMA[key]?.category;
  return getFlattenedSchema()[key]?.category;
}

/**
 * Check if a setting should be shown in the settings dialog
 */
export function shouldShowInDialog(key: string): boolean {
  return FLATTENED_SCHEMA[key]?.showInDialog ?? true; // Default to true for backward compatibility
  return getFlattenedSchema()[key]?.showInDialog ?? true; // Default to true for backward compatibility
}

/**
@@ -209,7 +222,7 @@ export function getDialogSettingsByCategory(): Record<
  Array<SettingDefinition & { key: string }>
> = {};

  Object.values(FLATTENED_SCHEMA)
  Object.values(getFlattenedSchema())
    .filter((definition) => definition.showInDialog !== false)
    .forEach((definition) => {
      const category = definition.category;
@@ -226,9 +239,9 @@ export function getDialogSettingsByCategory(): Record<
 * Get settings by type that should be shown in the dialog
 */
export function getDialogSettingsByType(
  type: SettingDefinition['type'],
  type: SettingsType,
): Array<SettingDefinition & { key: string }> {
  return Object.values(FLATTENED_SCHEMA).filter(
  return Object.values(getFlattenedSchema()).filter(
    (definition) =>
      definition.type === type && definition.showInDialog !== false,
  );
@@ -238,7 +251,7 @@ export function getDialogSettingsByType(
 * Get all setting keys that should be shown in the dialog
 */
export function getDialogSettingKeys(): string[] {
  return Object.values(FLATTENED_SCHEMA)
  return Object.values(getFlattenedSchema())
    .filter((definition) => definition.showInDialog !== false)
    .map((definition) => definition.key);
}
@@ -344,7 +357,7 @@ export function setPendingSettingValue(
 */
export function setPendingSettingValueAny(
  key: string,
  value: unknown,
  value: SettingsValue,
  pendingSettings: Settings,
): Settings {
  const path = key.split('.');
@@ -415,25 +428,30 @@ export function getDisplayValue(
  pendingSettings?: Settings,
): string {
  // Prioritize pending changes if user has modified this setting
  let value: boolean;
  const definition = getSettingDefinition(key);

  let value: SettingsValue;
  if (pendingSettings && settingExistsInScope(key, pendingSettings)) {
    // Show the value from the pending (unsaved) edits when it exists
    value = getSettingValue(key, pendingSettings, {});
    value = getEffectiveValue(key, pendingSettings, {});
  } else if (settingExistsInScope(key, settings)) {
    // Show the value defined at the current scope if present
    value = getSettingValue(key, settings, {});
    value = getEffectiveValue(key, settings, {});
  } else {
    // Fall back to the schema default when the key is unset in this scope
    const defaultValue = getDefaultValue(key);
    value = typeof defaultValue === 'boolean' ? defaultValue : false;
    value = getDefaultValue(key);
  }

  const valueString = String(value);
  let valueString = String(value);

  if (definition?.type === 'enum' && definition.options) {
    const option = definition.options?.find((option) => option.value === value);
    valueString = option?.label ?? `${value}`;
  }

  // Check if value is different from default OR if it's in modified settings OR if there are pending changes
  const defaultValue = getDefaultValue(key);
  const isChangedFromDefault =
    typeof defaultValue === 'boolean' ? value !== defaultValue : value === true;
  const isChangedFromDefault = value !== defaultValue;
  const isInModifiedSettings = modifiedSettings.has(key);

  // Mark as modified if setting exists in current scope OR is in modified settings
@@ -476,3 +494,5 @@ export function getEffectiveDisplayValue(
): boolean {
  return getSettingValue(key, settings, mergedSettings);
}

export const TEST_ONLY = { clearFlattenedSchema };
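// A minimal usage sketch (illustrative, not part of this change): lookups now
// go through the memoized getFlattenedSchema() instead of the eager
// FLATTENED_SCHEMA constant. The 'ui.theme' key is hypothetical.
const exampleKey = 'ui.theme';
if (isValidSettingKey(exampleKey)) {
  const definition = getSettingDefinition(exampleKey);
  console.log(
    definition?.category,
    getDefaultValue(exampleKey),
    requiresRestart(exampleKey),
  );
}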
@@ -9,25 +9,26 @@ import { getStartupWarnings } from './startupWarnings.js';
import * as fs from 'node:fs/promises';
import { getErrorMessage } from '@qwen-code/qwen-code-core';

vi.mock('fs/promises');
vi.mock('node:fs/promises', { spy: true });
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
  const actual = await importOriginal();
  const actual =
    await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
  return {
    ...actual,
    getErrorMessage: vi.fn(),
  };
});

describe.skip('startupWarnings', () => {
describe('startupWarnings', () => {
  beforeEach(() => {
    vi.resetAllMocks();
  });

  it('should return warnings from the file and delete it', async () => {
    const mockWarnings = 'Warning 1\nWarning 2';
    vi.spyOn(fs, 'access').mockResolvedValue();
    vi.spyOn(fs, 'readFile').mockResolvedValue(mockWarnings);
    vi.spyOn(fs, 'unlink').mockResolvedValue();
    vi.mocked(fs.access).mockResolvedValue();
    vi.mocked(fs.readFile).mockResolvedValue(mockWarnings);
    vi.mocked(fs.unlink).mockResolvedValue();

    const warnings = await getStartupWarnings();

@@ -40,7 +41,7 @@ describe.skip('startupWarnings', () => {
  it('should return an empty array if the file does not exist', async () => {
    const error = new Error('File not found');
    (error as Error & { code: string }).code = 'ENOENT';
    vi.spyOn(fs, 'access').mockRejectedValue(error);
    vi.mocked(fs.access).mockRejectedValue(error);

    const warnings = await getStartupWarnings();

@@ -49,7 +50,7 @@ describe.skip('startupWarnings', () => {

  it('should return an error message if reading the file fails', async () => {
    const error = new Error('Permission denied');
    vi.spyOn(fs, 'access').mockRejectedValue(error);
    vi.mocked(fs.access).mockRejectedValue(error);
    vi.mocked(getErrorMessage).mockReturnValue('Permission denied');

    const warnings = await getStartupWarnings();
@@ -61,9 +62,9 @@ describe.skip('startupWarnings', () => {

  it('should return a warning if deleting the file fails', async () => {
    const mockWarnings = 'Warning 1';
    vi.spyOn(fs, 'access').mockResolvedValue();
    vi.spyOn(fs, 'readFile').mockResolvedValue(mockWarnings);
    vi.spyOn(fs, 'unlink').mockRejectedValue(new Error('Permission denied'));
    vi.mocked(fs.access).mockResolvedValue();
    vi.mocked(fs.readFile).mockResolvedValue(mockWarnings);
    vi.mocked(fs.unlink).mockRejectedValue(new Error('Permission denied'));

    const warnings = await getStartupWarnings();


@@ -60,7 +60,7 @@ const WARNING_CHECKS: readonly WarningCheck[] = [
];

export async function getUserStartupWarnings(
  workspaceRoot: string,
  workspaceRoot: string = process.cwd(),
): Promise<string[]> {
  const results = await Promise.all(
    WARNING_CHECKS.map((check) => check.check(workspaceRoot)),
59
packages/cli/src/utils/windowTitle.test.ts
Normal file
@@ -0,0 +1,59 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { computeWindowTitle } from './windowTitle.js';

describe('computeWindowTitle', () => {
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    originalEnv = process.env;
    vi.stubEnv('CLI_TITLE', undefined);
  });

  afterEach(() => {
    process.env = originalEnv;
  });

  it('should use default Qwen title when CLI_TITLE is not set', () => {
    const result = computeWindowTitle('my-project');
    expect(result).toBe('Qwen - my-project');
  });

  it('should use CLI_TITLE environment variable when set', () => {
    vi.stubEnv('CLI_TITLE', 'Custom Title');
    const result = computeWindowTitle('my-project');
    expect(result).toBe('Custom Title');
  });

  it('should remove control characters from title', () => {
    vi.stubEnv('CLI_TITLE', 'Title\x1b[31m with \x07 control chars');
    const result = computeWindowTitle('my-project');
    // The \x1b[31m (ANSI escape sequence) and \x07 (bell character) should be removed
    expect(result).toBe('Title[31m with control chars');
  });

  it('should handle folder names with control characters', () => {
    const result = computeWindowTitle('project\x07name');
    expect(result).toBe('Qwen - projectname');
  });

  it('should handle empty folder name', () => {
    const result = computeWindowTitle('');
    expect(result).toBe('Qwen - ');
  });

  it('should handle folder names with spaces', () => {
    const result = computeWindowTitle('my project');
    expect(result).toBe('Qwen - my project');
  });

  it('should handle folder names with special characters', () => {
    const result = computeWindowTitle('project-name_v1.0');
    expect(result).toBe('Qwen - project-name_v1.0');
  });
});
22
packages/cli/src/utils/windowTitle.ts
Normal file
@@ -0,0 +1,22 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * Computes the window title for the Gemini CLI application.
 *
 * @param folderName - The name of the current folder/workspace to display in the title
 * @returns The computed window title, either from CLI_TITLE environment variable or the default Gemini title
 */
export function computeWindowTitle(folderName: string): string {
  const title = process.env['CLI_TITLE'] || `Qwen - ${folderName}`;

  // Remove control characters that could cause issues in terminal titles
  return title.replace(
    // eslint-disable-next-line no-control-regex
    /[\x00-\x1F\x7F]/g,
    '',
  );
}
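// A minimal usage sketch (illustrative, not the call site in this commit):
// apply the computed title with the standard OSC 2 terminal escape sequence.
// The basename-of-cwd choice is an assumption.
import * as path from 'node:path';

const exampleTitle = computeWindowTitle(path.basename(process.cwd()));
process.stdout.write(`\x1b]2;${exampleTitle}\x07`);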