mirror of
https://github.com/QwenLM/qwen-code.git
synced 2025-12-20 16:57:46 +00:00
sync gemini-cli 0.1.17
Co-Authored-By: Qwen-Coder <qwen-coder@alibabacloud.com>
This commit is contained in:
@@ -27,11 +27,13 @@ vi.mock('../utils/editor.js', () => ({
|
||||
import { describe, it, expect, beforeEach, afterEach, vi, Mock } from 'vitest';
|
||||
import { EditTool, EditToolParams } from './edit.js';
|
||||
import { FileDiff } from './tools.js';
|
||||
import { ToolErrorType } from './tool-error.js';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import os from 'os';
|
||||
import { ApprovalMode, Config } from '../config/config.js';
|
||||
import { Content, Part, SchemaUnion } from '@google/genai';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
describe('EditTool', () => {
|
||||
let tool: EditTool;
|
||||
@@ -41,6 +43,7 @@ describe('EditTool', () => {
|
||||
let geminiClient: any;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'edit-tool-test-'));
|
||||
rootDir = path.join(tempDir, 'root');
|
||||
fs.mkdirSync(rootDir);
|
||||
@@ -54,6 +57,7 @@ describe('EditTool', () => {
|
||||
getTargetDir: () => rootDir,
|
||||
getApprovalMode: vi.fn(),
|
||||
setApprovalMode: vi.fn(),
|
||||
getWorkspaceContext: () => createMockWorkspaceContext(rootDir),
|
||||
// getGeminiConfig: () => ({ apiKey: 'test-api-key' }), // This was not a real Config method
|
||||
// Add other properties/methods of Config if EditTool uses them
|
||||
// Minimal other methods to satisfy Config type if needed by EditTool constructor or other direct uses:
|
||||
@@ -215,8 +219,9 @@ describe('EditTool', () => {
|
||||
old_string: 'old',
|
||||
new_string: 'new',
|
||||
};
|
||||
expect(tool.validateToolParams(params)).toMatch(
|
||||
/File path must be within the root directory/,
|
||||
const error = tool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -623,6 +628,98 @@ describe('EditTool', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Scenarios', () => {
|
||||
const testFile = 'error_test.txt';
|
||||
let filePath: string;
|
||||
|
||||
beforeEach(() => {
|
||||
filePath = path.join(rootDir, testFile);
|
||||
});
|
||||
|
||||
it('should return FILE_NOT_FOUND error', async () => {
|
||||
const params: EditToolParams = {
|
||||
file_path: filePath,
|
||||
old_string: 'any',
|
||||
new_string: 'new',
|
||||
};
|
||||
const result = await tool.execute(params, new AbortController().signal);
|
||||
expect(result.error?.type).toBe(ToolErrorType.FILE_NOT_FOUND);
|
||||
});
|
||||
|
||||
it('should return ATTEMPT_TO_CREATE_EXISTING_FILE error', async () => {
|
||||
fs.writeFileSync(filePath, 'existing content', 'utf8');
|
||||
const params: EditToolParams = {
|
||||
file_path: filePath,
|
||||
old_string: '',
|
||||
new_string: 'new content',
|
||||
};
|
||||
const result = await tool.execute(params, new AbortController().signal);
|
||||
expect(result.error?.type).toBe(
|
||||
ToolErrorType.ATTEMPT_TO_CREATE_EXISTING_FILE,
|
||||
);
|
||||
});
|
||||
|
||||
it('should return NO_OCCURRENCE_FOUND error', async () => {
|
||||
fs.writeFileSync(filePath, 'content', 'utf8');
|
||||
const params: EditToolParams = {
|
||||
file_path: filePath,
|
||||
old_string: 'not-found',
|
||||
new_string: 'new',
|
||||
};
|
||||
const result = await tool.execute(params, new AbortController().signal);
|
||||
expect(result.error?.type).toBe(ToolErrorType.EDIT_NO_OCCURRENCE_FOUND);
|
||||
});
|
||||
|
||||
it('should return EXPECTED_OCCURRENCE_MISMATCH error', async () => {
|
||||
fs.writeFileSync(filePath, 'one one two', 'utf8');
|
||||
const params: EditToolParams = {
|
||||
file_path: filePath,
|
||||
old_string: 'one',
|
||||
new_string: 'new',
|
||||
expected_replacements: 3,
|
||||
};
|
||||
const result = await tool.execute(params, new AbortController().signal);
|
||||
expect(result.error?.type).toBe(
|
||||
ToolErrorType.EDIT_EXPECTED_OCCURRENCE_MISMATCH,
|
||||
);
|
||||
});
|
||||
|
||||
it('should return NO_CHANGE error', async () => {
|
||||
fs.writeFileSync(filePath, 'content', 'utf8');
|
||||
const params: EditToolParams = {
|
||||
file_path: filePath,
|
||||
old_string: 'content',
|
||||
new_string: 'content',
|
||||
};
|
||||
const result = await tool.execute(params, new AbortController().signal);
|
||||
expect(result.error?.type).toBe(ToolErrorType.EDIT_NO_CHANGE);
|
||||
});
|
||||
|
||||
it('should return INVALID_PARAMETERS error for relative path', async () => {
|
||||
const params: EditToolParams = {
|
||||
file_path: 'relative/path.txt',
|
||||
old_string: 'a',
|
||||
new_string: 'b',
|
||||
};
|
||||
const result = await tool.execute(params, new AbortController().signal);
|
||||
expect(result.error?.type).toBe(ToolErrorType.INVALID_TOOL_PARAMS);
|
||||
});
|
||||
|
||||
it('should return FILE_WRITE_FAILURE on write error', async () => {
|
||||
fs.writeFileSync(filePath, 'content', 'utf8');
|
||||
// Make file readonly to trigger a write error
|
||||
fs.chmodSync(filePath, '444');
|
||||
|
||||
const params: EditToolParams = {
|
||||
file_path: filePath,
|
||||
old_string: 'content',
|
||||
new_string: 'new content',
|
||||
};
|
||||
const result = await tool.execute(params, new AbortController().signal);
|
||||
expect(result.error?.type).toBe(ToolErrorType.FILE_WRITE_FAILURE);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDescription', () => {
|
||||
it('should return "No file changes to..." if old_string and new_string are the same', () => {
|
||||
const testFileName = 'test.txt';
|
||||
@@ -675,4 +772,28 @@ describe('EditTool', () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace boundary validation', () => {
|
||||
it('should validate paths are within workspace root', () => {
|
||||
const validPath = {
|
||||
file_path: path.join(rootDir, 'file.txt'),
|
||||
old_string: 'old',
|
||||
new_string: 'new',
|
||||
};
|
||||
expect(tool.validateToolParams(validPath)).toBeNull();
|
||||
});
|
||||
|
||||
it('should reject paths outside workspace root', () => {
|
||||
const invalidPath = {
|
||||
file_path: '/etc/passwd',
|
||||
old_string: 'root',
|
||||
new_string: 'hacked',
|
||||
};
|
||||
const error = tool.validateToolParams(invalidPath);
|
||||
expect(error).toContain(
|
||||
'File path must be within one of the workspace directories',
|
||||
);
|
||||
expect(error).toContain(rootDir);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -17,6 +17,7 @@ import {
|
||||
ToolResult,
|
||||
ToolResultDisplay,
|
||||
} from './tools.js';
|
||||
import { ToolErrorType } from './tool-error.js';
|
||||
import { Type } from '@google/genai';
|
||||
import { SchemaValidator } from '../utils/schemaValidator.js';
|
||||
import { makeRelative, shortenPath } from '../utils/paths.js';
|
||||
@@ -26,7 +27,6 @@ import { ensureCorrectEdit } from '../utils/editCorrector.js';
|
||||
import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js';
|
||||
import { ReadFileTool } from './read-file.js';
|
||||
import { ModifiableTool, ModifyContext } from './modifiable-tool.js';
|
||||
import { isWithinRoot } from '../utils/fileUtils.js';
|
||||
|
||||
/**
|
||||
* Parameters for the Edit tool
|
||||
@@ -63,7 +63,7 @@ interface CalculatedEdit {
|
||||
currentContent: string | null;
|
||||
newContent: string;
|
||||
occurrences: number;
|
||||
error?: { display: string; raw: string };
|
||||
error?: { display: string; raw: string; type: ToolErrorType };
|
||||
isNewFile: boolean;
|
||||
}
|
||||
|
||||
@@ -137,8 +137,10 @@ Expectation for required parameters:
|
||||
return `File path must be absolute: ${params.file_path}`;
|
||||
}
|
||||
|
||||
if (!isWithinRoot(params.file_path, this.config.getTargetDir())) {
|
||||
return `File path must be within the root directory (${this.config.getTargetDir()}): ${params.file_path}`;
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
if (!workspaceContext.isPathWithinWorkspace(params.file_path)) {
|
||||
const directories = workspaceContext.getDirectories();
|
||||
return `File path must be within one of the workspace directories: ${directories.join(', ')}`;
|
||||
}
|
||||
|
||||
return null;
|
||||
@@ -190,7 +192,9 @@ Expectation for required parameters:
|
||||
let finalNewString = params.new_string;
|
||||
let finalOldString = params.old_string;
|
||||
let occurrences = 0;
|
||||
let error: { display: string; raw: string } | undefined = undefined;
|
||||
let error:
|
||||
| { display: string; raw: string; type: ToolErrorType }
|
||||
| undefined = undefined;
|
||||
|
||||
try {
|
||||
currentContent = fs.readFileSync(params.file_path, 'utf8');
|
||||
@@ -213,6 +217,7 @@ Expectation for required parameters:
|
||||
error = {
|
||||
display: `File not found. Cannot apply edit. Use an empty old_string to create a new file.`,
|
||||
raw: `File not found: ${params.file_path}`,
|
||||
type: ToolErrorType.FILE_NOT_FOUND,
|
||||
};
|
||||
} else if (currentContent !== null) {
|
||||
// Editing an existing file
|
||||
@@ -232,11 +237,13 @@ Expectation for required parameters:
|
||||
error = {
|
||||
display: `Failed to edit. Attempted to create a file that already exists.`,
|
||||
raw: `File already exists, cannot create: ${params.file_path}`,
|
||||
type: ToolErrorType.ATTEMPT_TO_CREATE_EXISTING_FILE,
|
||||
};
|
||||
} else if (occurrences === 0) {
|
||||
error = {
|
||||
display: `Failed to edit, could not find the string to replace.`,
|
||||
raw: `Failed to edit, 0 occurrences found for old_string in ${params.file_path}. No edits made. The exact text in old_string was not found. Ensure you're not escaping content incorrectly and check whitespace, indentation, and context. Use ${ReadFileTool.Name} tool to verify.`,
|
||||
type: ToolErrorType.EDIT_NO_OCCURRENCE_FOUND,
|
||||
};
|
||||
} else if (occurrences !== expectedReplacements) {
|
||||
const occurrenceTerm =
|
||||
@@ -245,11 +252,13 @@ Expectation for required parameters:
|
||||
error = {
|
||||
display: `Failed to edit, expected ${expectedReplacements} ${occurrenceTerm} but found ${occurrences}.`,
|
||||
raw: `Failed to edit, Expected ${expectedReplacements} ${occurrenceTerm} but found ${occurrences} for old_string in file: ${params.file_path}`,
|
||||
type: ToolErrorType.EDIT_EXPECTED_OCCURRENCE_MISMATCH,
|
||||
};
|
||||
} else if (finalOldString === finalNewString) {
|
||||
error = {
|
||||
display: `No changes to apply. The old_string and new_string are identical.`,
|
||||
raw: `No changes to apply. The old_string and new_string are identical in file: ${params.file_path}`,
|
||||
type: ToolErrorType.EDIT_NO_CHANGE,
|
||||
};
|
||||
}
|
||||
} else {
|
||||
@@ -257,6 +266,7 @@ Expectation for required parameters:
|
||||
error = {
|
||||
display: `Failed to read content of file.`,
|
||||
raw: `Failed to read content of existing file: ${params.file_path}`,
|
||||
type: ToolErrorType.READ_CONTENT_FAILURE,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -373,6 +383,10 @@ Expectation for required parameters:
|
||||
return {
|
||||
llmContent: `Error: Invalid parameters provided. Reason: ${validationError}`,
|
||||
returnDisplay: `Error: ${validationError}`,
|
||||
error: {
|
||||
message: validationError,
|
||||
type: ToolErrorType.INVALID_TOOL_PARAMS,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -384,6 +398,10 @@ Expectation for required parameters:
|
||||
return {
|
||||
llmContent: `Error preparing edit: ${errorMsg}`,
|
||||
returnDisplay: `Error preparing edit: ${errorMsg}`,
|
||||
error: {
|
||||
message: errorMsg,
|
||||
type: ToolErrorType.EDIT_PREPARATION_FAILURE,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -391,6 +409,10 @@ Expectation for required parameters:
|
||||
return {
|
||||
llmContent: editData.error.raw,
|
||||
returnDisplay: `Error: ${editData.error.display}`,
|
||||
error: {
|
||||
message: editData.error.raw,
|
||||
type: editData.error.type,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -441,6 +463,10 @@ Expectation for required parameters:
|
||||
return {
|
||||
llmContent: `Error executing edit: ${errorMsg}`,
|
||||
returnDisplay: `Error writing file: ${errorMsg}`,
|
||||
error: {
|
||||
message: errorMsg,
|
||||
type: ToolErrorType.FILE_WRITE_FAILURE,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,9 +9,10 @@ import { partListUnionToString } from '../core/geminiRequest.js';
|
||||
import path from 'path';
|
||||
import fs from 'fs/promises';
|
||||
import os from 'os';
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'; // Removed vi
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
|
||||
import { Config } from '../config/config.js';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
describe('GlobTool', () => {
|
||||
let tempRootDir: string; // This will be the rootDirectory for the GlobTool instance
|
||||
@@ -23,6 +24,7 @@ describe('GlobTool', () => {
|
||||
getFileService: () => new FileDiscoveryService(tempRootDir),
|
||||
getFileFilteringRespectGitIgnore: () => true,
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () => createMockWorkspaceContext(tempRootDir),
|
||||
} as unknown as Config;
|
||||
|
||||
beforeEach(async () => {
|
||||
@@ -243,7 +245,7 @@ describe('GlobTool', () => {
|
||||
path: '../../../../../../../../../../tmp',
|
||||
}; // Definitely outside
|
||||
expect(specificGlobTool.validateToolParams(paramsOutside)).toContain(
|
||||
"resolves outside the tool's root directory",
|
||||
'resolves outside the allowed workspace directories',
|
||||
);
|
||||
});
|
||||
|
||||
@@ -264,6 +266,37 @@ describe('GlobTool', () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace boundary validation', () => {
|
||||
it('should validate search paths are within workspace boundaries', () => {
|
||||
const validPath = { pattern: '*.ts', path: 'sub' };
|
||||
const invalidPath = { pattern: '*.ts', path: '../..' };
|
||||
|
||||
expect(globTool.validateToolParams(validPath)).toBeNull();
|
||||
expect(globTool.validateToolParams(invalidPath)).toContain(
|
||||
'resolves outside the allowed workspace directories',
|
||||
);
|
||||
});
|
||||
|
||||
it('should provide clear error messages when path is outside workspace', () => {
|
||||
const invalidPath = { pattern: '*.ts', path: '/etc' };
|
||||
const error = globTool.validateToolParams(invalidPath);
|
||||
|
||||
expect(error).toContain(
|
||||
'resolves outside the allowed workspace directories',
|
||||
);
|
||||
expect(error).toContain(tempRootDir);
|
||||
});
|
||||
|
||||
it('should work with paths in workspace subdirectories', async () => {
|
||||
const params: GlobToolParams = { pattern: '*.md', path: 'sub' };
|
||||
const result = await globTool.execute(params, abortSignal);
|
||||
|
||||
expect(result.llmContent).toContain('Found 2 file(s)');
|
||||
expect(result.llmContent).toContain('fileC.md');
|
||||
expect(result.llmContent).toContain('FileD.MD');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('sortFileEntries', () => {
|
||||
|
||||
@@ -11,7 +11,6 @@ import { SchemaValidator } from '../utils/schemaValidator.js';
|
||||
import { BaseTool, Icon, ToolResult } from './tools.js';
|
||||
import { Type } from '@google/genai';
|
||||
import { shortenPath, makeRelative } from '../utils/paths.js';
|
||||
import { isWithinRoot } from '../utils/fileUtils.js';
|
||||
import { Config } from '../config/config.js';
|
||||
|
||||
// Subset of 'Path' interface provided by 'glob' that we can implement for testing
|
||||
@@ -130,8 +129,10 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
||||
params.path || '.',
|
||||
);
|
||||
|
||||
if (!isWithinRoot(searchDirAbsolute, this.config.getTargetDir())) {
|
||||
return `Search path ("${searchDirAbsolute}") resolves outside the tool's root directory ("${this.config.getTargetDir()}").`;
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
if (!workspaceContext.isPathWithinWorkspace(searchDirAbsolute)) {
|
||||
const directories = workspaceContext.getDirectories();
|
||||
return `Search path ("${searchDirAbsolute}") resolves outside the allowed workspace directories: ${directories.join(', ')}`;
|
||||
}
|
||||
|
||||
const targetDir = searchDirAbsolute || this.config.getTargetDir();
|
||||
@@ -189,10 +190,27 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
||||
}
|
||||
|
||||
try {
|
||||
const searchDirAbsolute = path.resolve(
|
||||
this.config.getTargetDir(),
|
||||
params.path || '.',
|
||||
);
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
const workspaceDirectories = workspaceContext.getDirectories();
|
||||
|
||||
// If a specific path is provided, resolve it and check if it's within workspace
|
||||
let searchDirectories: readonly string[];
|
||||
if (params.path) {
|
||||
const searchDirAbsolute = path.resolve(
|
||||
this.config.getTargetDir(),
|
||||
params.path,
|
||||
);
|
||||
if (!workspaceContext.isPathWithinWorkspace(searchDirAbsolute)) {
|
||||
return {
|
||||
llmContent: `Error: Path "${params.path}" is not within any workspace directory`,
|
||||
returnDisplay: `Path is not within workspace`,
|
||||
};
|
||||
}
|
||||
searchDirectories = [searchDirAbsolute];
|
||||
} else {
|
||||
// Search across all workspace directories
|
||||
searchDirectories = workspaceDirectories;
|
||||
}
|
||||
|
||||
// Get centralized file discovery service
|
||||
const respectGitIgnore =
|
||||
@@ -200,17 +218,26 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
||||
this.config.getFileFilteringRespectGitIgnore();
|
||||
const fileDiscovery = this.config.getFileService();
|
||||
|
||||
const entries = await glob(params.pattern, {
|
||||
cwd: searchDirAbsolute,
|
||||
withFileTypes: true,
|
||||
nodir: true,
|
||||
stat: true,
|
||||
nocase: !params.case_sensitive,
|
||||
dot: true,
|
||||
ignore: ['**/node_modules/**', '**/.git/**'],
|
||||
follow: false,
|
||||
signal,
|
||||
});
|
||||
// Collect entries from all search directories
|
||||
let allEntries: GlobPath[] = [];
|
||||
|
||||
for (const searchDir of searchDirectories) {
|
||||
const entries = (await glob(params.pattern, {
|
||||
cwd: searchDir,
|
||||
withFileTypes: true,
|
||||
nodir: true,
|
||||
stat: true,
|
||||
nocase: !params.case_sensitive,
|
||||
dot: true,
|
||||
ignore: ['**/node_modules/**', '**/.git/**'],
|
||||
follow: false,
|
||||
signal,
|
||||
})) as GlobPath[];
|
||||
|
||||
allEntries = allEntries.concat(entries);
|
||||
}
|
||||
|
||||
const entries = allEntries;
|
||||
|
||||
// Apply git-aware filtering if enabled and in git repository
|
||||
let filteredEntries = entries;
|
||||
@@ -236,7 +263,12 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
||||
}
|
||||
|
||||
if (!filteredEntries || filteredEntries.length === 0) {
|
||||
let message = `No files found matching pattern "${params.pattern}" within ${searchDirAbsolute}.`;
|
||||
let message = `No files found matching pattern "${params.pattern}"`;
|
||||
if (searchDirectories.length === 1) {
|
||||
message += ` within ${searchDirectories[0]}`;
|
||||
} else {
|
||||
message += ` within ${searchDirectories.length} workspace directories`;
|
||||
}
|
||||
if (gitIgnoredCount > 0) {
|
||||
message += ` (${gitIgnoredCount} files were git-ignored)`;
|
||||
}
|
||||
@@ -263,7 +295,12 @@ export class GlobTool extends BaseTool<GlobToolParams, ToolResult> {
|
||||
const fileListDescription = sortedAbsolutePaths.join('\n');
|
||||
const fileCount = sortedAbsolutePaths.length;
|
||||
|
||||
let resultMessage = `Found ${fileCount} file(s) matching "${params.pattern}" within ${searchDirAbsolute}`;
|
||||
let resultMessage = `Found ${fileCount} file(s) matching "${params.pattern}"`;
|
||||
if (searchDirectories.length === 1) {
|
||||
resultMessage += ` within ${searchDirectories[0]}`;
|
||||
} else {
|
||||
resultMessage += ` across ${searchDirectories.length} workspace directories`;
|
||||
}
|
||||
if (gitIgnoredCount > 0) {
|
||||
resultMessage += ` (${gitIgnoredCount} additional files were git-ignored)`;
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ import path from 'path';
|
||||
import fs from 'fs/promises';
|
||||
import os from 'os';
|
||||
import { Config } from '../config/config.js';
|
||||
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';
|
||||
|
||||
// Mock the child_process module to control grep/git grep behavior
|
||||
vi.mock('child_process', () => ({
|
||||
@@ -33,6 +34,7 @@ describe('GrepTool', () => {
|
||||
|
||||
const mockConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () => createMockWorkspaceContext(tempRootDir),
|
||||
} as unknown as Config;
|
||||
|
||||
beforeEach(async () => {
|
||||
@@ -120,7 +122,7 @@ describe('GrepTool', () => {
|
||||
const params: GrepToolParams = { pattern: 'world' };
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 3 matches for pattern "world" in path "."',
|
||||
'Found 3 matches for pattern "world" in the workspace directory',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileA.txt');
|
||||
expect(result.llmContent).toContain('L1: hello world');
|
||||
@@ -147,7 +149,7 @@ describe('GrepTool', () => {
|
||||
const params: GrepToolParams = { pattern: 'hello', include: '*.js' };
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 1 match for pattern "hello" in path "." (filter: "*.js")',
|
||||
'Found 1 match for pattern "hello" in the workspace directory (filter: "*.js"):',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileB.js');
|
||||
expect(result.llmContent).toContain(
|
||||
@@ -179,7 +181,7 @@ describe('GrepTool', () => {
|
||||
const params: GrepToolParams = { pattern: 'nonexistentpattern' };
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'No matches found for pattern "nonexistentpattern" in path "."',
|
||||
'No matches found for pattern "nonexistentpattern" in the workspace directory.',
|
||||
);
|
||||
expect(result.returnDisplay).toBe('No matches found');
|
||||
});
|
||||
@@ -188,7 +190,7 @@ describe('GrepTool', () => {
|
||||
const params: GrepToolParams = { pattern: 'foo.*bar' }; // Matches 'const foo = "bar";'
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 1 match for pattern "foo.*bar" in path "."',
|
||||
'Found 1 match for pattern "foo.*bar" in the workspace directory:',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileB.js');
|
||||
expect(result.llmContent).toContain('L1: const foo = "bar";');
|
||||
@@ -198,7 +200,7 @@ describe('GrepTool', () => {
|
||||
const params: GrepToolParams = { pattern: 'HELLO' };
|
||||
const result = await grepTool.execute(params, abortSignal);
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 2 matches for pattern "HELLO" in path "."',
|
||||
'Found 2 matches for pattern "HELLO" in the workspace directory:',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileA.txt');
|
||||
expect(result.llmContent).toContain('L1: hello world');
|
||||
@@ -220,6 +222,98 @@ describe('GrepTool', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('multi-directory workspace', () => {
|
||||
it('should search across all workspace directories when no path is specified', async () => {
|
||||
// Create additional directory with test files
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'grep-tool-second-'),
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(secondDir, 'other.txt'),
|
||||
'hello from second directory\nworld in second',
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(secondDir, 'another.js'),
|
||||
'function world() { return "test"; }',
|
||||
);
|
||||
|
||||
// Create a mock config with multiple directories
|
||||
const multiDirConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new GrepTool(multiDirConfig);
|
||||
const params: GrepToolParams = { pattern: 'world' };
|
||||
const result = await multiDirGrepTool.execute(params, abortSignal);
|
||||
|
||||
// Should find matches in both directories
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 5 matches for pattern "world"',
|
||||
);
|
||||
|
||||
// Matches from first directory
|
||||
expect(result.llmContent).toContain('fileA.txt');
|
||||
expect(result.llmContent).toContain('L1: hello world');
|
||||
expect(result.llmContent).toContain('L2: second line with world');
|
||||
expect(result.llmContent).toContain('fileC.txt');
|
||||
expect(result.llmContent).toContain('L1: another world in sub dir');
|
||||
|
||||
// Matches from second directory (with directory name prefix)
|
||||
const secondDirName = path.basename(secondDir);
|
||||
expect(result.llmContent).toContain(
|
||||
`File: ${path.join(secondDirName, 'other.txt')}`,
|
||||
);
|
||||
expect(result.llmContent).toContain('L2: world in second');
|
||||
expect(result.llmContent).toContain(
|
||||
`File: ${path.join(secondDirName, 'another.js')}`,
|
||||
);
|
||||
expect(result.llmContent).toContain('L1: function world()');
|
||||
|
||||
// Clean up
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should search only specified path within workspace directories', async () => {
|
||||
// Create additional directory
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'grep-tool-second-'),
|
||||
);
|
||||
await fs.mkdir(path.join(secondDir, 'sub'));
|
||||
await fs.writeFile(
|
||||
path.join(secondDir, 'sub', 'test.txt'),
|
||||
'hello from second sub directory',
|
||||
);
|
||||
|
||||
// Create a mock config with multiple directories
|
||||
const multiDirConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new GrepTool(multiDirConfig);
|
||||
|
||||
// Search only in the 'sub' directory of the first workspace
|
||||
const params: GrepToolParams = { pattern: 'world', path: 'sub' };
|
||||
const result = await multiDirGrepTool.execute(params, abortSignal);
|
||||
|
||||
// Should only find matches in the specified sub directory
|
||||
expect(result.llmContent).toContain(
|
||||
'Found 1 match for pattern "world" in path "sub"',
|
||||
);
|
||||
expect(result.llmContent).toContain('File: fileC.txt');
|
||||
expect(result.llmContent).toContain('L1: another world in sub dir');
|
||||
|
||||
// Should not contain matches from second directory
|
||||
expect(result.llmContent).not.toContain('test.txt');
|
||||
|
||||
// Clean up
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDescription', () => {
|
||||
it('should generate correct description with pattern only', () => {
|
||||
const params: GrepToolParams = { pattern: 'testPattern' };
|
||||
@@ -246,6 +340,21 @@ describe('GrepTool', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should indicate searching across all workspace directories when no path specified', () => {
|
||||
// Create a mock config with multiple directories
|
||||
const multiDirConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, ['/another/dir']),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new GrepTool(multiDirConfig);
|
||||
const params: GrepToolParams = { pattern: 'testPattern' };
|
||||
expect(multiDirGrepTool.getDescription(params)).toBe(
|
||||
"'testPattern' across all workspace directories",
|
||||
);
|
||||
});
|
||||
|
||||
it('should generate correct description with pattern, include, and path', () => {
|
||||
const params: GrepToolParams = {
|
||||
pattern: 'testPattern',
|
||||
|
||||
@@ -92,22 +92,23 @@ export class GrepTool extends BaseTool<GrepToolParams, ToolResult> {
|
||||
/**
|
||||
* Checks if a path is within the root directory and resolves it.
|
||||
* @param relativePath Path relative to the root directory (or undefined for root).
|
||||
* @returns The absolute path if valid and exists.
|
||||
* @returns The absolute path if valid and exists, or null if no path specified (to search all directories).
|
||||
* @throws {Error} If path is outside root, doesn't exist, or isn't a directory.
|
||||
*/
|
||||
private resolveAndValidatePath(relativePath?: string): string {
|
||||
const targetPath = path.resolve(
|
||||
this.config.getTargetDir(),
|
||||
relativePath || '.',
|
||||
);
|
||||
private resolveAndValidatePath(relativePath?: string): string | null {
|
||||
// If no path specified, return null to indicate searching all workspace directories
|
||||
if (!relativePath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Security Check: Ensure the resolved path is still within the root directory.
|
||||
if (
|
||||
!targetPath.startsWith(this.config.getTargetDir()) &&
|
||||
targetPath !== this.config.getTargetDir()
|
||||
) {
|
||||
const targetPath = path.resolve(this.config.getTargetDir(), relativePath);
|
||||
|
||||
// Security Check: Ensure the resolved path is within workspace boundaries
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
if (!workspaceContext.isPathWithinWorkspace(targetPath)) {
|
||||
const directories = workspaceContext.getDirectories();
|
||||
throw new Error(
|
||||
`Path validation failed: Attempted path "${relativePath || '.'}" resolves outside the allowed root directory "${this.config.getTargetDir()}".`,
|
||||
`Path validation failed: Attempted path "${relativePath}" resolves outside the allowed workspace directories: ${directories.join(', ')}`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -146,10 +147,13 @@ export class GrepTool extends BaseTool<GrepToolParams, ToolResult> {
|
||||
return `Invalid regular expression pattern provided: ${params.pattern}. Error: ${getErrorMessage(error)}`;
|
||||
}
|
||||
|
||||
try {
|
||||
this.resolveAndValidatePath(params.path);
|
||||
} catch (error) {
|
||||
return getErrorMessage(error);
|
||||
// Only validate path if one is provided
|
||||
if (params.path) {
|
||||
try {
|
||||
this.resolveAndValidatePath(params.path);
|
||||
} catch (error) {
|
||||
return getErrorMessage(error);
|
||||
}
|
||||
}
|
||||
|
||||
return null; // Parameters are valid
|
||||
@@ -174,44 +178,78 @@ export class GrepTool extends BaseTool<GrepToolParams, ToolResult> {
|
||||
};
|
||||
}
|
||||
|
||||
let searchDirAbs: string;
|
||||
try {
|
||||
searchDirAbs = this.resolveAndValidatePath(params.path);
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
const searchDirAbs = this.resolveAndValidatePath(params.path);
|
||||
const searchDirDisplay = params.path || '.';
|
||||
|
||||
const matches: GrepMatch[] = await this.performGrepSearch({
|
||||
pattern: params.pattern,
|
||||
path: searchDirAbs,
|
||||
include: params.include,
|
||||
signal,
|
||||
});
|
||||
// Determine which directories to search
|
||||
let searchDirectories: readonly string[];
|
||||
if (searchDirAbs === null) {
|
||||
// No path specified - search all workspace directories
|
||||
searchDirectories = workspaceContext.getDirectories();
|
||||
} else {
|
||||
// Specific path provided - search only that directory
|
||||
searchDirectories = [searchDirAbs];
|
||||
}
|
||||
|
||||
if (matches.length === 0) {
|
||||
const noMatchMsg = `No matches found for pattern "${params.pattern}" in path "${searchDirDisplay}"${params.include ? ` (filter: "${params.include}")` : ''}.`;
|
||||
// Collect matches from all search directories
|
||||
let allMatches: GrepMatch[] = [];
|
||||
for (const searchDir of searchDirectories) {
|
||||
const matches = await this.performGrepSearch({
|
||||
pattern: params.pattern,
|
||||
path: searchDir,
|
||||
include: params.include,
|
||||
signal,
|
||||
});
|
||||
|
||||
// Add directory prefix if searching multiple directories
|
||||
if (searchDirectories.length > 1) {
|
||||
const dirName = path.basename(searchDir);
|
||||
matches.forEach((match) => {
|
||||
match.filePath = path.join(dirName, match.filePath);
|
||||
});
|
||||
}
|
||||
|
||||
allMatches = allMatches.concat(matches);
|
||||
}
|
||||
|
||||
let searchLocationDescription: string;
|
||||
if (searchDirAbs === null) {
|
||||
const numDirs = workspaceContext.getDirectories().length;
|
||||
searchLocationDescription =
|
||||
numDirs > 1
|
||||
? `across ${numDirs} workspace directories`
|
||||
: `in the workspace directory`;
|
||||
} else {
|
||||
searchLocationDescription = `in path "${searchDirDisplay}"`;
|
||||
}
|
||||
|
||||
if (allMatches.length === 0) {
|
||||
const noMatchMsg = `No matches found for pattern "${params.pattern}" ${searchLocationDescription}${params.include ? ` (filter: "${params.include}")` : ''}.`;
|
||||
return { llmContent: noMatchMsg, returnDisplay: `No matches found` };
|
||||
}
|
||||
|
||||
const matchesByFile = matches.reduce(
|
||||
// Group matches by file
|
||||
const matchesByFile = allMatches.reduce(
|
||||
(acc, match) => {
|
||||
const relativeFilePath =
|
||||
path.relative(
|
||||
searchDirAbs,
|
||||
path.resolve(searchDirAbs, match.filePath),
|
||||
) || path.basename(match.filePath);
|
||||
if (!acc[relativeFilePath]) {
|
||||
acc[relativeFilePath] = [];
|
||||
const fileKey = match.filePath;
|
||||
if (!acc[fileKey]) {
|
||||
acc[fileKey] = [];
|
||||
}
|
||||
acc[relativeFilePath].push(match);
|
||||
acc[relativeFilePath].sort((a, b) => a.lineNumber - b.lineNumber);
|
||||
acc[fileKey].push(match);
|
||||
acc[fileKey].sort((a, b) => a.lineNumber - b.lineNumber);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, GrepMatch[]>,
|
||||
);
|
||||
|
||||
const matchCount = matches.length;
|
||||
const matchCount = allMatches.length;
|
||||
const matchTerm = matchCount === 1 ? 'match' : 'matches';
|
||||
|
||||
let llmContent = `Found ${matchCount} ${matchTerm} for pattern "${params.pattern}" in path "${searchDirDisplay}"${params.include ? ` (filter: "${params.include}")` : ''}:\n---\n`;
|
||||
let llmContent = `Found ${matchCount} ${matchTerm} for pattern "${params.pattern}" ${searchLocationDescription}${params.include ? ` (filter: "${params.include}")` : ''}:
|
||||
---
|
||||
`;
|
||||
|
||||
for (const filePath in matchesByFile) {
|
||||
llmContent += `File: ${filePath}\n`;
|
||||
@@ -334,6 +372,13 @@ export class GrepTool extends BaseTool<GrepToolParams, ToolResult> {
|
||||
);
|
||||
description += ` within ${shortenPath(relativePath)}`;
|
||||
}
|
||||
} else {
|
||||
// When no path is specified, indicate searching all workspace directories
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
const directories = workspaceContext.getDirectories();
|
||||
if (directories.length > 1) {
|
||||
description += ` across all workspace directories`;
|
||||
}
|
||||
}
|
||||
return description;
|
||||
}
|
||||
|
||||
496
packages/core/src/tools/ls.test.ts
Normal file
496
packages/core/src/tools/ls.test.ts
Normal file
@@ -0,0 +1,496 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
vi.mock('fs', () => ({
|
||||
default: {
|
||||
statSync: vi.fn(),
|
||||
readdirSync: vi.fn(),
|
||||
},
|
||||
statSync: vi.fn(),
|
||||
readdirSync: vi.fn(),
|
||||
}));
|
||||
import { LSTool } from './ls.js';
|
||||
import { Config } from '../config/config.js';
|
||||
import { WorkspaceContext } from '../utils/workspaceContext.js';
|
||||
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
|
||||
|
||||
describe('LSTool', () => {
|
||||
let lsTool: LSTool;
|
||||
let mockConfig: Config;
|
||||
let mockWorkspaceContext: WorkspaceContext;
|
||||
let mockFileService: FileDiscoveryService;
|
||||
const mockPrimaryDir = '/home/user/project';
|
||||
const mockSecondaryDir = '/home/user/other-project';
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
|
||||
// Mock WorkspaceContext
|
||||
mockWorkspaceContext = {
|
||||
getDirectories: vi
|
||||
.fn()
|
||||
.mockReturnValue([mockPrimaryDir, mockSecondaryDir]),
|
||||
isPathWithinWorkspace: vi
|
||||
.fn()
|
||||
.mockImplementation(
|
||||
(path) =>
|
||||
path.startsWith(mockPrimaryDir) ||
|
||||
path.startsWith(mockSecondaryDir),
|
||||
),
|
||||
addDirectory: vi.fn(),
|
||||
} as unknown as WorkspaceContext;
|
||||
|
||||
// Mock FileService
|
||||
mockFileService = {
|
||||
shouldGitIgnoreFile: vi.fn().mockReturnValue(false),
|
||||
shouldGeminiIgnoreFile: vi.fn().mockReturnValue(false),
|
||||
} as unknown as FileDiscoveryService;
|
||||
|
||||
// Mock Config
|
||||
mockConfig = {
|
||||
getTargetDir: vi.fn().mockReturnValue(mockPrimaryDir),
|
||||
getWorkspaceContext: vi.fn().mockReturnValue(mockWorkspaceContext),
|
||||
getFileService: vi.fn().mockReturnValue(mockFileService),
|
||||
getFileFilteringOptions: vi.fn().mockReturnValue({
|
||||
respectGitIgnore: true,
|
||||
respectGeminiIgnore: true,
|
||||
}),
|
||||
} as unknown as Config;
|
||||
|
||||
lsTool = new LSTool(mockConfig);
|
||||
});
|
||||
|
||||
describe('parameter validation', () => {
|
||||
it('should accept valid absolute paths within workspace', () => {
|
||||
const params = {
|
||||
path: '/home/user/project/src',
|
||||
};
|
||||
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
it('should reject relative paths', () => {
|
||||
const params = {
|
||||
path: './src',
|
||||
};
|
||||
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toBe('Path must be absolute: ./src');
|
||||
});
|
||||
|
||||
it('should reject paths outside workspace with clear error message', () => {
|
||||
const params = {
|
||||
path: '/etc/passwd',
|
||||
};
|
||||
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toBe(
|
||||
'Path must be within one of the workspace directories: /home/user/project, /home/user/other-project',
|
||||
);
|
||||
});
|
||||
|
||||
it('should accept paths in secondary workspace directory', () => {
|
||||
const params = {
|
||||
path: '/home/user/other-project/lib',
|
||||
};
|
||||
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('execute', () => {
|
||||
it('should list files in a directory', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['file1.ts', 'file2.ts', 'subdir'];
|
||||
const mockStats = {
|
||||
isDirectory: vi.fn(),
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
};
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
const pathStr = path.toString();
|
||||
if (pathStr === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
// For individual files
|
||||
if (pathStr.toString().endsWith('subdir')) {
|
||||
return { ...mockStats, isDirectory: () => true, size: 0 } as fs.Stats;
|
||||
}
|
||||
return { ...mockStats, isDirectory: () => false } as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('[DIR] subdir');
|
||||
expect(result.llmContent).toContain('file1.ts');
|
||||
expect(result.llmContent).toContain('file2.ts');
|
||||
expect(result.returnDisplay).toBe('Listed 3 item(s).');
|
||||
});
|
||||
|
||||
it('should list files from secondary workspace directory', async () => {
|
||||
const testPath = '/home/user/other-project/lib';
|
||||
const mockFiles = ['module1.js', 'module2.js'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
if (path.toString() === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 2048,
|
||||
} as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('module1.js');
|
||||
expect(result.llmContent).toContain('module2.js');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s).');
|
||||
});
|
||||
|
||||
it('should handle empty directories', async () => {
|
||||
const testPath = '/home/user/project/empty';
|
||||
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => true,
|
||||
} as fs.Stats);
|
||||
vi.mocked(fs.readdirSync).mockReturnValue([]);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toBe(
|
||||
'Directory /home/user/project/empty is empty.',
|
||||
);
|
||||
expect(result.returnDisplay).toBe('Directory is empty.');
|
||||
});
|
||||
|
||||
it('should respect ignore patterns', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['test.js', 'test.spec.js', 'index.js'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
const pathStr = path.toString();
|
||||
if (pathStr === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath, ignore: ['*.spec.js'] },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('test.js');
|
||||
expect(result.llmContent).toContain('index.js');
|
||||
expect(result.llmContent).not.toContain('test.spec.js');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s).');
|
||||
});
|
||||
|
||||
it('should respect gitignore patterns', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['file1.js', 'file2.js', 'ignored.js'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
const pathStr = path.toString();
|
||||
if (pathStr === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
(mockFileService.shouldGitIgnoreFile as any).mockImplementation(
|
||||
(path: string) => path.includes('ignored.js'),
|
||||
);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('file1.js');
|
||||
expect(result.llmContent).toContain('file2.js');
|
||||
expect(result.llmContent).not.toContain('ignored.js');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s). (1 git-ignored)');
|
||||
});
|
||||
|
||||
it('should respect geminiignore patterns', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['file1.js', 'file2.js', 'private.js'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
const pathStr = path.toString();
|
||||
if (pathStr === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
(mockFileService.shouldGeminiIgnoreFile as any).mockImplementation(
|
||||
(path: string) => path.includes('private.js'),
|
||||
);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('file1.js');
|
||||
expect(result.llmContent).toContain('file2.js');
|
||||
expect(result.llmContent).not.toContain('private.js');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s). (1 gemini-ignored)');
|
||||
});
|
||||
|
||||
it('should handle non-directory paths', async () => {
|
||||
const testPath = '/home/user/project/file.txt';
|
||||
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => false,
|
||||
} as fs.Stats);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('Path is not a directory');
|
||||
expect(result.returnDisplay).toBe('Error: Path is not a directory.');
|
||||
});
|
||||
|
||||
it('should handle non-existent paths', async () => {
|
||||
const testPath = '/home/user/project/does-not-exist';
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation(() => {
|
||||
throw new Error('ENOENT: no such file or directory');
|
||||
});
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('Error listing directory');
|
||||
expect(result.returnDisplay).toBe('Error: Failed to list directory.');
|
||||
});
|
||||
|
||||
it('should sort directories first, then files alphabetically', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['z-file.ts', 'a-dir', 'b-file.ts', 'c-dir'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
if (path.toString() === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
if (path.toString().endsWith('-dir')) {
|
||||
return {
|
||||
isDirectory: () => true,
|
||||
mtime: new Date(),
|
||||
size: 0,
|
||||
} as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
const lines = (
|
||||
typeof result.llmContent === 'string' ? result.llmContent : ''
|
||||
).split('\n');
|
||||
const entries = lines.slice(1).filter((line: string) => line.trim()); // Skip header
|
||||
expect(entries[0]).toBe('[DIR] a-dir');
|
||||
expect(entries[1]).toBe('[DIR] c-dir');
|
||||
expect(entries[2]).toBe('b-file.ts');
|
||||
expect(entries[3]).toBe('z-file.ts');
|
||||
});
|
||||
|
||||
it('should handle permission errors gracefully', async () => {
|
||||
const testPath = '/home/user/project/restricted';
|
||||
|
||||
vi.mocked(fs.statSync).mockReturnValue({
|
||||
isDirectory: () => true,
|
||||
} as fs.Stats);
|
||||
vi.mocked(fs.readdirSync).mockImplementation(() => {
|
||||
throw new Error('EACCES: permission denied');
|
||||
});
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('Error listing directory');
|
||||
expect(result.llmContent).toContain('permission denied');
|
||||
expect(result.returnDisplay).toBe('Error: Failed to list directory.');
|
||||
});
|
||||
|
||||
it('should validate parameters and return error for invalid params', async () => {
|
||||
const result = await lsTool.execute(
|
||||
{ path: '../outside' },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('Invalid parameters provided');
|
||||
expect(result.returnDisplay).toBe('Error: Failed to execute tool.');
|
||||
});
|
||||
|
||||
it('should handle errors accessing individual files during listing', async () => {
|
||||
const testPath = '/home/user/project/src';
|
||||
const mockFiles = ['accessible.ts', 'inaccessible.ts'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
if (path.toString() === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
if (path.toString().endsWith('inaccessible.ts')) {
|
||||
throw new Error('EACCES: permission denied');
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 1024,
|
||||
} as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
// Spy on console.error to verify it's called
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
// Should still list the accessible file
|
||||
expect(result.llmContent).toContain('accessible.ts');
|
||||
expect(result.llmContent).not.toContain('inaccessible.ts');
|
||||
expect(result.returnDisplay).toBe('Listed 1 item(s).');
|
||||
|
||||
// Verify error was logged
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error accessing'),
|
||||
);
|
||||
|
||||
consoleErrorSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDescription', () => {
|
||||
it('should return shortened relative path', () => {
|
||||
const params = {
|
||||
path: path.join(mockPrimaryDir, 'deeply', 'nested', 'directory'),
|
||||
};
|
||||
|
||||
const description = lsTool.getDescription(params);
|
||||
expect(description).toBe(path.join('deeply', 'nested', 'directory'));
|
||||
});
|
||||
|
||||
it('should handle paths in secondary workspace', () => {
|
||||
const params = {
|
||||
path: path.join(mockSecondaryDir, 'lib'),
|
||||
};
|
||||
|
||||
const description = lsTool.getDescription(params);
|
||||
expect(description).toBe(path.join('..', 'other-project', 'lib'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('workspace boundary validation', () => {
|
||||
it('should accept paths in primary workspace directory', () => {
|
||||
const params = { path: `${mockPrimaryDir}/src` };
|
||||
expect(lsTool.validateToolParams(params)).toBeNull();
|
||||
});
|
||||
|
||||
it('should accept paths in secondary workspace directory', () => {
|
||||
const params = { path: `${mockSecondaryDir}/lib` };
|
||||
expect(lsTool.validateToolParams(params)).toBeNull();
|
||||
});
|
||||
|
||||
it('should reject paths outside all workspace directories', () => {
|
||||
const params = { path: '/etc/passwd' };
|
||||
const error = lsTool.validateToolParams(params);
|
||||
expect(error).toContain(
|
||||
'Path must be within one of the workspace directories',
|
||||
);
|
||||
expect(error).toContain(mockPrimaryDir);
|
||||
expect(error).toContain(mockSecondaryDir);
|
||||
});
|
||||
|
||||
it('should list files from secondary workspace directory', async () => {
|
||||
const testPath = `${mockSecondaryDir}/tests`;
|
||||
const mockFiles = ['test1.spec.ts', 'test2.spec.ts'];
|
||||
|
||||
vi.mocked(fs.statSync).mockImplementation((path: any) => {
|
||||
if (path.toString() === testPath) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return {
|
||||
isDirectory: () => false,
|
||||
mtime: new Date(),
|
||||
size: 512,
|
||||
} as fs.Stats;
|
||||
});
|
||||
|
||||
vi.mocked(fs.readdirSync).mockReturnValue(mockFiles as any);
|
||||
|
||||
const result = await lsTool.execute(
|
||||
{ path: testPath },
|
||||
new AbortController().signal,
|
||||
);
|
||||
|
||||
expect(result.llmContent).toContain('test1.spec.ts');
|
||||
expect(result.llmContent).toContain('test2.spec.ts');
|
||||
expect(result.returnDisplay).toBe('Listed 2 item(s).');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -11,7 +11,6 @@ import { Type } from '@google/genai';
|
||||
import { SchemaValidator } from '../utils/schemaValidator.js';
|
||||
import { makeRelative, shortenPath } from '../utils/paths.js';
|
||||
import { Config, DEFAULT_FILE_FILTERING_OPTIONS } from '../config/config.js';
|
||||
import { isWithinRoot } from '../utils/fileUtils.js';
|
||||
|
||||
/**
|
||||
* Parameters for the LS tool
|
||||
@@ -129,8 +128,11 @@ export class LSTool extends BaseTool<LSToolParams, ToolResult> {
|
||||
if (!path.isAbsolute(params.path)) {
|
||||
return `Path must be absolute: ${params.path}`;
|
||||
}
|
||||
if (!isWithinRoot(params.path, this.config.getTargetDir())) {
|
||||
return `Path must be within the root directory (${this.config.getTargetDir()}): ${params.path}`;
|
||||
|
||||
const workspaceContext = this.config.getWorkspaceContext();
|
||||
if (!workspaceContext.isPathWithinWorkspace(params.path)) {
|
||||
const directories = workspaceContext.getDirectories();
|
||||
return `Path must be within one of the workspace directories: ${directories.join(', ')}`;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -21,11 +21,14 @@ import { GoogleCredentialProvider } from '../mcp/google-auth-provider.js';
|
||||
import { AuthProviderType } from '../config/config.js';
|
||||
import { PromptRegistry } from '../prompts/prompt-registry.js';
|
||||
|
||||
import { DiscoveredMCPTool } from './mcp-tool.js';
|
||||
|
||||
vi.mock('@modelcontextprotocol/sdk/client/stdio.js');
|
||||
vi.mock('@modelcontextprotocol/sdk/client/index.js');
|
||||
vi.mock('@google/genai');
|
||||
vi.mock('../mcp/oauth-provider.js');
|
||||
vi.mock('../mcp/oauth-token-storage.js');
|
||||
vi.mock('./mcp-tool.js');
|
||||
|
||||
describe('mcp-client', () => {
|
||||
afterEach(() => {
|
||||
@@ -50,6 +53,52 @@ describe('mcp-client', () => {
|
||||
expect(tools.length).toBe(1);
|
||||
expect(mockedMcpToTool).toHaveBeenCalledOnce();
|
||||
});
|
||||
|
||||
it('should log an error if there is an error discovering a tool', async () => {
|
||||
const mockedClient = {} as unknown as ClientLib.Client;
|
||||
const consoleErrorSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
.mockImplementation(() => {
|
||||
// no-op
|
||||
});
|
||||
|
||||
const testError = new Error('Invalid tool name');
|
||||
vi.mocked(DiscoveredMCPTool).mockImplementation(
|
||||
(
|
||||
_mcpCallableTool: GenAiLib.CallableTool,
|
||||
_serverName: string,
|
||||
name: string,
|
||||
) => {
|
||||
if (name === 'invalid tool name') {
|
||||
throw testError;
|
||||
}
|
||||
return { name: 'validTool' } as DiscoveredMCPTool;
|
||||
},
|
||||
);
|
||||
|
||||
vi.mocked(GenAiLib.mcpToTool).mockReturnValue({
|
||||
tool: () =>
|
||||
Promise.resolve({
|
||||
functionDeclarations: [
|
||||
{
|
||||
name: 'validTool',
|
||||
},
|
||||
{
|
||||
name: 'invalid tool name', // this will fail validation
|
||||
},
|
||||
],
|
||||
}),
|
||||
} as unknown as GenAiLib.CallableTool);
|
||||
|
||||
const tools = await discoverTools('test-server', {}, mockedClient);
|
||||
|
||||
expect(tools.length).toBe(1);
|
||||
expect(tools[0].name).toBe('validTool');
|
||||
expect(consoleErrorSpy).toHaveBeenCalledOnce();
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
`Error discovering tool: 'invalid tool name' from MCP server 'test-server': ${testError.message}`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('discoverPrompts', () => {
|
||||
|
||||
@@ -145,20 +145,6 @@ export function getMCPDiscoveryState(): MCPDiscoveryState {
|
||||
return mcpDiscoveryState;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse www-authenticate header to extract OAuth metadata URI.
|
||||
*
|
||||
* @param wwwAuthenticate The www-authenticate header value
|
||||
* @returns The resource metadata URI if found, null otherwise
|
||||
*/
|
||||
function _parseWWWAuthenticate(wwwAuthenticate: string): string | null {
|
||||
// Parse header like: Bearer realm="MCP Server", resource_metadata_uri="https://..."
|
||||
const resourceMetadataMatch = wwwAuthenticate.match(
|
||||
/resource_metadata_uri="([^"]+)"/,
|
||||
);
|
||||
return resourceMetadataMatch ? resourceMetadataMatch[1] : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract WWW-Authenticate header from error message string.
|
||||
* This is a more robust approach than regex matching.
|
||||
@@ -380,33 +366,47 @@ export async function connectAndDiscover(
): Promise<void> {
updateMCPServerStatus(mcpServerName, MCPServerStatus.CONNECTING);

let mcpClient: Client | undefined;
try {
const mcpClient = await connectToMcpServer(
mcpClient = await connectToMcpServer(
mcpServerName,
mcpServerConfig,
debugMode,
);
try {
updateMCPServerStatus(mcpServerName, MCPServerStatus.CONNECTED);
mcpClient.onerror = (error) => {
console.error(`MCP ERROR (${mcpServerName}):`, error.toString());
updateMCPServerStatus(mcpServerName, MCPServerStatus.DISCONNECTED);
};
await discoverPrompts(mcpServerName, mcpClient, promptRegistry);

const tools = await discoverTools(
mcpServerName,
mcpServerConfig,
mcpClient,
);
for (const tool of tools) {
toolRegistry.registerTool(tool);
}
} catch (error) {
mcpClient.close();
throw error;
mcpClient.onerror = (error) => {
console.error(`MCP ERROR (${mcpServerName}):`, error.toString());
updateMCPServerStatus(mcpServerName, MCPServerStatus.DISCONNECTED);
};

// Attempt to discover both prompts and tools
const prompts = await discoverPrompts(
mcpServerName,
mcpClient,
promptRegistry,
);
const tools = await discoverTools(
mcpServerName,
mcpServerConfig,
mcpClient,
);

// If we have neither prompts nor tools, it's a failed discovery
if (prompts.length === 0 && tools.length === 0) {
throw new Error('No prompts or tools found on the server.');
}

// If we found anything, the server is connected
updateMCPServerStatus(mcpServerName, MCPServerStatus.CONNECTED);

// Register any discovered tools
for (const tool of tools) {
toolRegistry.registerTool(tool);
}
} catch (error) {
if (mcpClient) {
mcpClient.close();
}
console.error(
`Error connecting to MCP server '${mcpServerName}': ${getErrorMessage(
error,
@@ -437,30 +437,49 @@ export async function discoverTools(
const tool = await mcpCallableTool.tool();

if (!Array.isArray(tool.functionDeclarations)) {
throw new Error(`Server did not return valid function declarations.`);
// This is a valid case for a prompt-only server
return [];
}

const discoveredTools: DiscoveredMCPTool[] = [];
for (const funcDecl of tool.functionDeclarations) {
if (!isEnabled(funcDecl, mcpServerName, mcpServerConfig)) {
continue;
}
try {
if (!isEnabled(funcDecl, mcpServerName, mcpServerConfig)) {
continue;
}

discoveredTools.push(
new DiscoveredMCPTool(
mcpCallableTool,
mcpServerName,
funcDecl.name!,
funcDecl.description ?? '',
funcDecl.parametersJsonSchema ?? { type: 'object', properties: {} },
mcpServerConfig.timeout ?? MCP_DEFAULT_TIMEOUT_MSEC,
mcpServerConfig.trust,
),
);
discoveredTools.push(
new DiscoveredMCPTool(
mcpCallableTool,
mcpServerName,
funcDecl.name!,
funcDecl.description ?? '',
funcDecl.parametersJsonSchema ?? { type: 'object', properties: {} },
mcpServerConfig.timeout ?? MCP_DEFAULT_TIMEOUT_MSEC,
mcpServerConfig.trust,
),
);
} catch (error) {
console.error(
`Error discovering tool: '${
funcDecl.name
}' from MCP server '${mcpServerName}': ${(error as Error).message}`,
);
}
}
return discoveredTools;
} catch (error) {
throw new Error(`Error discovering tools: ${error}`);
if (
error instanceof Error &&
!error.message?.includes('Method not found')
) {
console.error(
`Error discovering tools from ${mcpServerName}: ${getErrorMessage(
error,
)}`,
);
}
return [];
}
}

@@ -475,7 +494,7 @@ export async function discoverPrompts(
mcpServerName: string,
mcpClient: Client,
promptRegistry: PromptRegistry,
): Promise<void> {
): Promise<Prompt[]> {
try {
const response = await mcpClient.request(
{ method: 'prompts/list', params: {} },
@@ -490,6 +509,7 @@ export async function discoverPrompts(
invokeMcpPrompt(mcpServerName, mcpClient, prompt.name, params),
});
}
return response.prompts;
} catch (error) {
// It's okay if this fails, not all servers will have prompts.
// Don't log an error if the method is not found, which is a common case.
@@ -503,6 +523,7 @@ export async function discoverPrompts(
)}`,
);
}
return [];
}
}

@@ -15,6 +15,7 @@ import {
import * as fs from 'fs/promises';
import * as path from 'path';
import * as os from 'os';
import { ToolConfirmationOutcome } from './tools.js';

// Mock dependencies
vi.mock('fs/promises');
@@ -46,7 +47,7 @@ describe('MemoryTool', () => {
};

beforeEach(() => {
vi.mocked(os.homedir).mockReturnValue('/mock/home');
vi.mocked(os.homedir).mockReturnValue(path.join('/mock', 'home'));
mockFsAdapter.readFile.mockReset();
mockFsAdapter.writeFile.mockReset().mockResolvedValue(undefined);
mockFsAdapter.mkdir
@@ -85,11 +86,11 @@ describe('MemoryTool', () => {
});

describe('performAddMemoryEntry (static method)', () => {
const testFilePath = path.join(
'/mock/home',
'.qwen',
DEFAULT_CONTEXT_FILENAME, // Use the default for basic tests
);
let testFilePath: string;

beforeEach(() => {
testFilePath = path.join(os.homedir(), '.qwen', DEFAULT_CONTEXT_FILENAME);
});

it('should create section and save a fact if file does not exist', async () => {
mockFsAdapter.readFile.mockRejectedValue({ code: 'ENOENT' }); // Simulate file not found
@@ -206,7 +207,7 @@ describe('MemoryTool', () => {
const result = await memoryTool.execute(params, mockAbortSignal);
// Use getCurrentGeminiMdFilename for the default expectation before any setGeminiMdFilename calls in a test
const expectedFilePath = path.join(
'/mock/home',
os.homedir(),
'.qwen',
getCurrentGeminiMdFilename(), // This will be DEFAULT_CONTEXT_FILENAME unless changed by a test
);
@@ -262,4 +263,151 @@ describe('MemoryTool', () => {
);
});
});

describe('shouldConfirmExecute', () => {
let memoryTool: MemoryTool;

beforeEach(() => {
memoryTool = new MemoryTool();
// Clear the allowlist before each test
(MemoryTool as unknown as { allowlist: Set<string> }).allowlist.clear();
// Mock fs.readFile to return empty string (file doesn't exist)
vi.mocked(fs.readFile).mockResolvedValue('');
});

it('should return confirmation details when memory file is not allowlisted', async () => {
const params = { fact: 'Test fact' };
const result = await memoryTool.shouldConfirmExecute(
params,
mockAbortSignal,
);

expect(result).toBeDefined();
expect(result).not.toBe(false);

if (result && result.type === 'edit') {
const expectedPath = path.join('~', '.qwen', 'QWEN.md');
expect(result.title).toBe(`Confirm Memory Save: ${expectedPath}`);
expect(result.fileName).toContain(path.join('mock', 'home', '.qwen'));
expect(result.fileName).toContain('QWEN.md');
expect(result.fileDiff).toContain('Index: QWEN.md');
expect(result.fileDiff).toContain('+## Qwen Added Memories');
expect(result.fileDiff).toContain('+- Test fact');
expect(result.originalContent).toBe('');
expect(result.newContent).toContain('## Qwen Added Memories');
expect(result.newContent).toContain('- Test fact');
}
});

it('should return false when memory file is already allowlisted', async () => {
const params = { fact: 'Test fact' };
const memoryFilePath = path.join(
os.homedir(),
'.qwen',
getCurrentGeminiMdFilename(),
);

// Add the memory file to the allowlist
(MemoryTool as unknown as { allowlist: Set<string> }).allowlist.add(
memoryFilePath,
);

const result = await memoryTool.shouldConfirmExecute(
params,
mockAbortSignal,
);

expect(result).toBe(false);
});

it('should add memory file to allowlist when ProceedAlways is confirmed', async () => {
const params = { fact: 'Test fact' };
const memoryFilePath = path.join(
os.homedir(),
'.qwen',
getCurrentGeminiMdFilename(),
);

const result = await memoryTool.shouldConfirmExecute(
params,
mockAbortSignal,
);

expect(result).toBeDefined();
expect(result).not.toBe(false);

if (result && result.type === 'edit') {
// Simulate the onConfirm callback
await result.onConfirm(ToolConfirmationOutcome.ProceedAlways);

// Check that the memory file was added to the allowlist
expect(
(MemoryTool as unknown as { allowlist: Set<string> }).allowlist.has(
memoryFilePath,
),
).toBe(true);
}
});

it('should not add memory file to allowlist when other outcomes are confirmed', async () => {
const params = { fact: 'Test fact' };
const memoryFilePath = path.join(
os.homedir(),
'.qwen',
getCurrentGeminiMdFilename(),
);

const result = await memoryTool.shouldConfirmExecute(
params,
mockAbortSignal,
);

expect(result).toBeDefined();
expect(result).not.toBe(false);

if (result && result.type === 'edit') {
// Simulate the onConfirm callback with different outcomes
await result.onConfirm(ToolConfirmationOutcome.ProceedOnce);
expect(
(MemoryTool as unknown as { allowlist: Set<string> }).allowlist.has(
memoryFilePath,
),
).toBe(false);

await result.onConfirm(ToolConfirmationOutcome.Cancel);
expect(
(MemoryTool as unknown as { allowlist: Set<string> }).allowlist.has(
memoryFilePath,
),
).toBe(false);
}
});

it('should handle existing memory file with content', async () => {
const params = { fact: 'New fact' };
const existingContent =
'Some existing content.\n\n## Qwen Added Memories\n- Old fact\n';

// Mock fs.readFile to return existing content
vi.mocked(fs.readFile).mockResolvedValue(existingContent);

const result = await memoryTool.shouldConfirmExecute(
params,
mockAbortSignal,
);

expect(result).toBeDefined();
expect(result).not.toBe(false);

if (result && result.type === 'edit') {
const expectedPath = path.join('~', '.qwen', 'QWEN.md');
expect(result.title).toBe(`Confirm Memory Save: ${expectedPath}`);
expect(result.fileDiff).toContain('Index: QWEN.md');
expect(result.fileDiff).toContain('+- New fact');
expect(result.originalContent).toBe(existingContent);
expect(result.newContent).toContain('- Old fact');
expect(result.newContent).toContain('- New fact');
}
});
});
});

@@ -4,11 +4,21 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { BaseTool, Icon, ToolResult } from './tools.js';
import {
BaseTool,
ToolResult,
ToolEditConfirmationDetails,
ToolConfirmationOutcome,
Icon,
} from './tools.js';
import { FunctionDeclaration, Type } from '@google/genai';
import * as fs from 'fs/promises';
import * as path from 'path';
import { homedir } from 'os';
import * as Diff from 'diff';
import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js';
import { tildeifyPath } from '../utils/paths.js';
import { ModifiableTool, ModifyContext } from './modifiable-tool.js';

const memoryToolSchemaData: FunctionDeclaration = {
name: 'save_memory',
@@ -80,6 +90,8 @@ export function getAllGeminiMdFilenames(): string[] {

interface SaveMemoryParams {
fact: string;
modified_by_user?: boolean;
modified_content?: string;
}

function getGlobalMemoryFilePath(): string {
@@ -98,7 +110,12 @@ function ensureNewlineSeparation(currentContent: string): string {
return '\n\n';
}

export class MemoryTool extends BaseTool<SaveMemoryParams, ToolResult> {
export class MemoryTool
extends BaseTool<SaveMemoryParams, ToolResult>
implements ModifiableTool<SaveMemoryParams>
{
private static readonly allowlist: Set<string> = new Set();

static readonly Name: string = memoryToolSchemaData.name!;
constructor() {
super(
@@ -110,6 +127,111 @@ export class MemoryTool extends BaseTool<SaveMemoryParams, ToolResult> {
);
}

getDescription(_params: SaveMemoryParams): string {
const memoryFilePath = getGlobalMemoryFilePath();
return `in ${tildeifyPath(memoryFilePath)}`;
}

/**
 * Reads the current content of the memory file
 */
private async readMemoryFileContent(): Promise<string> {
try {
return await fs.readFile(getGlobalMemoryFilePath(), 'utf-8');
} catch (err) {
const error = err as Error & { code?: string };
if (!(error instanceof Error) || error.code !== 'ENOENT') throw err;
return '';
}
}

/**
 * Computes the new content that would result from adding a memory entry
 */
private computeNewContent(currentContent: string, fact: string): string {
let processedText = fact.trim();
processedText = processedText.replace(/^(-+\s*)+/, '').trim();
const newMemoryItem = `- ${processedText}`;

const headerIndex = currentContent.indexOf(MEMORY_SECTION_HEADER);

if (headerIndex === -1) {
// Header not found, append header and then the entry
const separator = ensureNewlineSeparation(currentContent);
return (
currentContent +
`${separator}${MEMORY_SECTION_HEADER}\n${newMemoryItem}\n`
);
} else {
// Header found, find where to insert the new memory entry
const startOfSectionContent = headerIndex + MEMORY_SECTION_HEADER.length;
let endOfSectionIndex = currentContent.indexOf(
'\n## ',
startOfSectionContent,
);
if (endOfSectionIndex === -1) {
endOfSectionIndex = currentContent.length; // End of file
}

const beforeSectionMarker = currentContent
.substring(0, startOfSectionContent)
.trimEnd();
let sectionContent = currentContent
.substring(startOfSectionContent, endOfSectionIndex)
.trimEnd();
const afterSectionMarker = currentContent.substring(endOfSectionIndex);

sectionContent += `\n${newMemoryItem}`;
return (
`${beforeSectionMarker}\n${sectionContent.trimStart()}\n${afterSectionMarker}`.trimEnd() +
'\n'
);
}
}

async shouldConfirmExecute(
params: SaveMemoryParams,
_abortSignal: AbortSignal,
): Promise<ToolEditConfirmationDetails | false> {
const memoryFilePath = getGlobalMemoryFilePath();
const allowlistKey = memoryFilePath;

if (MemoryTool.allowlist.has(allowlistKey)) {
return false;
}

// Read current content of the memory file
const currentContent = await this.readMemoryFileContent();

// Calculate the new content that will be written to the memory file
const newContent = this.computeNewContent(currentContent, params.fact);

const fileName = path.basename(memoryFilePath);
const fileDiff = Diff.createPatch(
fileName,
currentContent,
newContent,
'Current',
'Proposed',
DEFAULT_DIFF_OPTIONS,
);

const confirmationDetails: ToolEditConfirmationDetails = {
type: 'edit',
title: `Confirm Memory Save: ${tildeifyPath(memoryFilePath)}`,
fileName: memoryFilePath,
fileDiff,
originalContent: currentContent,
newContent,
onConfirm: async (outcome: ToolConfirmationOutcome) => {
if (outcome === ToolConfirmationOutcome.ProceedAlways) {
MemoryTool.allowlist.add(allowlistKey);
}
},
};
return confirmationDetails;
}

static async performAddMemoryEntry(
text: string,
memoryFilePath: string,
@@ -184,7 +306,7 @@ export class MemoryTool extends BaseTool<SaveMemoryParams, ToolResult> {
params: SaveMemoryParams,
_signal: AbortSignal,
): Promise<ToolResult> {
const { fact } = params;
const { fact, modified_by_user, modified_content } = params;

if (!fact || typeof fact !== 'string' || fact.trim() === '') {
const errorMessage = 'Parameter "fact" must be a non-empty string.';
@@ -195,17 +317,44 @@ export class MemoryTool extends BaseTool<SaveMemoryParams, ToolResult> {
}

try {
// Use the static method with actual fs promises
await MemoryTool.performAddMemoryEntry(fact, getGlobalMemoryFilePath(), {
readFile: fs.readFile,
writeFile: fs.writeFile,
mkdir: fs.mkdir,
});
const successMessage = `Okay, I've remembered that: "${fact}"`;
return {
llmContent: JSON.stringify({ success: true, message: successMessage }),
returnDisplay: successMessage,
};
if (modified_by_user && modified_content !== undefined) {
// User modified the content in external editor, write it directly
await fs.mkdir(path.dirname(getGlobalMemoryFilePath()), {
recursive: true,
});
await fs.writeFile(
getGlobalMemoryFilePath(),
modified_content,
'utf-8',
);
const successMessage = `Okay, I've updated the memory file with your modifications.`;
return {
llmContent: JSON.stringify({
success: true,
message: successMessage,
}),
returnDisplay: successMessage,
};
} else {
// Use the normal memory entry logic
await MemoryTool.performAddMemoryEntry(
fact,
getGlobalMemoryFilePath(),
{
readFile: fs.readFile,
writeFile: fs.writeFile,
mkdir: fs.mkdir,
},
);
const successMessage = `Okay, I've remembered that: "${fact}"`;
return {
llmContent: JSON.stringify({
success: true,
message: successMessage,
}),
returnDisplay: successMessage,
};
}
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
@@ -221,4 +370,25 @@ export class MemoryTool extends BaseTool<SaveMemoryParams, ToolResult> {
};
}
}

getModifyContext(_abortSignal: AbortSignal): ModifyContext<SaveMemoryParams> {
return {
getFilePath: (_params: SaveMemoryParams) => getGlobalMemoryFilePath(),
getCurrentContent: async (_params: SaveMemoryParams): Promise<string> =>
this.readMemoryFileContent(),
getProposedContent: async (params: SaveMemoryParams): Promise<string> => {
const currentContent = await this.readMemoryFileContent();
return this.computeNewContent(currentContent, params.fact);
},
createUpdatedParams: (
_oldContent: string,
modifiedProposedContent: string,
originalParams: SaveMemoryParams,
): SaveMemoryParams => ({
...originalParams,
modified_by_user: true,
modified_content: modifiedProposedContent,
}),
};
}
}

@@ -12,6 +12,7 @@ import fs from 'fs';
import fsp from 'fs/promises';
import { Config } from '../config/config.js';
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';

describe('ReadFileTool', () => {
let tempRootDir: string;
@@ -27,6 +28,7 @@ describe('ReadFileTool', () => {
const mockConfigInstance = {
getFileService: () => new FileDiscoveryService(tempRootDir),
getTargetDir: () => tempRootDir,
getWorkspaceContext: () => createMockWorkspaceContext(tempRootDir),
} as unknown as Config;
tool = new ReadFileTool(mockConfigInstance);
});
@@ -65,8 +67,9 @@ describe('ReadFileTool', () => {
it('should return error for path outside root', () => {
const outsidePath = path.resolve(os.tmpdir(), 'outside-root.txt');
const params: ReadFileToolParams = { absolute_path: outsidePath };
expect(tool.validateToolParams(params)).toMatch(
/File path must be within the root directory/,
const error = tool.validateToolParams(params);
expect(error).toContain(
'File path must be within one of the workspace directories',
);
});

@@ -219,7 +222,7 @@ describe('ReadFileTool', () => {
'Line 7',
'Line 8',
].join('\n'),
returnDisplay: '(truncated)',
returnDisplay: 'Read lines 6-8 of 20 from paginated.txt',
});
});

@@ -261,4 +264,36 @@ describe('ReadFileTool', () => {
});
});
});

describe('workspace boundary validation', () => {
it('should validate paths are within workspace root', () => {
const params: ReadFileToolParams = {
absolute_path: path.join(tempRootDir, 'file.txt'),
};
expect(tool.validateToolParams(params)).toBeNull();
});

it('should reject paths outside workspace root', () => {
const params: ReadFileToolParams = {
absolute_path: '/etc/passwd',
};
const error = tool.validateToolParams(params);
expect(error).toContain(
'File path must be within one of the workspace directories',
);
expect(error).toContain(tempRootDir);
});

it('should provide clear error message with workspace directories', () => {
const outsidePath = path.join(os.tmpdir(), 'outside-workspace.txt');
const params: ReadFileToolParams = {
absolute_path: outsidePath,
};
const error = tool.validateToolParams(params);
expect(error).toContain(
'File path must be within one of the workspace directories',
);
expect(error).toContain(tempRootDir);
});
});
});

@@ -10,7 +10,6 @@ import { makeRelative, shortenPath } from '../utils/paths.js';
import { BaseTool, Icon, ToolLocation, ToolResult } from './tools.js';
import { Type } from '@google/genai';
import {
isWithinRoot,
processSingleFileContent,
getSpecificMimeType,
} from '../utils/fileUtils.js';
@@ -86,8 +85,11 @@ export class ReadFileTool extends BaseTool<ReadFileToolParams, ToolResult> {
if (!path.isAbsolute(filePath)) {
return `File path must be absolute, but was relative: ${filePath}. You must provide an absolute path.`;
}
if (!isWithinRoot(filePath, this.config.getTargetDir())) {
return `File path must be within the root directory (${this.config.getTargetDir()}): ${filePath}`;

const workspaceContext = this.config.getWorkspaceContext();
if (!workspaceContext.isPathWithinWorkspace(filePath)) {
const directories = workspaceContext.getDirectories();
return `File path must be within one of the workspace directories: ${directories.join(', ')}`;
}
if (params.offset !== undefined && params.offset < 0) {
return 'Offset must be a non-negative number';
@@ -145,7 +147,7 @@ export class ReadFileTool extends BaseTool<ReadFileToolParams, ToolResult> {
if (result.error) {
return {
llmContent: result.error, // The detailed error for LLM
returnDisplay: result.returnDisplay, // User-friendly error
returnDisplay: result.returnDisplay || 'Error reading file', // User-friendly error
};
}

@@ -163,8 +165,8 @@ export class ReadFileTool extends BaseTool<ReadFileToolParams, ToolResult> {
);

return {
llmContent: result.llmContent,
returnDisplay: result.returnDisplay,
llmContent: result.llmContent || '',
returnDisplay: result.returnDisplay || '',
};
}
}

@@ -13,6 +13,7 @@ import path from 'path';
import fs from 'fs'; // Actual fs for setup
import os from 'os';
import { Config } from '../config/config.js';
import { WorkspaceContext } from '../utils/workspaceContext.js';

vi.mock('mime-types', () => {
const lookup = (filename: string) => {
@@ -48,11 +49,11 @@ describe('ReadManyFilesTool', () => {
let mockReadFileFn: Mock;

beforeEach(async () => {
tempRootDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'read-many-files-root-'),
tempRootDir = fs.realpathSync(
fs.mkdtempSync(path.join(os.tmpdir(), 'read-many-files-root-')),
);
tempDirOutsideRoot = fs.mkdtempSync(
path.join(os.tmpdir(), 'read-many-files-external-'),
tempDirOutsideRoot = fs.realpathSync(
fs.mkdtempSync(path.join(os.tmpdir(), 'read-many-files-external-')),
);
fs.writeFileSync(path.join(tempRootDir, '.geminiignore'), 'foo.*');
const fileService = new FileDiscoveryService(tempRootDir);
@@ -64,6 +65,8 @@ describe('ReadManyFilesTool', () => {
respectGeminiIgnore: true,
}),
getTargetDir: () => tempRootDir,
getWorkspaceDirs: () => [tempRootDir],
getWorkspaceContext: () => new WorkspaceContext(tempRootDir),
} as Partial<Config> as Config;
tool = new ReadManyFilesTool(mockConfig);

@@ -424,5 +427,54 @@ describe('ReadManyFilesTool', () => {
expect(result.returnDisplay).not.toContain('foo.quux');
expect(result.returnDisplay).toContain('bar.ts');
});

it('should read files from multiple workspace directories', async () => {
const tempDir1 = fs.realpathSync(
fs.mkdtempSync(path.join(os.tmpdir(), 'multi-dir-1-')),
);
const tempDir2 = fs.realpathSync(
fs.mkdtempSync(path.join(os.tmpdir(), 'multi-dir-2-')),
);
const fileService = new FileDiscoveryService(tempDir1);
const mockConfig = {
getFileService: () => fileService,
getFileFilteringOptions: () => ({
respectGitIgnore: true,
respectGeminiIgnore: true,
}),
getWorkspaceContext: () => new WorkspaceContext(tempDir1, [tempDir2]),
getTargetDir: () => tempDir1,
} as Partial<Config> as Config;
tool = new ReadManyFilesTool(mockConfig);

fs.writeFileSync(path.join(tempDir1, 'file1.txt'), 'Content1');
fs.writeFileSync(path.join(tempDir2, 'file2.txt'), 'Content2');

const params = { paths: ['*.txt'] };
const result = await tool.execute(params, new AbortController().signal);
const content = result.llmContent as string[];
if (!Array.isArray(content)) {
throw new Error(`llmContent is not an array: ${content}`);
}
const expectedPath1 = path.join(tempDir1, 'file1.txt');
const expectedPath2 = path.join(tempDir2, 'file2.txt');

expect(
content.some((c) =>
c.includes(`--- ${expectedPath1} ---\n\nContent1\n\n`),
),
).toBe(true);
expect(
content.some((c) =>
c.includes(`--- ${expectedPath2} ---\n\nContent2\n\n`),
),
).toBe(true);
expect(result.returnDisplay).toContain(
'Successfully read and concatenated content from **2 file(s)**',
);

fs.rmSync(tempDir1, { recursive: true, force: true });
fs.rmSync(tempDir2, { recursive: true, force: true });
});
});
});

@@ -302,17 +302,27 @@ Use this tool when the user's query implies needing the content of several files
}

try {
const patterns = searchPatterns.map((p) => p.replace(/\\/g, '/'));
const entries: string[] = await glob(patterns, {
cwd: this.config.getTargetDir(),
ignore: effectiveExcludes,
nodir: true,
dot: true,
absolute: true,
nocase: true,
signal,
withFileTypes: false,
});
const allEntries = new Set<string>();
const workspaceDirs = this.config.getWorkspaceContext().getDirectories();

for (const dir of workspaceDirs) {
const entriesInDir = await glob(
searchPatterns.map((p) => p.replace(/\\/g, '/')),
{
cwd: dir,
ignore: effectiveExcludes,
nodir: true,
dot: true,
absolute: true,
nocase: true,
signal,
},
);
for (const entry of entriesInDir) {
allEntries.add(entry);
}
}
const entries = Array.from(allEntries);

const gitFilteredEntries = fileFilteringOptions.respectGitIgnore
? fileDiscovery
@@ -345,11 +355,15 @@ Use this tool when the user's query implies needing the content of several files
let geminiIgnoredCount = 0;

for (const absoluteFilePath of entries) {
// Security check: ensure the glob library didn't return something outside targetDir.
if (!absoluteFilePath.startsWith(this.config.getTargetDir())) {
// Security check: ensure the glob library didn't return something outside the workspace.
if (
!this.config
.getWorkspaceContext()
.isPathWithinWorkspace(absoluteFilePath)
) {
skippedFiles.push({
path: absoluteFilePath,
reason: `Security: Glob library returned path outside target directory. Base: ${this.config.getTargetDir()}, Path: ${absoluteFilePath}`,
reason: `Security: Glob library returned path outside workspace. Path: ${absoluteFilePath}`,
});
continue;
}

@@ -37,6 +37,7 @@ import * as crypto from 'crypto';
import * as summarizer from '../utils/summarizer.js';
import { ToolConfirmationOutcome } from './tools.js';
import { OUTPUT_UPDATE_INTERVAL_MS } from './shell.js';
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';

describe('ShellTool', () => {
let shellTool: ShellTool;
@@ -53,6 +54,7 @@ describe('ShellTool', () => {
getDebugMode: vi.fn().mockReturnValue(false),
getTargetDir: vi.fn().mockReturnValue('/test/dir'),
getSummarizeToolOutputConfig: vi.fn().mockReturnValue(undefined),
getWorkspaceContext: () => createMockWorkspaceContext('.'),
getGeminiClient: vi.fn(),
} as unknown as Config;

@@ -105,7 +107,7 @@ describe('ShellTool', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
expect(
shellTool.validateToolParams({ command: 'ls', directory: 'rel/path' }),
).toBe('Directory must exist.');
).toBe("Directory 'rel/path' is not a registered workspace directory.");
});
});

@@ -385,3 +387,37 @@ describe('ShellTool', () => {
});
});
});

describe('validateToolParams', () => {
it('should return null for valid directory', () => {
const config = {
getCoreTools: () => undefined,
getExcludeTools: () => undefined,
getTargetDir: () => '/root',
getWorkspaceContext: () =>
createMockWorkspaceContext('/root', ['/users/test']),
} as unknown as Config;
const shellTool = new ShellTool(config);
const result = shellTool.validateToolParams({
command: 'ls',
directory: 'test',
});
expect(result).toBeNull();
});

it('should return error for directory outside workspace', () => {
const config = {
getCoreTools: () => undefined,
getExcludeTools: () => undefined,
getTargetDir: () => '/root',
getWorkspaceContext: () =>
createMockWorkspaceContext('/root', ['/users/test']),
} as unknown as Config;
const shellTool = new ShellTool(config);
const result = shellTool.validateToolParams({
command: 'ls',
directory: 'test2',
});
expect(result).toContain('is not a registered workspace directory');
});
});

@@ -124,14 +124,19 @@ export class ShellTool extends BaseTool<ShellToolParams, ToolResult> {
}
if (params.directory) {
if (path.isAbsolute(params.directory)) {
return 'Directory cannot be absolute. Must be relative to the project root directory.';
return 'Directory cannot be absolute. Please refer to workspace directories by their name.';
}
const directory = path.resolve(
this.config.getTargetDir(),
params.directory,
const workspaceDirs = this.config.getWorkspaceContext().getDirectories();
const matchingDirs = workspaceDirs.filter(
(dir) => path.basename(dir) === params.directory,
);
if (!fs.existsSync(directory)) {
return 'Directory must exist.';

if (matchingDirs.length === 0) {
return `Directory '${params.directory}' is not a registered workspace directory.`;
}

if (matchingDirs.length > 1) {
return `Directory name '${params.directory}' is ambiguous as it matches multiple workspace directories.`;
}
}
return null;

packages/core/src/tools/tool-error.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * A type-safe enum for tool-related errors.
 */
export enum ToolErrorType {
// General Errors
INVALID_TOOL_PARAMS = 'invalid_tool_params',
UNKNOWN = 'unknown',
UNHANDLED_EXCEPTION = 'unhandled_exception',
TOOL_NOT_REGISTERED = 'tool_not_registered',

// File System Errors
FILE_NOT_FOUND = 'file_not_found',
FILE_WRITE_FAILURE = 'file_write_failure',
READ_CONTENT_FAILURE = 'read_content_failure',
ATTEMPT_TO_CREATE_EXISTING_FILE = 'attempt_to_create_existing_file',

// Edit-specific Errors
EDIT_PREPARATION_FAILURE = 'edit_preparation_failure',
EDIT_NO_OCCURRENCE_FOUND = 'edit_no_occurrence_found',
EDIT_EXPECTED_OCCURRENCE_MISMATCH = 'edit_expected_occurrence_mismatch',
EDIT_NO_CHANGE = 'edit_no_change',
}
@@ -30,6 +30,10 @@ import {
Schema,
} from '@google/genai';
import { spawn } from 'node:child_process';
import { IdeClient } from '../ide/ide-client.js';
import fs from 'node:fs';

vi.mock('node:fs');

// Use vi.hoisted to define the mock function so it can be used in the vi.mock factory
const mockDiscoverMcpTools = vi.hoisted(() => vi.fn());
@@ -136,6 +140,7 @@ const baseConfigParams: ConfigParameters = {
geminiMdFileCount: 0,
approvalMode: ApprovalMode.DEFAULT,
sessionId: 'test-session-id',
ideClient: IdeClient.getInstance(false),
};

describe('ToolRegistry', () => {
@@ -144,6 +149,10 @@ describe('ToolRegistry', () => {
let mockConfigGetToolDiscoveryCommand: ReturnType<typeof vi.spyOn>;

beforeEach(() => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.statSync).mockReturnValue({
isDirectory: () => true,
} as fs.Stats);
config = new Config(baseConfigParams);
toolRegistry = new ToolRegistry(config);
vi.spyOn(console, 'warn').mockImplementation(() => {});

@@ -5,6 +5,7 @@
 */

import { FunctionDeclaration, PartListUnion, Schema } from '@google/genai';
import { ToolErrorType } from './tool-error.js';

/**
 * Interface representing the base Tool functionality
@@ -217,6 +218,14 @@ export interface ToolResult {
 * For now, we keep it as the core logic in ReadFileTool currently produces it.
 */
returnDisplay: ToolResultDisplay;

/**
 * If this property is present, the tool call is considered a failure.
 */
error?: {
message: string; // raw error message
type?: ToolErrorType; // An optional machine-readable error type (e.g., 'FILE_NOT_FOUND').
};
}

export type ToolResultDisplay = string | FileDiff;

@@ -13,7 +13,7 @@ import {
vi,
type Mocked,
} from 'vitest';
import { WriteFileTool } from './write-file.js';
import { WriteFileTool, WriteFileToolParams } from './write-file.js';
import {
FileDiff,
ToolConfirmationOutcome,
@@ -31,6 +31,7 @@ import {
ensureCorrectFileContent,
CorrectedEditResult,
} from '../utils/editCorrector.js';
import { createMockWorkspaceContext } from '../test-utils/mockWorkspaceContext.js';

const rootDir = path.resolve(os.tmpdir(), 'gemini-cli-test-root');

@@ -54,6 +55,7 @@ const mockConfigInternal = {
getApprovalMode: vi.fn(() => ApprovalMode.DEFAULT),
setApprovalMode: vi.fn(),
getGeminiClient: vi.fn(), // Initialize as a plain mock function
getWorkspaceContext: () => createMockWorkspaceContext(rootDir),
getApiKey: () => 'test-key',
getModel: () => 'test-model',
getSandbox: () => false,
@@ -83,6 +85,7 @@ describe('WriteFileTool', () => {
let tempDir: string;

beforeEach(() => {
vi.clearAllMocks();
// Create a unique temporary directory for files created outside the root
tempDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'write-file-test-external-'),
@@ -98,6 +101,11 @@ describe('WriteFileTool', () => {
) as Mocked<GeminiClient>;
vi.mocked(GeminiClient).mockImplementation(() => mockGeminiClientInstance);

vi.mocked(ensureCorrectEdit).mockImplementation(mockEnsureCorrectEdit);
vi.mocked(ensureCorrectFileContent).mockImplementation(
mockEnsureCorrectFileContent,
);

// Now that mockGeminiClientInstance is initialized, set the mock implementation for getGeminiClient
mockConfigInternal.getGeminiClient.mockReturnValue(
mockGeminiClientInstance,
@@ -177,8 +185,9 @@ describe('WriteFileTool', () => {
file_path: outsidePath,
content: 'hello',
};
expect(tool.validateToolParams(params)).toMatch(
/File path must be within the root directory/,
const error = tool.validateToolParams(params);
expect(error).toContain(
'File path must be within one of the workspace directories',
);
});

@@ -193,6 +202,32 @@ describe('WriteFileTool', () => {
`Path is a directory, not a file: ${dirAsFilePath}`,
);
});

it('should return error if the content is null', () => {
const dirAsFilePath = path.join(rootDir, 'a_directory');
fs.mkdirSync(dirAsFilePath);
const params = {
file_path: dirAsFilePath,
content: null,
} as unknown as WriteFileToolParams; // Intentionally non-conforming
expect(tool.validateToolParams(params)).toMatch(
`params/content must be string`,
);
});
});

describe('getDescription', () => {
it('should return error if the file_path is empty', () => {
const dirAsFilePath = path.join(rootDir, 'a_directory');
fs.mkdirSync(dirAsFilePath);
const params = {
file_path: '',
content: '',
};
expect(tool.getDescription(params)).toMatch(
`Model did not provide valid parameters for write file tool, missing or empty "file_path"`,
);
});
});

describe('_getCorrectedFileContent', () => {
@@ -427,8 +462,8 @@ describe('WriteFileTool', () => {
const params = { file_path: outsidePath, content: 'test' };
const result = await tool.execute(params, abortSignal);
expect(result.llmContent).toMatch(/Error: Invalid parameters provided/);
expect(result.returnDisplay).toMatch(
/Error: File path must be within the root directory/,
expect(result.returnDisplay).toContain(
'Error: File path must be within one of the workspace directories',
);
});

@@ -616,4 +651,39 @@ describe('WriteFileTool', () => {
expect(result.llmContent).not.toMatch(/User modified the `content`/);
});
});

describe('workspace boundary validation', () => {
it('should validate paths are within workspace root', () => {
const params = {
file_path: path.join(rootDir, 'file.txt'),
content: 'test content',
};
expect(tool.validateToolParams(params)).toBeNull();
});

it('should reject paths outside workspace root', () => {
const params = {
file_path: '/etc/passwd',
content: 'malicious',
};
const error = tool.validateToolParams(params);
expect(error).toContain(
'File path must be within one of the workspace directories',
);
expect(error).toContain(rootDir);
});

it('should provide clear error message with workspace directories', () => {
const outsidePath = path.join(tempDir, 'outside-root.txt');
const params = {
file_path: outsidePath,
content: 'test',
};
const error = tool.validateToolParams(params);
expect(error).toContain(
'File path must be within one of the workspace directories',
);
expect(error).toContain(rootDir);
});
});
});

@@ -27,7 +27,7 @@ import {
} from '../utils/editCorrector.js';
import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js';
import { ModifiableTool, ModifyContext } from './modifiable-tool.js';
import { getSpecificMimeType, isWithinRoot } from '../utils/fileUtils.js';
import { getSpecificMimeType } from '../utils/fileUtils.js';
import {
recordFileOperationMetric,
FileOperation,
@@ -105,8 +105,11 @@ export class WriteFileTool
if (!path.isAbsolute(filePath)) {
return `File path must be absolute: ${filePath}`;
}
if (!isWithinRoot(filePath, this.config.getTargetDir())) {
return `File path must be within the root directory (${this.config.getTargetDir()}): ${filePath}`;

const workspaceContext = this.config.getWorkspaceContext();
if (!workspaceContext.isPathWithinWorkspace(filePath)) {
const directories = workspaceContext.getDirectories();
return `File path must be within one of the workspace directories: ${directories.join(', ')}`;
}

try {
@@ -128,8 +131,8 @@ export class WriteFileTool
}

getDescription(params: WriteFileToolParams): string {
if (!params.file_path || !params.content) {
return `Model did not provide valid parameters for write file tool`;
if (!params.file_path) {
return `Model did not provide valid parameters for write file tool, missing or empty "file_path"`;
}
const relativePath = makeRelative(
params.file_path,