Mirror of https://github.com/QwenLM/qwen-code.git (synced 2025-12-20 16:57:46 +00:00)

Merge tag 'v0.3.0' into chore/sync-gemini-cli-v0.3.0

@@ -5,9 +5,9 @@
  */

 import { describe, it, expect, beforeEach, afterEach } from 'vitest';
-import * as fsPromises from 'fs/promises';
-import * as path from 'path';
-import * as os from 'os';
+import * as fsPromises from 'node:fs/promises';
+import * as path from 'node:path';
+import * as os from 'node:os';
 import { bfsFileSearch } from './bfsFileSearch.js';
 import { FileDiscoveryService } from '../services/fileDiscoveryService.js';

@@ -4,10 +4,10 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import * as fs from 'fs/promises';
-import * as path from 'path';
-import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
-import { FileFilteringOptions } from '../config/config.js';
+import * as fs from 'node:fs/promises';
+import * as path from 'node:path';
+import type { FileDiscoveryService } from '../services/fileDiscoveryService.js';
+import type { FileFilteringOptions } from '../config/config.js';
 // Simple console logger for now.
 // TODO: Integrate with a more robust server-side logger.
 const logger = {

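Most of the hunks in this merge make two mechanical changes: bare Node builtin specifiers gain the 'node:' prefix, and imports used only in type positions become type-only. As a side note (not part of the commit), an `import type` is fully erased by the TypeScript compiler, so the emitted JavaScript never loads that module at runtime; a minimal sketch:

// Erased at compile time: no require/import of 'node:fs' is emitted for this.
import type { Dirent } from 'node:fs';

// The type can still be used in annotations; there is no runtime reference.
export function isDirectory(entry: Dirent): boolean {
  return entry.isDirectory();
}
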
@@ -5,16 +5,9 @@
  */

 /* eslint-disable @typescript-eslint/no-explicit-any */
-import {
-  vi,
-  describe,
-  it,
-  expect,
-  beforeEach,
-  Mock,
-  type Mocked,
-} from 'vitest';
-import * as fs from 'fs';
+import type { Mock } from 'vitest';
+import { vi, describe, it, expect, beforeEach, type Mocked } from 'vitest';
+import * as fs from 'node:fs';
 import { EditTool } from '../tools/edit.js';

 // MOCKS

@@ -27,6 +20,7 @@ let mockSendMessageStream: any;

 vi.mock('fs', () => ({
   statSync: vi.fn(),
+  mkdirSync: vi.fn(),
 }));

 vi.mock('../core/client.js', () => ({

@@ -4,9 +4,10 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import { Content, GenerateContentConfig } from '@google/genai';
-import { GeminiClient } from '../core/client.js';
-import { EditToolParams, EditTool } from '../tools/edit.js';
+import type { Content, GenerateContentConfig } from '@google/genai';
+import type { GeminiClient } from '../core/client.js';
+import type { EditToolParams } from '../tools/edit.js';
+import { EditTool } from '../tools/edit.js';
 import { WriteFileTool } from '../tools/write-file.js';
 import { ReadFileTool } from '../tools/read-file.js';
 import { ReadManyFilesTool } from '../tools/read-many-files.js';

@@ -17,7 +18,7 @@ import {
   isFunctionResponse,
   isFunctionCall,
 } from '../utils/messageInspectors.js';
-import * as fs from 'fs';
+import * as fs from 'node:fs';

 const EditModel = DEFAULT_GEMINI_FLASH_LITE_MODEL;
 const EditConfig: GenerateContentConfig = {

@@ -21,7 +21,7 @@ import {
   isEditorAvailable,
   type EditorType,
 } from './editor.js';
-import { execSync, spawn } from 'child_process';
+import { execSync, spawn } from 'node:child_process';

 vi.mock('child_process', () => ({
   execSync: vi.fn(),

@@ -290,7 +290,7 @@ describe('editor utils', () => {
           '-c',
           'wincmd l | setlocal statusline=%#StatusBold#NEW\\ FILE\\ :wqa(save\\ &\\ quit)\\ \\|\\ i/esc(toggle\\ edit\\ mode)',
           '-c',
-          'autocmd WinClosed * wqa',
+          'autocmd BufWritePost * wqa',
           'old.txt',
           'new.txt',
         ],

@@ -4,7 +4,7 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import { execSync, spawn } from 'child_process';
+import { execSync, spawn } from 'node:child_process';

 export type EditorType =
   | 'vscode'

@@ -140,7 +140,7 @@ export function getDiffCommand(
       'wincmd l | setlocal statusline=%#StatusBold#NEW\\ FILE\\ :wqa(save\\ &\\ quit)\\ \\|\\ i/esc(toggle\\ edit\\ mode)',
       // Auto close all windows when one is closed
       '-c',
-      'autocmd WinClosed * wqa',
+      'autocmd BufWritePost * wqa',
       oldPath,
       newPath,
     ],

@@ -17,7 +17,7 @@ import {
   getEnvironmentContext,
   getDirectoryContextString,
 } from './environmentContext.js';
-import { Config } from '../config/config.js';
+import type { Config } from '../config/config.js';
 import { getFolderStructure } from './getFolderStructure.js';

 vi.mock('../config/config.js');

@@ -4,8 +4,8 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import { Part } from '@google/genai';
-import { Config } from '../config/config.js';
+import type { Part } from '@google/genai';
+import type { Config } from '../config/config.js';
 import { getFolderStructure } from './getFolderStructure.js';

 /**

@@ -10,7 +10,7 @@ import { isProQuotaExceededError } from './quotaErrorDetection.js';
 import { DEFAULT_GEMINI_FLASH_MODEL } from '../config/models.js';
 import { UserTierId } from '../code_assist/types.js';
 import { AuthType } from '../core/contentGenerator.js';
-import { StructuredError } from '../core/turn.js';
+import type { StructuredError } from '../core/turn.js';

 describe('parseAndFormatApiError', () => {
   const vertexMessage = 'request a quota increase through Vertex';

@@ -7,7 +7,7 @@
 import fs from 'node:fs/promises';
 import os from 'node:os';
 import path from 'node:path';
-import { Content } from '@google/genai';
+import type { Content } from '@google/genai';

 interface ErrorReportData {
   error: { message: string; stack?: string } | { message: string };

@@ -25,6 +25,41 @@ export function getErrorMessage(error: unknown): string {
   }
 }

+export class FatalError extends Error {
+  constructor(
+    message: string,
+    readonly exitCode: number,
+  ) {
+    super(message);
+  }
+}
+
+export class FatalAuthenticationError extends FatalError {
+  constructor(message: string) {
+    super(message, 41);
+  }
+}
+export class FatalInputError extends FatalError {
+  constructor(message: string) {
+    super(message, 42);
+  }
+}
+export class FatalSandboxError extends FatalError {
+  constructor(message: string) {
+    super(message, 44);
+  }
+}
+export class FatalConfigError extends FatalError {
+  constructor(message: string) {
+    super(message, 52);
+  }
+}
+export class FatalTurnLimitedError extends FatalError {
+  constructor(message: string) {
+    super(message, 53);
+  }
+}
+
 export class ForbiddenError extends Error {}
 export class UnauthorizedError extends Error {}
 export class BadRequestError extends Error {}

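A plausible top-level use of this hierarchy (a sketch, not code from this commit; the handler and main() below are hypothetical):

import { FatalError, FatalConfigError } from './errors.js';

async function main(): Promise<void> {
  // Any layer can abort with a typed fatal error.
  throw new FatalConfigError('settings file is malformed');
}

main().catch((err: unknown) => {
  if (err instanceof FatalError) {
    // Subclasses carry distinct exit codes: 41 (auth), 42 (input),
    // 44 (sandbox), 52 (config), 53 (turn limit).
    console.error(err.message);
    process.exit(err.exitCode);
  }
  throw err;
});
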
@@ -5,7 +5,7 @@
  */

 import { getErrorMessage, isNodeError } from './errors.js';
-import { URL } from 'url';
+import { URL } from 'node:url';

 const PRIVATE_IP_RANGES = [
   /^10\./,

@@ -6,9 +6,11 @@

 import fs from 'node:fs';
 import path from 'node:path';
-import { PartUnion } from '@google/genai';
+import type { PartUnion } from '@google/genai';
 import mime from 'mime-types';
-import { FileSystemService } from '../services/fileSystemService.js';
+import type { FileSystemService } from '../services/fileSystemService.js';
+import { ToolErrorType } from '../tools/tool-error.js';
+import { BINARY_EXTENSIONS } from './ignorePatterns.js';

 // Constants for text file processing
 const DEFAULT_MAX_LINES_TEXT_FILE = 2000;

@@ -152,38 +154,7 @@ export async function detectFileType(

   // Stricter binary check for common non-text extensions before content check
   // These are often not well-covered by mime-types or might be misidentified.
-  if (
-    [
-      '.zip',
-      '.tar',
-      '.gz',
-      '.exe',
-      '.dll',
-      '.so',
-      '.class',
-      '.jar',
-      '.war',
-      '.7z',
-      '.doc',
-      '.docx',
-      '.xls',
-      '.xlsx',
-      '.ppt',
-      '.pptx',
-      '.odt',
-      '.ods',
-      '.odp',
-      '.bin',
-      '.dat',
-      '.obj',
-      '.o',
-      '.a',
-      '.lib',
-      '.wasm',
-      '.pyc',
-      '.pyo',
-    ].includes(ext)
-  ) {
+  if (BINARY_EXTENSIONS.includes(ext)) {
     return 'binary';
   }

@@ -196,18 +167,11 @@ export async function detectFileType(
   return 'text';
 }

-export enum FileErrorType {
-  FILE_NOT_FOUND = 'FILE_NOT_FOUND',
-  IS_DIRECTORY = 'IS_DIRECTORY',
-  FILE_TOO_LARGE = 'FILE_TOO_LARGE',
-  READ_ERROR = 'READ_ERROR',
-}
-
 export interface ProcessedFileReadResult {
   llmContent: PartUnion; // string for text, Part for image/pdf/unreadable binary
   returnDisplay: string;
   error?: string; // Optional error message for the LLM if file processing failed
-  errorType?: FileErrorType; // Structured error type using enum
+  errorType?: ToolErrorType; // Structured error type
   isTruncated?: boolean; // For text files, indicates if content was truncated
   originalLineCount?: number; // For text files
   linesShown?: [number, number]; // For text files [startLine, endLine] (1-based for display)

@@ -232,33 +196,32 @@ export async function processSingleFileContent(
     if (!fs.existsSync(filePath)) {
       // Sync check is acceptable before async read
       return {
-        llmContent: '',
+        llmContent:
+          'Could not read file because no file was found at the specified path.',
         returnDisplay: 'File not found.',
         error: `File not found: ${filePath}`,
-        errorType: FileErrorType.FILE_NOT_FOUND,
+        errorType: ToolErrorType.FILE_NOT_FOUND,
       };
     }
     const stats = await fs.promises.stat(filePath);
     if (stats.isDirectory()) {
       return {
-        llmContent: '',
+        llmContent:
+          'Could not read file because the provided path is a directory, not a file.',
         returnDisplay: 'Path is a directory.',
         error: `Path is a directory, not a file: ${filePath}`,
-        errorType: FileErrorType.IS_DIRECTORY,
+        errorType: ToolErrorType.TARGET_IS_DIRECTORY,
       };
     }

-    const fileSizeInBytes = stats.size;
-    // 20MB limit
-    const maxFileSize = 20 * 1024 * 1024;
-
-    if (fileSizeInBytes > maxFileSize) {
-      throw new Error(
-        `File size exceeds the 20MB limit: ${filePath} (${(
-          fileSizeInBytes /
-          (1024 * 1024)
-        ).toFixed(2)}MB)`,
-      );
-    }
+    const fileSizeInMB = stats.size / (1024 * 1024);
+    if (fileSizeInMB > 20) {
+      return {
+        llmContent: 'File size exceeds the 20MB limit.',
+        returnDisplay: 'File size exceeds the 20MB limit.',
+        error: `File size exceeds the 20MB limit: ${filePath} (${fileSizeInMB.toFixed(2)}MB)`,
+        errorType: ToolErrorType.FILE_TOO_LARGE,
+      };
+    }

     const fileType = await detectFileType(filePath);

@@ -373,6 +336,7 @@ export async function processSingleFileContent(
       llmContent: `Error reading file ${displayPath}: ${errorMessage}`,
       returnDisplay: `Error reading file ${displayPath}: ${errorMessage}`,
       error: `Error reading file ${filePath}: ${errorMessage}`,
+      errorType: ToolErrorType.READ_CONTENT_FAILURE,
     };
   }
 }

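Note the behavioral change in the oversize branch: it now returns a structured result instead of throwing, so callers can branch on errorType. A hypothetical consumer (a sketch; the real call sites are not shown in this diff):

import { ToolErrorType } from '../tools/tool-error.js';
import type { ProcessedFileReadResult } from './fileUtils.js';

export function describeResult(result: ProcessedFileReadResult): string {
  // FILE_TOO_LARGE no longer surfaces as an exception.
  if (result.errorType === ToolErrorType.FILE_TOO_LARGE) {
    return `Skipped: ${result.error}`;
  }
  if (result.error) {
    return `Failed: ${result.error}`;
  }
  return result.isTruncated
    ? `Read, truncated to lines ${result.linesShown?.join('-')}`
    : 'Read OK';
}
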
@@ -5,12 +5,13 @@
  */

 import { describe, it, expect, afterEach, vi, beforeEach } from 'vitest';
-import * as fs from 'fs/promises';
-import * as path from 'path';
+import * as fs from 'node:fs/promises';
+import * as path from 'node:path';
 import * as cache from './crawlCache.js';
 import { crawl } from './crawler.js';
 import { createTmpDir, cleanupTmpDir } from '@qwen-code/qwen-code-test-utils';
-import { Ignore, loadIgnoreRules } from './ignore.js';
+import type { Ignore } from './ignore.js';
+import { loadIgnoreRules } from './ignore.js';

 describe('crawler', () => {
   let tmpDir: string;

@@ -6,7 +6,7 @@

 import path from 'node:path';
 import { fdir } from 'fdir';
-import { Ignore } from './ignore.js';
+import type { Ignore } from './ignore.js';
 import * as cache from './crawlCache.js';

 export interface CrawlOptions {

@@ -32,6 +32,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -57,6 +58,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -84,6 +86,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -112,6 +115,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -144,6 +148,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -167,6 +172,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -201,6 +207,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -230,6 +237,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -259,6 +267,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     // Expect no errors to be thrown during initialization

@@ -285,6 +294,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -310,6 +320,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -318,6 +329,60 @@
     expect(results).toEqual(['src/style.css']);
   });

+  it('should not use fzf for fuzzy matching when disableFuzzySearch is true', async () => {
+    tmpDir = await createTmpDir({
+      src: {
+        'file1.js': '',
+        'flexible.js': '',
+        'other.ts': '',
+      },
+    });
+
+    const fileSearch = FileSearchFactory.create({
+      projectRoot: tmpDir,
+      useGitignore: false,
+      useGeminiignore: false,
+      ignoreDirs: [],
+      cache: false,
+      cacheTtl: 0,
+      enableRecursiveFileSearch: true,
+      disableFuzzySearch: true,
+    });
+
+    await fileSearch.initialize();
+    const results = await fileSearch.search('fle');
+
+    expect(results).toEqual(['src/flexible.js']);
+  });
+
+  it('should use fzf for fuzzy matching when disableFuzzySearch is false', async () => {
+    tmpDir = await createTmpDir({
+      src: {
+        'file1.js': '',
+        'flexible.js': '',
+        'other.ts': '',
+      },
+    });
+
+    const fileSearch = FileSearchFactory.create({
+      projectRoot: tmpDir,
+      useGitignore: false,
+      useGeminiignore: false,
+      ignoreDirs: [],
+      cache: false,
+      cacheTtl: 0,
+      enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
+    });
+
+    await fileSearch.initialize();
+    const results = await fileSearch.search('fle');
+
+    expect(results).toEqual(
+      expect.arrayContaining(['src/file1.js', 'src/flexible.js']),
+    );
+  });
+
   it('should return empty array when no matches are found', async () => {
     tmpDir = await createTmpDir({
       src: ['file1.js'],

@@ -331,6 +396,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -361,6 +427,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await expect(fileSearch.search('')).rejects.toThrow(

@@ -382,6 +449,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -404,6 +472,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -427,6 +496,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -463,6 +533,7 @@ describe('FileSearch', () => {
       cache: true, // Enable caching for this test
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -502,6 +573,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -545,6 +617,7 @@ describe('FileSearch', () => {
       cache: true, // Ensure caching is enabled
       cacheTtl: 10000,
       enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -566,6 +639,36 @@ describe('FileSearch', () => {
     expect(limitedResults).toEqual(['file1.js', 'file2.js']);
   });

+  it('should handle file paths with special characters that need escaping', async () => {
+    tmpDir = await createTmpDir({
+      src: {
+        'file with (special) chars.txt': '',
+        'another-file.txt': '',
+      },
+    });
+
+    const fileSearch = FileSearchFactory.create({
+      projectRoot: tmpDir,
+      useGitignore: false,
+      useGeminiignore: false,
+      ignoreDirs: [],
+      cache: false,
+      cacheTtl: 0,
+      enableRecursiveFileSearch: true,
+      disableFuzzySearch: false,
+    });
+
+    await fileSearch.initialize();
+
+    // Search for the file using a pattern that contains special characters.
+    // The `unescapePath` function should handle the escaped path correctly.
+    const results = await fileSearch.search(
+      'src/file with \\(special\\) chars.txt',
+    );
+
+    expect(results).toEqual(['src/file with (special) chars.txt']);
+  });
+
   describe('DirectoryFileSearch', () => {
     it('should search for files in the current directory', async () => {
       tmpDir = await createTmpDir({

@@ -582,6 +685,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: false,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -606,6 +710,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: false,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -630,6 +735,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: false,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -652,6 +758,7 @@ describe('FileSearch', () => {
       cache: false,
       cacheTtl: 0,
       enableRecursiveFileSearch: false,
+      disableFuzzySearch: false,
     });

     await fileSearch.initialize();

@@ -6,10 +6,13 @@

 import path from 'node:path';
 import picomatch from 'picomatch';
-import { Ignore, loadIgnoreRules } from './ignore.js';
+import type { Ignore } from './ignore.js';
+import { loadIgnoreRules } from './ignore.js';
 import { ResultCache } from './result-cache.js';
 import { crawl } from './crawler.js';
-import { AsyncFzf, FzfResultItem } from 'fzf';
+import type { FzfResultItem } from 'fzf';
+import { AsyncFzf } from 'fzf';
+import { unescapePath } from '../paths.js';

 export interface FileSearchOptions {
   projectRoot: string;

@@ -19,6 +22,7 @@ export interface FileSearchOptions {
   cache: boolean;
   cacheTtl: number;
   enableRecursiveFileSearch: boolean;
+  disableFuzzySearch: boolean;
   maxDepth?: number;
 }

@@ -112,11 +116,15 @@ class RecursiveFileSearch implements FileSearch {
     pattern: string,
     options: SearchOptions = {},
   ): Promise<string[]> {
-    if (!this.resultCache || !this.fzf || !this.ignore) {
+    if (
+      !this.resultCache ||
+      (!this.fzf && !this.options.disableFuzzySearch) ||
+      !this.ignore
+    ) {
       throw new Error('Engine not initialized. Call initialize() first.');
     }

-    pattern = pattern || '*';
+    pattern = unescapePath(pattern) || '*';

     let filteredCandidates;
     const { files: candidates, isExactMatch } =

@@ -127,7 +135,7 @@ class RecursiveFileSearch implements FileSearch {
       filteredCandidates = candidates;
     } else {
       let shouldCache = true;
-      if (pattern.includes('*')) {
+      if (pattern.includes('*') || !this.fzf) {
         filteredCandidates = await filter(candidates, pattern, options.signal);
       } else {
         filteredCandidates = await this.fzf

@@ -171,12 +179,14 @@ class RecursiveFileSearch implements FileSearch {

   private buildResultCache(): void {
     this.resultCache = new ResultCache(this.allFiles);
-    // The v1 algorithm is much faster since it only looks at the first
-    // occurence of the pattern. We use it for search spaces that have >20k
-    // files, because the v2 algorithm is just too slow in those cases.
-    this.fzf = new AsyncFzf(this.allFiles, {
-      fuzzy: this.allFiles.length > 20000 ? 'v1' : 'v2',
-    });
+    if (!this.options.disableFuzzySearch) {
+      // The v1 algorithm is much faster since it only looks at the first
+      // occurence of the pattern. We use it for search spaces that have >20k
+      // files, because the v2 algorithm is just too slow in those cases.
+      this.fzf = new AsyncFzf(this.allFiles, {
+        fuzzy: this.allFiles.length > 20000 ? 'v1' : 'v2',
+      });
+    }
   }
 }

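With disableFuzzySearch: true, buildResultCache() never constructs an AsyncFzf instance, and search() falls through to the plain filter() path via the new `!this.fzf` check. A minimal sketch of the fzf usage this guards, assuming the `fzf` package's AsyncFzf/find API:

import { AsyncFzf } from 'fzf';
import type { FzfResultItem } from 'fzf';

const files = ['src/file1.js', 'src/flexible.js', 'src/other.ts'];

// Mirror the >20k heuristic above: v1 is faster but only considers the first
// occurrence of the pattern; v2 ranks matches more carefully.
const fzf = new AsyncFzf(files, { fuzzy: files.length > 20000 ? 'v1' : 'v2' });

fzf.find('fle').then((results: Array<FzfResultItem<string>>) => {
  console.log(results.map((r) => r.item)); // fuzzy matches for 'fle'
});
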
@@ -6,7 +6,6 @@

 import { describe, it, expect } from 'vitest';
 import {
-  getResponseText,
   getResponseTextFromParts,
   getFunctionCalls,
   getFunctionCallsFromParts,

@@ -15,12 +14,12 @@ import {
   getStructuredResponse,
   getStructuredResponseFromParts,
 } from './generateContentResponseUtilities.js';
-import {
+import type {
   GenerateContentResponse,
   Part,
-  FinishReason,
   SafetyRating,
 } from '@google/genai';
+import { FinishReason } from '@google/genai';

 const mockTextPart = (text: string): Part => ({ text });
 const mockFunctionCallPart = (

@@ -69,45 +68,6 @@ const minimalMockResponse = (
 });

 describe('generateContentResponseUtilities', () => {
-  describe('getResponseText', () => {
-    it('should return undefined for no candidates', () => {
-      expect(getResponseText(minimalMockResponse(undefined))).toBeUndefined();
-    });
-    it('should return undefined for empty candidates array', () => {
-      expect(getResponseText(minimalMockResponse([]))).toBeUndefined();
-    });
-    it('should return undefined for no parts', () => {
-      const response = mockResponse([]);
-      expect(getResponseText(response)).toBeUndefined();
-    });
-    it('should extract text from a single text part', () => {
-      const response = mockResponse([mockTextPart('Hello')]);
-      expect(getResponseText(response)).toBe('Hello');
-    });
-    it('should concatenate text from multiple text parts', () => {
-      const response = mockResponse([
-        mockTextPart('Hello '),
-        mockTextPart('World'),
-      ]);
-      expect(getResponseText(response)).toBe('Hello World');
-    });
-    it('should ignore function call parts', () => {
-      const response = mockResponse([
-        mockTextPart('Hello '),
-        mockFunctionCallPart('testFunc'),
-        mockTextPart('World'),
-      ]);
-      expect(getResponseText(response)).toBe('Hello World');
-    });
-    it('should return undefined if only function call parts exist', () => {
-      const response = mockResponse([
-        mockFunctionCallPart('testFunc'),
-        mockFunctionCallPart('anotherFunc'),
-      ]);
-      expect(getResponseText(response)).toBeUndefined();
-    });
-  });

   describe('getResponseTextFromParts', () => {
     it('should return undefined for no parts', () => {
       expect(getResponseTextFromParts([])).toBeUndefined();

@@ -4,24 +4,12 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import { GenerateContentResponse, Part, FunctionCall } from '@google/genai';
-
-export function getResponseText(
-  response: GenerateContentResponse,
-): string | undefined {
-  const parts = response.candidates?.[0]?.content?.parts;
-  if (!parts) {
-    return undefined;
-  }
-  const textSegments = parts
-    .map((part) => part.text)
-    .filter((text): text is string => typeof text === 'string');
-
-  if (textSegments.length === 0) {
-    return undefined;
-  }
-  return textSegments.join('');
-}
+import type {
+  GenerateContentResponse,
+  Part,
+  FunctionCall,
+} from '@google/genai';
+import { getResponseText } from './partUtils.js';

 export function getResponseTextFromParts(parts: Part[]): string | undefined {
   if (!parts) {

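getResponseText itself now lives in partUtils.js and is re-imported above; the remaining part-level helpers keep their shape. A usage sketch based on the signatures visible in this diff:

import type { Part } from '@google/genai';
import { getResponseTextFromParts } from './generateContentResponseUtilities.js';

const parts: Part[] = [
  { text: 'Hello ' },
  { functionCall: { name: 'testFunc' } }, // non-text parts are skipped
  { text: 'World' },
];

console.log(getResponseTextFromParts(parts)); // 'Hello World'
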
@@ -5,12 +5,12 @@
  */

 import { describe, it, expect, beforeEach, afterEach } from 'vitest';
-import fsPromises from 'fs/promises';
-import * as nodePath from 'path';
-import * as os from 'os';
+import fsPromises from 'node:fs/promises';
+import * as nodePath from 'node:path';
+import * as os from 'node:os';
 import { getFolderStructure } from './getFolderStructure.js';
 import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
-import * as path from 'path';
+import * as path from 'node:path';

 describe('getFolderStructure', () => {
   let testRootDir: string;

@@ -4,12 +4,12 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import * as fs from 'fs/promises';
-import { Dirent } from 'fs';
-import * as path from 'path';
+import * as fs from 'node:fs/promises';
+import type { Dirent } from 'node:fs';
+import * as path from 'node:path';
 import { getErrorMessage, isNodeError } from './errors.js';
-import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
-import { FileFilteringOptions } from '../config/config.js';
+import type { FileDiscoveryService } from '../services/fileDiscoveryService.js';
+import type { FileFilteringOptions } from '../config/config.js';
 import { DEFAULT_FILE_FILTERING_OPTIONS } from '../config/config.js';

 const MAX_ITEMS = 20;

@@ -6,9 +6,9 @@

 import { describe, it, expect, beforeEach, afterEach } from 'vitest';
 import { GitIgnoreParser } from './gitIgnoreParser.js';
-import * as fs from 'fs/promises';
-import * as path from 'path';
-import * as os from 'os';
+import * as fs from 'node:fs/promises';
+import * as path from 'node:path';
+import * as os from 'node:os';

 describe('GitIgnoreParser', () => {
   let parser: GitIgnoreParser;

@@ -4,8 +4,8 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import * as fs from 'fs';
-import * as path from 'path';
+import * as fs from 'node:fs';
+import * as path from 'node:path';
 import ignore, { type Ignore } from 'ignore';
 import { isGitRepository } from './gitUtils.js';

@@ -4,8 +4,8 @@
  * SPDX-License-Identifier: Apache-2.0
  */

-import * as fs from 'fs';
-import * as path from 'path';
+import * as fs from 'node:fs';
+import * as path from 'node:path';

 /**
  * Checks if a directory is within a git repository

packages/core/src/utils/ignorePatterns.test.ts (new file, 319 lines)
@@ -0,0 +1,319 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi } from 'vitest';
import {
  FileExclusions,
  BINARY_EXTENSIONS,
  extractExtensionsFromPatterns,
} from './ignorePatterns.js';
import type { Config } from '../config/config.js';

// Mock the memoryTool module
vi.mock('../tools/memoryTool.js', () => ({
  getCurrentGeminiMdFilename: vi.fn(() => 'GEMINI.md'),
}));

describe('FileExclusions', () => {
  describe('getCoreIgnorePatterns', () => {
    it('should return basic ignore patterns', () => {
      const excluder = new FileExclusions();
      const patterns = excluder.getCoreIgnorePatterns();

      expect(patterns).toContain('**/node_modules/**');
      expect(patterns).toContain('**/.git/**');
      expect(patterns).toContain('**/bower_components/**');
      expect(patterns).toContain('**/.svn/**');
      expect(patterns).toContain('**/.hg/**');
      expect(patterns).toHaveLength(5);
    });
  });

  describe('getDefaultExcludePatterns', () => {
    it('should return comprehensive patterns by default', () => {
      const excluder = new FileExclusions();
      const patterns = excluder.getDefaultExcludePatterns();

      // Should include core patterns
      expect(patterns).toContain('**/node_modules/**');
      expect(patterns).toContain('**/.git/**');

      // Should include directory excludes
      expect(patterns).toContain('**/.vscode/**');
      expect(patterns).toContain('**/dist/**');
      expect(patterns).toContain('**/build/**');

      // Should include binary patterns
      expect(patterns).toContain('**/*.exe');
      expect(patterns).toContain('**/*.jar');

      // Should include system files
      expect(patterns).toContain('**/.DS_Store');
      expect(patterns).toContain('**/.env');

      // Should include dynamic patterns
      expect(patterns).toContain('**/GEMINI.md');
    });

    it('should respect includeDefaults option', () => {
      const excluder = new FileExclusions();
      const patterns = excluder.getDefaultExcludePatterns({
        includeDefaults: false,
        includeDynamicPatterns: false,
      });

      expect(patterns).not.toContain('**/node_modules/**');
      expect(patterns).not.toContain('**/.git/**');
      expect(patterns).not.toContain('**/GEMINI.md');
      expect(patterns).toHaveLength(0);
    });

    it('should include custom patterns', () => {
      const excluder = new FileExclusions();
      const patterns = excluder.getDefaultExcludePatterns({
        customPatterns: ['**/custom/**', '**/*.custom'],
      });

      expect(patterns).toContain('**/custom/**');
      expect(patterns).toContain('**/*.custom');
    });

    it('should include runtime patterns', () => {
      const excluder = new FileExclusions();
      const patterns = excluder.getDefaultExcludePatterns({
        runtimePatterns: ['**/temp/**', '**/*.tmp'],
      });

      expect(patterns).toContain('**/temp/**');
      expect(patterns).toContain('**/*.tmp');
    });

    it('should respect includeDynamicPatterns option', () => {
      const excluder = new FileExclusions();
      const patternsWithDynamic = excluder.getDefaultExcludePatterns({
        includeDynamicPatterns: true,
      });
      const patternsWithoutDynamic = excluder.getDefaultExcludePatterns({
        includeDynamicPatterns: false,
      });

      expect(patternsWithDynamic).toContain('**/GEMINI.md');
      expect(patternsWithoutDynamic).not.toContain('**/GEMINI.md');
    });
  });

  describe('getReadManyFilesExcludes', () => {
    it('should provide legacy compatibility', () => {
      const excluder = new FileExclusions();
      const patterns = excluder.getReadManyFilesExcludes(['**/*.log']);

      // Should include all default patterns
      expect(patterns).toContain('**/node_modules/**');
      expect(patterns).toContain('**/.git/**');
      expect(patterns).toContain('**/GEMINI.md');

      // Should include additional excludes
      expect(patterns).toContain('**/*.log');
    });
  });

  describe('getGlobExcludes', () => {
    it('should return core patterns for glob operations', () => {
      const excluder = new FileExclusions();
      const patterns = excluder.getGlobExcludes();

      expect(patterns).toContain('**/node_modules/**');
      expect(patterns).toContain('**/.git/**');
      expect(patterns).toContain('**/bower_components/**');
      expect(patterns).toContain('**/.svn/**');
      expect(patterns).toContain('**/.hg/**');

      // Should not include comprehensive patterns by default
      expect(patterns).toHaveLength(5);
    });

    it('should include additional excludes', () => {
      const excluder = new FileExclusions();
      const patterns = excluder.getGlobExcludes(['**/temp/**']);

      expect(patterns).toContain('**/node_modules/**');
      expect(patterns).toContain('**/.git/**');
      expect(patterns).toContain('**/temp/**');
    });
  });

  describe('with Config', () => {
    it('should use config custom excludes when available', () => {
      const mockConfig = {
        getCustomExcludes: vi.fn(() => ['**/config-exclude/**']),
      } as unknown as Config;

      const excluder = new FileExclusions(mockConfig);
      const patterns = excluder.getDefaultExcludePatterns();

      expect(patterns).toContain('**/config-exclude/**');
      expect(mockConfig.getCustomExcludes).toHaveBeenCalled();
    });

    it('should handle config without getCustomExcludes method', () => {
      const mockConfig = {} as Config;

      const excluder = new FileExclusions(mockConfig);
      const patterns = excluder.getDefaultExcludePatterns();

      // Should not throw and should include default patterns
      expect(patterns).toContain('**/node_modules/**');
      expect(patterns.length).toBeGreaterThan(0);
    });

    it('should include config custom excludes in glob patterns', () => {
      const mockConfig = {
        getCustomExcludes: vi.fn(() => ['**/config-glob/**']),
      } as unknown as Config;

      const excluder = new FileExclusions(mockConfig);
      const patterns = excluder.getGlobExcludes();

      expect(patterns).toContain('**/node_modules/**');
      expect(patterns).toContain('**/.git/**');
      expect(patterns).toContain('**/config-glob/**');
    });
  });

  describe('buildExcludePatterns', () => {
    it('should be an alias for getDefaultExcludePatterns', () => {
      const excluder = new FileExclusions();
      const options = {
        includeDefaults: true,
        customPatterns: ['**/test/**'],
        runtimePatterns: ['**/runtime/**'],
      };

      const defaultPatterns = excluder.getDefaultExcludePatterns(options);
      const buildPatterns = excluder.buildExcludePatterns(options);

      expect(buildPatterns).toEqual(defaultPatterns);
    });
  });
});

describe('BINARY_EXTENSIONS', () => {
  it('should include common binary file extensions', () => {
    expect(BINARY_EXTENSIONS).toContain('.exe');
    expect(BINARY_EXTENSIONS).toContain('.dll');
    expect(BINARY_EXTENSIONS).toContain('.jar');
    expect(BINARY_EXTENSIONS).toContain('.zip');
  });

  it('should include additional binary extensions', () => {
    expect(BINARY_EXTENSIONS).toContain('.dat');
    expect(BINARY_EXTENSIONS).toContain('.obj');
    expect(BINARY_EXTENSIONS).toContain('.wasm');
  });

  it('should include media file extensions', () => {
    expect(BINARY_EXTENSIONS).toContain('.pdf');
    expect(BINARY_EXTENSIONS).toContain('.png');
    expect(BINARY_EXTENSIONS).toContain('.jpg');
  });

  it('should be sorted', () => {
    const sortedExtensions = [...BINARY_EXTENSIONS].sort();
    expect(BINARY_EXTENSIONS).toEqual(sortedExtensions);
  });

  it('should not contain invalid extensions from brace patterns', () => {
    // If brace expansion was not handled correctly, we would see invalid extensions like '.{jpg,png}'
    const invalidExtensions = BINARY_EXTENSIONS.filter(
      (ext) => ext.includes('{') || ext.includes('}'),
    );
    expect(invalidExtensions).toHaveLength(0);
  });
});

describe('extractExtensionsFromPatterns', () => {
  it('should extract simple extensions', () => {
    const patterns = ['**/*.exe', '**/*.jar', '**/*.zip'];
    const result = extractExtensionsFromPatterns(patterns);

    expect(result).toEqual(['.exe', '.jar', '.zip']);
  });

  it('should handle brace expansion patterns', () => {
    const patterns = ['**/*.{js,ts}', '**/*.{jpg,png}'];
    const result = extractExtensionsFromPatterns(patterns);

    expect(result).toContain('.js');
    expect(result).toContain('.ts');
    expect(result).toContain('.jpg');
    expect(result).toContain('.png');
    expect(result).not.toContain('.{js,ts}');
    expect(result).not.toContain('.{jpg,png}');
  });

  it('should combine simple and brace expansion patterns', () => {
    const patterns = ['**/*.exe', '**/*.{js,ts}', '**/*.pdf'];
    const result = extractExtensionsFromPatterns(patterns);

    expect(result).toContain('.exe');
    expect(result).toContain('.js');
    expect(result).toContain('.ts');
    expect(result).toContain('.pdf');
  });

  it('should handle empty brace expansion', () => {
    const patterns = ['**/*.{}', '**/*.{,}'];
    const result = extractExtensionsFromPatterns(patterns);

    // Empty extensions should be filtered out
    expect(result).toHaveLength(0);
  });

  it('should ignore invalid patterns', () => {
    const patterns = ['no-asterisk.exe', '**/*no-dot', '**/*.{unclosed'];
    const result = extractExtensionsFromPatterns(patterns);

    expect(result).toHaveLength(0);
  });

  it('should remove duplicates and sort results', () => {
    const patterns = ['**/*.js', '**/*.{js,ts}', '**/*.ts'];
    const result = extractExtensionsFromPatterns(patterns);

    expect(result).toEqual(['.js', '.ts']);
  });

  it('should handle complex brace patterns with multiple extensions', () => {
    const patterns = ['**/*.{html,css,js,jsx,ts,tsx}'];
    const result = extractExtensionsFromPatterns(patterns);

    expect(result).toEqual(['.css', '.html', '.js', '.jsx', '.ts', '.tsx']);
  });

  it('should handle compound extensions correctly using path.extname', () => {
    const patterns = ['**/*.tar.gz', '**/*.min.js', '**/*.d.ts'];
    const result = extractExtensionsFromPatterns(patterns);

    // Should extract the final extension part only
    expect(result).toEqual(['.gz', '.js', '.ts']);
  });

  it('should handle dotfiles correctly', () => {
    const patterns = ['**/*.gitignore', '**/*.profile', '**/*.bashrc'];
    const result = extractExtensionsFromPatterns(patterns);

    // Dotfiles should be extracted properly
    expect(result).toEqual(['.bashrc', '.gitignore', '.profile']);
  });

  it('should handle edge cases with path.extname', () => {
    const patterns = ['**/*.hidden.', '**/*.config.json'];
    const result = extractExtensionsFromPatterns(patterns);

    // Should handle edge cases properly (trailing dots are filtered out)
    expect(result).toEqual(['.json']);
  });
});

packages/core/src/utils/ignorePatterns.ts (new file, 276 lines)
@@ -0,0 +1,276 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import path from 'node:path';
import type { Config } from '../config/config.js';
import { getCurrentGeminiMdFilename } from '../tools/memoryTool.js';

/**
 * Common ignore patterns used across multiple tools for basic exclusions.
 * These are the most commonly ignored directories in development projects.
 */
export const COMMON_IGNORE_PATTERNS: string[] = [
  '**/node_modules/**',
  '**/.git/**',
  '**/bower_components/**',
  '**/.svn/**',
  '**/.hg/**',
];

/**
 * Binary file extension patterns that are typically excluded from text processing.
 */
export const BINARY_FILE_PATTERNS: string[] = [
  '**/*.bin',
  '**/*.exe',
  '**/*.dll',
  '**/*.so',
  '**/*.dylib',
  '**/*.class',
  '**/*.jar',
  '**/*.war',
  '**/*.zip',
  '**/*.tar',
  '**/*.gz',
  '**/*.bz2',
  '**/*.rar',
  '**/*.7z',
  '**/*.doc',
  '**/*.docx',
  '**/*.xls',
  '**/*.xlsx',
  '**/*.ppt',
  '**/*.pptx',
  '**/*.odt',
  '**/*.ods',
  '**/*.odp',
];

/**
 * Media file patterns that require special handling in tools like read-many-files.
 * These files can be processed as inlineData when explicitly requested.
 */
export const MEDIA_FILE_PATTERNS: string[] = [
  '**/*.pdf',
  '**/*.png',
  '**/*.jpg',
  '**/*.jpeg',
  '**/*.gif',
  '**/*.webp',
  '**/*.bmp',
  '**/*.svg',
];

/**
 * Common directory patterns that are typically ignored in development projects.
 */
export const COMMON_DIRECTORY_EXCLUDES: string[] = [
  '**/.vscode/**',
  '**/.idea/**',
  '**/dist/**',
  '**/build/**',
  '**/coverage/**',
  '**/__pycache__/**',
];

/**
 * Python-specific patterns.
 */
export const PYTHON_EXCLUDES: string[] = ['**/*.pyc', '**/*.pyo'];

/**
 * System and environment file patterns.
 */
export const SYSTEM_FILE_EXCLUDES: string[] = ['**/.DS_Store', '**/.env'];

/**
 * Comprehensive file exclusion patterns combining all common ignore patterns.
 * These patterns are compatible with glob ignore patterns.
 * Note: Media files (PDF, images) are not excluded here as they need special handling in read-many-files.
 */
export const DEFAULT_FILE_EXCLUDES: string[] = [
  ...COMMON_IGNORE_PATTERNS,
  ...COMMON_DIRECTORY_EXCLUDES,
  ...BINARY_FILE_PATTERNS,
  ...PYTHON_EXCLUDES,
  ...SYSTEM_FILE_EXCLUDES,
];

/**
 * Options for configuring file exclusion patterns.
 */
export interface ExcludeOptions {
  /**
   * Whether to include default exclusion patterns. Defaults to true.
   */
  includeDefaults?: boolean;

  /**
   * Additional custom patterns from configuration.
   */
  customPatterns?: string[];

  /**
   * Additional patterns provided at runtime (e.g., from CLI arguments).
   */
  runtimePatterns?: string[];

  /**
   * Whether to include dynamic patterns like the current Gemini MD filename. Defaults to true.
   */
  includeDynamicPatterns?: boolean;
}

/**
 * Centralized file exclusion utility that provides configurable and extensible
 * file exclusion patterns for different tools and use cases.
 */
export class FileExclusions {
  constructor(private config?: Config) {}

  /**
   * Gets core ignore patterns for basic file operations like glob.
   * These are the minimal essential patterns that should almost always be excluded.
   */
  getCoreIgnorePatterns(): string[] {
    return [...COMMON_IGNORE_PATTERNS];
  }

  /**
   * Gets comprehensive default exclusion patterns for operations like read-many-files.
   * Includes all standard exclusions: directories, binary files, system files, etc.
   */
  getDefaultExcludePatterns(options: ExcludeOptions = {}): string[] {
    const {
      includeDefaults = true,
      customPatterns = [],
      runtimePatterns = [],
      includeDynamicPatterns = true,
    } = options;

    const patterns: string[] = [];

    // Add base defaults if requested
    if (includeDefaults) {
      patterns.push(...DEFAULT_FILE_EXCLUDES);
    }

    // Add dynamic patterns (like current Gemini MD filename)
    if (includeDynamicPatterns) {
      patterns.push(`**/${getCurrentGeminiMdFilename()}`);
    }

    // Add custom patterns from configuration
    // TODO: getCustomExcludes method needs to be implemented in Config interface
    if (this.config) {
      const configCustomExcludes = this.config.getCustomExcludes?.() ?? [];
      patterns.push(...configCustomExcludes);
    }

    // Add user-provided custom patterns
    patterns.push(...customPatterns);

    // Add runtime patterns (e.g., from CLI)
    patterns.push(...runtimePatterns);

    return patterns;
  }

  /**
   * Gets exclude patterns for read-many-files tool with legacy compatibility.
   * This maintains the same behavior as the previous getDefaultExcludes() function.
   */
  getReadManyFilesExcludes(additionalExcludes: string[] = []): string[] {
    return this.getDefaultExcludePatterns({
      includeDefaults: true,
      runtimePatterns: additionalExcludes,
      includeDynamicPatterns: true,
    });
  }

  /**
   * Gets exclude patterns for glob tool operations.
   * Uses core patterns by default but can be extended with additional patterns.
   */
  getGlobExcludes(additionalExcludes: string[] = []): string[] {
    const corePatterns = this.getCoreIgnorePatterns();

    // Add any custom patterns from config if available
    // TODO: getCustomExcludes method needs to be implemented in Config interface
    const configPatterns = this.config?.getCustomExcludes?.() ?? [];

    return [...corePatterns, ...configPatterns, ...additionalExcludes];
  }

  /**
   * Builds exclude patterns with full customization options.
   * This is the most flexible method for advanced use cases.
   */
  buildExcludePatterns(options: ExcludeOptions): string[] {
    return this.getDefaultExcludePatterns(options);
  }
}

/**
 * Extracts file extensions from glob patterns.
 * Converts patterns like `**\/*.exe` to `.exe`.
 * Handles brace expansion like `**\/*.{js,ts}` to `.js` and `.ts`.
 */
export function extractExtensionsFromPatterns(patterns: string[]): string[] {
  const extensions = new Set(
    patterns
      .filter((pattern) => pattern.includes('*.'))
      .flatMap((pattern) => {
        const extPart = pattern.substring(pattern.lastIndexOf('*.') + 1);
        // Handle brace expansion e.g. `**/*.{jpg,png}`
        if (extPart.startsWith('.{') && extPart.endsWith('}')) {
          const inner = extPart.slice(2, -1); // get 'jpg,png'
          return inner
            .split(',')
            .map((ext) => `.${ext.trim()}`)
            .filter((ext) => ext !== '.');
        }
        // Handle simple/compound/dotfile extensions
        if (
          extPart.startsWith('.') &&
          !extPart.includes('/') &&
          !extPart.includes('{') &&
          !extPart.includes('}')
        ) {
          // Using path.extname on a dummy file handles various cases like
          // '.tar.gz' -> '.gz' and '.profile' -> '.profile' correctly.
          const extracted = path.extname(`dummy${extPart}`);
          // If extname returns empty (e.g. for '.'), use the original part.
          // Then filter out empty or '.' results and invalid double dot patterns.
          const result = extracted || extPart;
          return result && result !== '.' && !result.substring(1).includes('.')
            ? [result]
            : [];
        }
        return [];
      }),
  );
  return Array.from(extensions).sort();
}

/**
 * Binary file extensions extracted from BINARY_FILE_PATTERNS for quick lookup.
 * Additional extensions not covered by the patterns are included for completeness.
 */
export const BINARY_EXTENSIONS: string[] = [
  ...extractExtensionsFromPatterns([
    ...BINARY_FILE_PATTERNS,
    ...MEDIA_FILE_PATTERNS,
    ...PYTHON_EXCLUDES,
  ]),
  // Additional binary extensions not in the main patterns
  '.dat',
  '.obj',
  '.o',
  '.a',
  '.lib',
  '.wasm',
].sort();

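Typical usage, mirroring the assertions in ignorePatterns.test.ts (a sketch, not code from this commit):

import {
  FileExclusions,
  BINARY_EXTENSIONS,
  extractExtensionsFromPatterns,
} from './ignorePatterns.js';

const excluder = new FileExclusions(); // no Config: defaults only

// Minimal excludes for glob-style tools: the five core patterns plus extras.
console.log(excluder.getGlobExcludes(['**/temp/**']).length); // 6

// Comprehensive excludes for read-many-files, plus a runtime pattern.
console.log(excluder.getReadManyFilesExcludes(['**/*.log']).includes('**/*.log')); // true

// Brace expansion is flattened; results are deduplicated and sorted.
console.log(extractExtensionsFromPatterns(['**/*.{js,ts}', '**/*.ts'])); // ['.js', '.ts']

// detectFileType's stricter extension check is now a membership test:
console.log(BINARY_EXTENSIONS.includes('.wasm')); // true
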
packages/core/src/utils/installationManager.test.ts (new file, 103 lines)
@@ -0,0 +1,103 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import type { Mock } from 'vitest';
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import { InstallationManager } from './installationManager.js';
import * as fs from 'node:fs';
import * as os from 'node:os';
import path from 'node:path';
import { randomUUID } from 'node:crypto';

vi.mock('node:fs', async (importOriginal) => {
  const actual = await importOriginal<typeof import('node:fs')>();
  return {
    ...actual,
    readFileSync: vi.fn(actual.readFileSync),
    existsSync: vi.fn(actual.existsSync),
  } as typeof actual;
});

vi.mock('os', async (importOriginal) => {
  const os = await importOriginal<typeof import('os')>();
  return {
    ...os,
    homedir: vi.fn(),
  };
});

vi.mock('crypto', async (importOriginal) => {
  const crypto = await importOriginal<typeof import('crypto')>();
  return {
    ...crypto,
    randomUUID: vi.fn(),
  };
});

describe('InstallationManager', () => {
  let tempHomeDir: string;
  let installationManager: InstallationManager;
  const installationIdFile = () =>
    path.join(tempHomeDir, '.qwen', 'installation_id');

  beforeEach(() => {
    tempHomeDir = fs.mkdtempSync(
      path.join(os.tmpdir(), 'gemini-cli-test-home-'),
    );
    (os.homedir as Mock).mockReturnValue(tempHomeDir);
    installationManager = new InstallationManager();
  });

  afterEach(() => {
    fs.rmSync(tempHomeDir, { recursive: true, force: true });
    vi.clearAllMocks();
  });

  describe('getInstallationId', () => {
    it('should create and write a new installation ID if one does not exist', () => {
      const newId = 'new-uuid-123';
      (randomUUID as Mock).mockReturnValue(newId);

      const installationId = installationManager.getInstallationId();

      expect(installationId).toBe(newId);
      expect(fs.existsSync(installationIdFile())).toBe(true);
      expect(fs.readFileSync(installationIdFile(), 'utf-8')).toBe(newId);
    });

    it('should read an existing installation ID from a file', () => {
      const existingId = 'existing-uuid-123';
      fs.mkdirSync(path.dirname(installationIdFile()), { recursive: true });
      fs.writeFileSync(installationIdFile(), existingId);

      const installationId = installationManager.getInstallationId();

      expect(installationId).toBe(existingId);
    });

    it('should return the same ID on subsequent calls', () => {
      const firstId = installationManager.getInstallationId();
      const secondId = installationManager.getInstallationId();
      expect(secondId).toBe(firstId);
    });

    it('should handle read errors and return a fallback ID', () => {
      vi.mocked(fs.existsSync).mockReturnValueOnce(true);
      const readSpy = vi.mocked(fs.readFileSync);
      readSpy.mockImplementationOnce(() => {
        throw new Error('Read error');
      });
      const consoleErrorSpy = vi
        .spyOn(console, 'error')
        .mockImplementation(() => {});

      const id = installationManager.getInstallationId();

      expect(id).toBe('123456789');
      expect(consoleErrorSpy).toHaveBeenCalled();
    });
  });
});

packages/core/src/utils/installationManager.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import * as path from 'node:path';
|
||||
import { Storage } from '../config/storage.js';
|
||||
|
||||
export class InstallationManager {
|
||||
private getInstallationIdPath(): string {
|
||||
return Storage.getInstallationIdPath();
|
||||
}
|
||||
|
||||
private readInstallationIdFromFile(): string | null {
|
||||
const installationIdFile = this.getInstallationIdPath();
|
||||
if (fs.existsSync(installationIdFile)) {
|
||||
const installationid = fs
|
||||
.readFileSync(installationIdFile, 'utf-8')
|
||||
.trim();
|
||||
return installationid || null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private writeInstallationIdToFile(installationId: string) {
|
||||
const installationIdFile = this.getInstallationIdPath();
|
||||
const dir = path.dirname(installationIdFile);
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
fs.writeFileSync(installationIdFile, installationId, 'utf-8');
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the installation ID from a file, creating it if it doesn't exist.
|
||||
* This ID is used for unique user installation tracking.
|
||||
* @returns A UUID string for the user.
|
||||
*/
|
||||
getInstallationId(): string {
|
||||
try {
|
||||
let installationId = this.readInstallationIdFromFile();
|
||||
|
||||
if (!installationId) {
|
||||
installationId = randomUUID();
|
||||
this.writeInstallationIdToFile(installationId);
|
||||
}
|
||||
|
||||
return installationId;
|
||||
} catch (error) {
|
||||
console.error(
|
||||
'Error accessing installation ID file, generating ephemeral ID:',
|
||||
error,
|
||||
);
|
||||
return '123456789';
|
||||
}
|
||||
}
|
||||
}
|
||||
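
For orientation, a minimal usage sketch of the new class (not part of the diff; the import path is an assumption). The first call persists a fresh UUID under <home>/.qwen/installation_id, as the tests above assert, and later calls read the same value back:

    // Illustrative usage only.
    import { InstallationManager } from './installationManager.js';

    const manager = new InstallationManager();
    const installationId = manager.getInstallationId();
    console.log(`installation id: ${installationId}`);
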
103  packages/core/src/utils/language-detection.ts  Normal file
@@ -0,0 +1,103 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import * as path from 'node:path';

const extensionToLanguageMap: { [key: string]: string } = {
  '.ts': 'TypeScript',
  '.js': 'JavaScript',
  '.mjs': 'JavaScript',
  '.cjs': 'JavaScript',
  '.jsx': 'JavaScript',
  '.tsx': 'TypeScript',
  '.py': 'Python',
  '.java': 'Java',
  '.go': 'Go',
  '.rb': 'Ruby',
  '.php': 'PHP',
  '.phtml': 'PHP',
  '.cs': 'C#',
  '.cpp': 'C++',
  '.cxx': 'C++',
  '.cc': 'C++',
  '.c': 'C',
  '.h': 'C/C++',
  '.hpp': 'C++',
  '.swift': 'Swift',
  '.kt': 'Kotlin',
  '.rs': 'Rust',
  '.m': 'Objective-C',
  '.mm': 'Objective-C',
  '.pl': 'Perl',
  '.pm': 'Perl',
  '.lua': 'Lua',
  '.r': 'R',
  '.scala': 'Scala',
  '.sc': 'Scala',
  '.sh': 'Shell',
  '.ps1': 'PowerShell',
  '.bat': 'Batch',
  '.cmd': 'Batch',
  '.sql': 'SQL',
  '.html': 'HTML',
  '.htm': 'HTML',
  '.css': 'CSS',
  '.less': 'Less',
  '.sass': 'Sass',
  '.scss': 'Sass',
  '.json': 'JSON',
  '.xml': 'XML',
  '.yaml': 'YAML',
  '.yml': 'YAML',
  '.md': 'Markdown',
  '.markdown': 'Markdown',
  '.dockerfile': 'Dockerfile',
  '.vim': 'Vim script',
  '.vb': 'Visual Basic',
  '.fs': 'F#',
  '.clj': 'Clojure',
  '.cljs': 'Clojure',
  '.dart': 'Dart',
  '.ex': 'Elixir',
  '.erl': 'Erlang',
  '.hs': 'Haskell',
  '.lisp': 'Lisp',
  '.rkt': 'Racket',
  '.groovy': 'Groovy',
  '.jl': 'Julia',
  '.tex': 'LaTeX',
  '.ino': 'Arduino',
  '.asm': 'Assembly',
  '.s': 'Assembly',
  '.toml': 'TOML',
  '.vue': 'Vue',
  '.svelte': 'Svelte',
  '.gohtml': 'Go Template',
  '.hbs': 'Handlebars',
  '.ejs': 'EJS',
  '.erb': 'ERB',
  '.jsp': 'JSP',
  '.dockerignore': 'Docker',
  '.gitignore': 'Git',
  '.npmignore': 'npm',
  '.editorconfig': 'EditorConfig',
  '.prettierrc': 'Prettier',
  '.eslintrc': 'ESLint',
  '.babelrc': 'Babel',
  '.tsconfig': 'TypeScript',
  '.flow': 'Flow',
  '.graphql': 'GraphQL',
  '.proto': 'Protocol Buffers',
};

export function getLanguageFromFilePath(filePath: string): string | undefined {
  const extension = path.extname(filePath).toLowerCase();
  if (extension) {
    return extensionToLanguageMap[extension];
  }
  const filename = path.basename(filePath).toLowerCase();
  return extensionToLanguageMap[`.${filename}`];
}
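
A quick sketch of the lookup behavior (illustrative calls, not part of the diff): the file extension is tried first, and for extensionless files the dot-prefixed basename serves as the fallback key:

    import { getLanguageFromFilePath } from './language-detection.js';

    getLanguageFromFilePath('src/App.tsx');   // 'TypeScript'
    getLanguageFromFilePath('build.sh');      // 'Shell'
    getLanguageFromFilePath('Dockerfile');    // no extension -> key '.dockerfile' -> 'Dockerfile'
    getLanguageFromFilePath('notes.unknown'); // undefined
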
@@ -5,9 +5,9 @@
 */

import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fsPromises from 'fs/promises';
import * as os from 'os';
import * as path from 'path';
import * as fsPromises from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { loadServerHierarchicalMemory } from './memoryDiscovery.js';
import {
  GEMINI_CONFIG_DIR,
@@ -368,4 +368,75 @@ describe('loadServerHierarchicalMemory', () => {
      fileCount: 1,
    });
  });

  it('should handle multiple directories and files in parallel correctly', async () => {
    // Create multiple test directories with GEMINI.md files
    const numDirs = 5;
    const createdFiles: string[] = [];

    for (let i = 0; i < numDirs; i++) {
      const dirPath = await createEmptyDir(
        path.join(testRootDir, `project-${i}`),
      );
      const filePath = await createTestFile(
        path.join(dirPath, DEFAULT_CONTEXT_FILENAME),
        `Content from project ${i}`,
      );
      createdFiles.push(filePath);
    }

    // Load memory from all directories
    const result = await loadServerHierarchicalMemory(
      cwd,
      createdFiles.map((f) => path.dirname(f)),
      false,
      new FileDiscoveryService(projectRoot),
    );

    // Should have loaded all files
    expect(result.fileCount).toBe(numDirs);

    // Content should include all project contents
    for (let i = 0; i < numDirs; i++) {
      expect(result.memoryContent).toContain(`Content from project ${i}`);
    }
  });

  it('should preserve order and prevent duplicates when processing multiple directories', async () => {
    // Create overlapping directory structure
    const parentDir = await createEmptyDir(path.join(testRootDir, 'parent'));
    const childDir = await createEmptyDir(path.join(parentDir, 'child'));

    await createTestFile(
      path.join(parentDir, DEFAULT_CONTEXT_FILENAME),
      'Parent content',
    );
    await createTestFile(
      path.join(childDir, DEFAULT_CONTEXT_FILENAME),
      'Child content',
    );

    // Include both parent and child directories
    const result = await loadServerHierarchicalMemory(
      parentDir,
      [childDir, parentDir], // Deliberately include duplicates
      false,
      new FileDiscoveryService(projectRoot),
    );

    // Should have both files without duplicates
    expect(result.fileCount).toBe(2);
    expect(result.memoryContent).toContain('Parent content');
    expect(result.memoryContent).toContain('Child content');

    // Check that files are not duplicated
    const parentOccurrences = (
      result.memoryContent.match(/Parent content/g) || []
    ).length;
    const childOccurrences = (
      result.memoryContent.match(/Child content/g) || []
    ).length;
    expect(parentOccurrences).toBe(1);
    expect(childOccurrences).toBe(1);
  });
});

@@ -4,21 +4,19 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import * as fs from 'fs/promises';
import * as fsSync from 'fs';
import * as path from 'path';
import { homedir } from 'os';
import * as fs from 'node:fs/promises';
import * as fsSync from 'node:fs';
import * as path from 'node:path';
import { homedir } from 'node:os';
import { bfsFileSearch } from './bfsFileSearch.js';
import {
  GEMINI_CONFIG_DIR,
  getAllGeminiMdFilenames,
} from '../tools/memoryTool.js';
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
import type { FileDiscoveryService } from '../services/fileDiscoveryService.js';
import { processImports } from './memoryImportProcessor.js';
import {
  DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
  FileFilteringOptions,
} from '../config/config.js';
import type { FileFilteringOptions } from '../config/config.js';
import { DEFAULT_MEMORY_FILE_FILTERING_OPTIONS } from '../config/config.js';

// Simple console logger, similar to the one previously in CLI's config.ts
// TODO: Integrate with a more robust server-side logger if available/appropriate.
@@ -96,19 +94,41 @@ async function getGeminiMdFilePathsInternal(
    ...includeDirectoriesToReadGemini,
    currentWorkingDirectory,
  ]);
  const paths = [];
  for (const dir of dirs) {
    const pathsByDir = await getGeminiMdFilePathsInternalForEachDir(
      dir,
      userHomePath,
      debugMode,
      fileService,
      extensionContextFilePaths,
      fileFilteringOptions,
      maxDirs,

  // Process directories in parallel with concurrency limit to prevent EMFILE errors
  const CONCURRENT_LIMIT = 10;
  const dirsArray = Array.from(dirs);
  const pathsArrays: string[][] = [];

  for (let i = 0; i < dirsArray.length; i += CONCURRENT_LIMIT) {
    const batch = dirsArray.slice(i, i + CONCURRENT_LIMIT);
    const batchPromises = batch.map((dir) =>
      getGeminiMdFilePathsInternalForEachDir(
        dir,
        userHomePath,
        debugMode,
        fileService,
        extensionContextFilePaths,
        fileFilteringOptions,
        maxDirs,
      ),
    );
    paths.push(...pathsByDir);

    const batchResults = await Promise.allSettled(batchPromises);

    for (const result of batchResults) {
      if (result.status === 'fulfilled') {
        pathsArrays.push(result.value);
      } else {
        const error = result.reason;
        const message = error instanceof Error ? error.message : String(error);
        logger.error(`Error discovering files in directory: ${message}`);
        // Continue processing other directories
      }
    }
  }

  const paths = pathsArrays.flat();
  return Array.from(new Set<string>(paths));
}

@@ -245,39 +265,63 @@ async function readGeminiMdFiles(
  debugMode: boolean,
  importFormat: 'flat' | 'tree' = 'tree',
): Promise<GeminiFileContent[]> {
  // Process files in parallel with concurrency limit to prevent EMFILE errors
  const CONCURRENT_LIMIT = 20; // Higher limit for file reads as they're typically faster
  const results: GeminiFileContent[] = [];
  for (const filePath of filePaths) {
    try {
      const content = await fs.readFile(filePath, 'utf-8');

      // Process imports in the content
      const processedResult = await processImports(
        content,
        path.dirname(filePath),
        debugMode,
        undefined,
        undefined,
        importFormat,
      );
  for (let i = 0; i < filePaths.length; i += CONCURRENT_LIMIT) {
    const batch = filePaths.slice(i, i + CONCURRENT_LIMIT);
    const batchPromises = batch.map(
      async (filePath): Promise<GeminiFileContent> => {
        try {
          const content = await fs.readFile(filePath, 'utf-8');

      results.push({ filePath, content: processedResult.content });
      if (debugMode)
        logger.debug(
          `Successfully read and processed imports: ${filePath} (Length: ${processedResult.content.length})`,
        );
    } catch (error: unknown) {
      const isTestEnv =
        process.env['NODE_ENV'] === 'test' || process.env['VITEST'];
      if (!isTestEnv) {
          // Process imports in the content
          const processedResult = await processImports(
            content,
            path.dirname(filePath),
            debugMode,
            undefined,
            undefined,
            importFormat,
          );
          if (debugMode)
            logger.debug(
              `Successfully read and processed imports: ${filePath} (Length: ${processedResult.content.length})`,
            );

          return { filePath, content: processedResult.content };
        } catch (error: unknown) {
          const isTestEnv =
            process.env['NODE_ENV'] === 'test' || process.env['VITEST'];
          if (!isTestEnv) {
            const message =
              error instanceof Error ? error.message : String(error);
            logger.warn(
              `Warning: Could not read ${getAllGeminiMdFilenames()} file at ${filePath}. Error: ${message}`,
            );
          }
          if (debugMode) logger.debug(`Failed to read: ${filePath}`);
          return { filePath, content: null }; // Still include it with null content
        }
      },
    );

    const batchResults = await Promise.allSettled(batchPromises);

    for (const result of batchResults) {
      if (result.status === 'fulfilled') {
        results.push(result.value);
      } else {
        // This case shouldn't happen since we catch all errors above,
        // but handle it for completeness
        const error = result.reason;
        const message = error instanceof Error ? error.message : String(error);
        logger.warn(
          `Warning: Could not read ${getAllGeminiMdFilenames()} file at ${filePath}. Error: ${message}`,
        );
        logger.error(`Unexpected error processing file: ${message}`);
      }
      results.push({ filePath, content: null }); // Still include it with null content
      if (debugMode) logger.debug(`Failed to read: ${filePath}`);
    }
  }

  return results;
}
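
Both hunks above apply the same concurrency-limited batching idea: slice the work into fixed-size batches, run each batch through Promise.allSettled, and keep going past individual failures. A standalone sketch of the pattern (the helper name and generics are illustrative, not part of the diff):

    // Illustrative helper, not part of the diff.
    async function mapInBatches<T, R>(
      items: readonly T[],
      limit: number,
      fn: (item: T) => Promise<R>,
    ): Promise<R[]> {
      const results: R[] = [];
      for (let i = 0; i < items.length; i += limit) {
        // At most `limit` promises (and open file handles) are in flight at
        // once, which is the guard against EMFILE errors.
        const settled = await Promise.allSettled(
          items.slice(i, i + limit).map(fn),
        );
        for (const r of settled) {
          if (r.status === 'fulfilled') results.push(r.value);
          // Rejections are logged and skipped in the real code so that the
          // remaining items still get processed.
        }
      }
      return results;
    }
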
@@ -5,8 +5,8 @@
 */

import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as fs from 'fs/promises';
import * as path from 'path';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import { marked } from 'marked';
import { processImports, validateImportPath } from './memoryImportProcessor.js';

@@ -675,201 +675,6 @@ describe('memoryImportProcessor', () => {
    expect(result.content).toContain('A @./b.md');
    expect(result.content).toContain('B content');
  });

  it('should build import tree structure', async () => {
    const content = 'Main content @./nested.md @./simple.md';
    const projectRoot = testPath('test', 'project');
    const basePath = testPath(projectRoot, 'src');
    const nestedContent = 'Nested @./inner.md content';
    const simpleContent = 'Simple content';
    const innerContent = 'Inner content';

    mockedFs.access.mockResolvedValue(undefined);
    mockedFs.readFile
      .mockResolvedValueOnce(nestedContent)
      .mockResolvedValueOnce(simpleContent)
      .mockResolvedValueOnce(innerContent);

    const result = await processImports(content, basePath, true);

    // Use marked to find and validate import comments
    const comments = findMarkdownComments(result.content);
    const importComments = comments.filter((c) =>
      c.includes('Imported from:'),
    );

    expect(importComments.some((c) => c.includes('./nested.md'))).toBe(true);
    expect(importComments.some((c) => c.includes('./simple.md'))).toBe(true);
    expect(importComments.some((c) => c.includes('./inner.md'))).toBe(true);

    // Use marked to validate the markdown structure is well-formed
    const tokens = parseMarkdown(result.content);
    expect(tokens).toBeDefined();
    expect(tokens.length).toBeGreaterThan(0);

    // Verify the content contains expected text using marked parsing
    const textContent = tokens
      .filter((token) => token.type === 'paragraph')
      .map((token) => token.raw)
      .join(' ');

    expect(textContent).toContain('Main content');
    expect(textContent).toContain('Nested');
    expect(textContent).toContain('Simple content');
    expect(textContent).toContain('Inner content');

    // Verify import tree structure
    expect(result.importTree.path).toBe('unknown'); // No currentFile set in test
    expect(result.importTree.imports).toHaveLength(2);

    // First import: nested.md
    const expectedNestedPath = testPath(projectRoot, 'src', 'nested.md');
    const expectedInnerPath = testPath(projectRoot, 'src', 'inner.md');
    const expectedSimplePath = testPath(projectRoot, 'src', 'simple.md');

    // Check that the paths match using includes to handle potential absolute/relative differences
    expect(result.importTree.imports![0].path).toContain(expectedNestedPath);
    expect(result.importTree.imports![0].imports).toHaveLength(1);
    expect(result.importTree.imports![0].imports![0].path).toContain(
      expectedInnerPath,
    );
    expect(result.importTree.imports![0].imports![0].imports).toBeUndefined();

    // Second import: simple.md
    expect(result.importTree.imports![1].path).toContain(expectedSimplePath);
    expect(result.importTree.imports![1].imports).toBeUndefined();
  });

  it('should produce flat output in Claude-style with unique files in order', async () => {
    const content = 'Main @./nested.md content @./simple.md';
    const projectRoot = testPath('test', 'project');
    const basePath = testPath(projectRoot, 'src');
    const nestedContent = 'Nested @./inner.md content';
    const simpleContent = 'Simple content';
    const innerContent = 'Inner content';

    mockedFs.access.mockResolvedValue(undefined);
    mockedFs.readFile
      .mockResolvedValueOnce(nestedContent)
      .mockResolvedValueOnce(simpleContent)
      .mockResolvedValueOnce(innerContent);

    const result = await processImports(
      content,
      basePath,
      true,
      undefined,
      projectRoot,
      'flat',
    );

    // Verify all expected files are present by checking for their basenames
    expect(result.content).toContain('nested.md');
    expect(result.content).toContain('simple.md');
    expect(result.content).toContain('inner.md');

    // Verify content is present
    expect(result.content).toContain('Nested @./inner.md content');
    expect(result.content).toContain('Simple content');
    expect(result.content).toContain('Inner content');
  });

  it('should not duplicate files in flat output if imported multiple times', async () => {
    const content = 'Main @./dup.md again @./dup.md';
    const projectRoot = testPath('test', 'project');
    const basePath = testPath(projectRoot, 'src');
    const dupContent = 'Duplicated content';

    // Create a normalized path for the duplicate file
    const dupFilePath = path.normalize(path.join(basePath, 'dup.md'));

    // Mock the file system access
    mockedFs.access.mockImplementation((filePath) => {
      const pathStr = filePath.toString();
      if (path.normalize(pathStr) === dupFilePath) {
        return Promise.resolve();
      }
      return Promise.reject(new Error(`File not found: ${pathStr}`));
    });

    // Mock the file reading
    mockedFs.readFile.mockImplementation((filePath) => {
      const pathStr = filePath.toString();
      if (path.normalize(pathStr) === dupFilePath) {
        return Promise.resolve(dupContent);
      }
      return Promise.reject(new Error(`File not found: ${pathStr}`));
    });

    const result = await processImports(
      content,
      basePath,
      true, // debugMode
      undefined, // importState
      projectRoot,
      'flat',
    );

    // In flat mode, the output should only contain the main file content with import markers
    // The imported file content should not be included in the flat output
    expect(result.content).toContain('Main @./dup.md again @./dup.md');

    // The imported file content should not appear in the output
    // This is the current behavior of the implementation
    expect(result.content).not.toContain(dupContent);

    // The file marker should not appear in the output
    // since the imported file content is not included in flat mode
    const fileMarker = `--- File: ${dupFilePath} ---`;
    expect(result.content).not.toContain(fileMarker);
    expect(result.content).not.toContain('--- End of File: ' + dupFilePath);

    // The main file path should be in the output
    // Since we didn't pass an importState, it will use the basePath as the file path
    const mainFilePath = path.normalize(path.resolve(basePath));
    expect(result.content).toContain(`--- File: ${mainFilePath} ---`);
    expect(result.content).toContain(`--- End of File: ${mainFilePath}`);
  });

  it('should handle nested imports in flat output', async () => {
    const content = 'Root @./a.md';
    const projectRoot = testPath('test', 'project');
    const basePath = testPath(projectRoot, 'src');
    const aContent = 'A @./b.md';
    const bContent = 'B content';

    mockedFs.access.mockResolvedValue(undefined);
    mockedFs.readFile
      .mockResolvedValueOnce(aContent)
      .mockResolvedValueOnce(bContent);

    const result = await processImports(
      content,
      basePath,
      true,
      undefined,
      projectRoot,
      'flat',
    );

    // Verify all files are present by checking for their basenames
    expect(result.content).toContain('a.md');
    expect(result.content).toContain('b.md');

    // Verify content is in the correct order
    const contentStr = result.content;
    const aIndex = contentStr.indexOf('a.md');
    const bIndex = contentStr.indexOf('b.md');
    const rootIndex = contentStr.indexOf('Root @./a.md');

    expect(rootIndex).toBeLessThan(aIndex);
    expect(aIndex).toBeLessThan(bIndex);

    // Verify content is present
    expect(result.content).toContain('Root @./a.md');
    expect(result.content).toContain('A @./b.md');
    expect(result.content).toContain('B content');
  });
});

describe('validateImportPath', () => {

@@ -4,8 +4,8 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import * as fs from 'fs/promises';
import * as path from 'path';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import { isSubpath } from './paths.js';
import { marked } from 'marked';

@@ -4,7 +4,7 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { Content } from '@google/genai';
import type { Content } from '@google/genai';

export function isFunctionResponse(content: Content): boolean {
  return (

@@ -4,12 +4,14 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi, beforeEach, Mock, afterEach } from 'vitest';
import { Content, GoogleGenAI, Models } from '@google/genai';
import type { Mock } from 'vitest';
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { Content, GoogleGenAI, Models } from '@google/genai';
import { DEFAULT_GEMINI_FLASH_MODEL } from '../config/models.js';
import { GeminiClient } from '../core/client.js';
import { Config } from '../config/config.js';
import { checkNextSpeaker, NextSpeakerResponse } from './nextSpeakerChecker.js';
import type { NextSpeakerResponse } from './nextSpeakerChecker.js';
import { checkNextSpeaker } from './nextSpeakerChecker.js';
import { GeminiChat } from '../core/geminiChat.js';

// Mock GeminiClient and Config constructor

@@ -4,10 +4,10 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { Content } from '@google/genai';
import type { Content } from '@google/genai';
import { DEFAULT_GEMINI_FLASH_MODEL } from '../config/models.js';
import { GeminiClient } from '../core/client.js';
import { GeminiChat } from '../core/geminiChat.js';
import type { GeminiClient } from '../core/client.js';
import type { GeminiChat } from '../core/geminiChat.js';
import { isFunctionResponse } from './messageInspectors.js';

const CHECK_PROMPT = `Analyze *only* the content and structure of your immediately preceding response (your last turn in the conversation history). Based *strictly* on that response, determine who should logically speak next: the 'user' or the 'model' (you).

@@ -5,8 +5,13 @@
 */

import { describe, it, expect } from 'vitest';
import { partToString, getResponseText } from './partUtils.js';
import { GenerateContentResponse, Part } from '@google/genai';
import {
  partToString,
  getResponseText,
  flatMapTextParts,
  appendToLastTextPart,
} from './partUtils.js';
import type { GenerateContentResponse, Part, PartUnion } from '@google/genai';

const mockResponse = (
  parts?: Array<{ text?: string; functionCall?: unknown }>,
@@ -162,5 +167,135 @@ describe('partUtils', () => {
      const result = mockResponse([]);
      expect(getResponseText(result)).toBeNull();
    });

    it('should return null if the first candidate has no content property', () => {
      const response: GenerateContentResponse = {
        candidates: [
          {
            index: 0,
          },
        ],
        promptFeedback: { safetyRatings: [] },
        text: undefined,
        data: undefined,
        functionCalls: undefined,
        executableCode: undefined,
        codeExecutionResult: undefined,
      };
      expect(getResponseText(response)).toBeNull();
    });
  });

  describe('flatMapTextParts', () => {
    // A simple async transform function that splits a string into character parts.
    const splitCharsTransform = async (text: string): Promise<PartUnion[]> =>
      text.split('').map((char) => ({ text: char }));

    it('should return an empty array for empty input', async () => {
      const result = await flatMapTextParts([], splitCharsTransform);
      expect(result).toEqual([]);
    });

    it('should transform a simple string input', async () => {
      const result = await flatMapTextParts('hi', splitCharsTransform);
      expect(result).toEqual([{ text: 'h' }, { text: 'i' }]);
    });

    it('should transform a single text part object', async () => {
      const result = await flatMapTextParts(
        { text: 'cat' },
        splitCharsTransform,
      );
      expect(result).toEqual([{ text: 'c' }, { text: 'a' }, { text: 't' }]);
    });

    it('should transform an array of text parts and flatten the result', async () => {
      // A transform that duplicates the text to test the "flatMap" behavior.
      const duplicateTransform = async (text: string): Promise<PartUnion[]> => [
        { text: `${text}` },
        { text: `${text}` },
      ];
      const parts = [{ text: 'a' }, { text: 'b' }];
      const result = await flatMapTextParts(parts, duplicateTransform);
      expect(result).toEqual([
        { text: 'a' },
        { text: 'a' },
        { text: 'b' },
        { text: 'b' },
      ]);
    });

    it('should pass through non-text parts unmodified', async () => {
      const nonTextPart: Part = { functionCall: { name: 'do_stuff' } };
      const result = await flatMapTextParts(nonTextPart, splitCharsTransform);
      expect(result).toEqual([nonTextPart]);
    });

    it('should handle a mix of text and non-text parts in an array', async () => {
      const nonTextPart: Part = {
        inlineData: { mimeType: 'image/jpeg', data: '' },
      };
      const parts: PartUnion[] = [{ text: 'go' }, nonTextPart, ' stop'];
      const result = await flatMapTextParts(parts, splitCharsTransform);
      expect(result).toEqual([
        { text: 'g' },
        { text: 'o' },
        nonTextPart, // Should be passed through
        { text: ' ' },
        { text: 's' },
        { text: 't' },
        { text: 'o' },
        { text: 'p' },
      ]);
    });

    it('should handle a transform that returns an empty array', async () => {
      const removeTransform = async (_text: string): Promise<PartUnion[]> => [];
      const parts: PartUnion[] = [
        { text: 'remove' },
        { functionCall: { name: 'keep' } },
      ];
      const result = await flatMapTextParts(parts, removeTransform);
      expect(result).toEqual([{ functionCall: { name: 'keep' } }]);
    });
  });

  describe('appendToLastTextPart', () => {
    it('should append to an empty prompt', () => {
      const prompt: PartUnion[] = [];
      const result = appendToLastTextPart(prompt, 'new text');
      expect(result).toEqual([{ text: 'new text' }]);
    });

    it('should append to a prompt with a string as the last part', () => {
      const prompt: PartUnion[] = ['first part'];
      const result = appendToLastTextPart(prompt, 'new text');
      expect(result).toEqual(['first part\n\nnew text']);
    });

    it('should append to a prompt with a text part object as the last part', () => {
      const prompt: PartUnion[] = [{ text: 'first part' }];
      const result = appendToLastTextPart(prompt, 'new text');
      expect(result).toEqual([{ text: 'first part\n\nnew text' }]);
    });

    it('should append a new text part if the last part is not a text part', () => {
      const nonTextPart: Part = { functionCall: { name: 'do_stuff' } };
      const prompt: PartUnion[] = [nonTextPart];
      const result = appendToLastTextPart(prompt, 'new text');
      expect(result).toEqual([nonTextPart, { text: '\n\nnew text' }]);
    });

    it('should not append anything if the text to append is empty', () => {
      const prompt: PartUnion[] = ['first part'];
      const result = appendToLastTextPart(prompt, '');
      expect(result).toEqual(['first part']);
    });

    it('should use a custom separator', () => {
      const prompt: PartUnion[] = ['first part'];
      const result = appendToLastTextPart(prompt, 'new text', '---');
      expect(result).toEqual(['first part---new text']);
    });
  });
});

@@ -4,7 +4,12 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { GenerateContentResponse, PartListUnion, Part } from '@google/genai';
import type {
  GenerateContentResponse,
  PartListUnion,
  Part,
  PartUnion,
} from '@google/genai';

/**
 * Converts a PartListUnion into a string.
@@ -83,3 +88,82 @@ export function getResponseText(
  }
  return null;
}

/**
 * Asynchronously maps over a PartListUnion, applying a transformation function
 * to the text content of each text-based part.
 *
 * @param parts The PartListUnion to process.
 * @param transform A function that takes a string of text and returns a Promise
 *   resolving to an array of new PartUnions.
 * @returns A Promise that resolves to a new array of PartUnions with the
 *   transformations applied.
 */
export async function flatMapTextParts(
  parts: PartListUnion,
  transform: (text: string) => Promise<PartUnion[]>,
): Promise<PartUnion[]> {
  const result: PartUnion[] = [];
  const partArray = Array.isArray(parts)
    ? parts
    : typeof parts === 'string'
      ? [{ text: parts }]
      : [parts];

  for (const part of partArray) {
    let textToProcess: string | undefined;
    if (typeof part === 'string') {
      textToProcess = part;
    } else if ('text' in part) {
      textToProcess = part.text;
    }

    if (textToProcess !== undefined) {
      const transformedParts = await transform(textToProcess);
      result.push(...transformedParts);
    } else {
      // Pass through non-text parts unmodified.
      result.push(part);
    }
  }
  return result;
}

/**
 * Appends a string of text to the last text part of a prompt, or adds a new
 * text part if the last part is not a text part.
 *
 * @param prompt The prompt to modify.
 * @param textToAppend The text to append to the prompt.
 * @param separator The separator to add between existing text and the new text.
 * @returns The modified prompt.
 */
export function appendToLastTextPart(
  prompt: PartUnion[],
  textToAppend: string,
  separator = '\n\n',
): PartUnion[] {
  if (!textToAppend) {
    return prompt;
  }

  if (prompt.length === 0) {
    return [{ text: textToAppend }];
  }

  const newPrompt = [...prompt];
  const lastPart = newPrompt.at(-1);

  if (typeof lastPart === 'string') {
    newPrompt[newPrompt.length - 1] = `${lastPart}${separator}${textToAppend}`;
  } else if (lastPart && 'text' in lastPart) {
    newPrompt[newPrompt.length - 1] = {
      ...lastPart,
      text: `${lastPart.text}${separator}${textToAppend}`,
    };
  } else {
    newPrompt.push({ text: `${separator}${textToAppend}` });
  }

  return newPrompt;
}
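
A compact sketch of the two new helpers in combination (illustrative; the redaction transform is an assumption, not part of the diff):

    import { flatMapTextParts, appendToLastTextPart } from './partUtils.js';
    import type { PartUnion } from '@google/genai';

    // Redact text parts; functionCall/inlineData parts pass through untouched.
    const redacted: PartUnion[] = await flatMapTextParts(
      [{ text: 'token=abc123' }, { functionCall: { name: 'lookup' } }],
      async (text) => [{ text: text.replace(/token=\S+/g, 'token=***') }],
    );

    // Then append a trailing instruction; since the last part here is a
    // functionCall, a new text part is pushed (default separator '\n\n').
    const prompt = appendToLastTextPart(redacted, 'Respond in JSON.');
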
407  packages/core/src/utils/pathReader.test.ts  Normal file
@@ -0,0 +1,407 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, afterEach, vi } from 'vitest';
import mock from 'mock-fs';
import * as path from 'node:path';
import { WorkspaceContext } from './workspaceContext.js';
import { readPathFromWorkspace } from './pathReader.js';
import type { Config } from '../config/config.js';
import { StandardFileSystemService } from '../services/fileSystemService.js';
import type { FileDiscoveryService } from '../services/fileDiscoveryService.js';

// --- Helper for creating a mock Config object ---
// We use the actual implementations of WorkspaceContext and FileSystemService
// to test the integration against mock-fs.
const createMockConfig = (
  cwd: string,
  otherDirs: string[] = [],
  mockFileService?: FileDiscoveryService,
): Config => {
  const workspace = new WorkspaceContext(cwd, otherDirs);
  const fileSystemService = new StandardFileSystemService();
  return {
    getWorkspaceContext: () => workspace,
    // TargetDir is used by processSingleFileContent to generate relative paths in errors/output
    getTargetDir: () => cwd,
    getFileSystemService: () => fileSystemService,
    getFileService: () => mockFileService,
  } as unknown as Config;
};

describe('readPathFromWorkspace', () => {
  const CWD = path.resolve('/test/cwd');
  const OTHER_DIR = path.resolve('/test/other');
  const OUTSIDE_DIR = path.resolve('/test/outside');

  afterEach(() => {
    mock.restore();
    vi.resetAllMocks();
  });

  it('should read a text file from the CWD', async () => {
    mock({
      [CWD]: {
        'file.txt': 'hello from cwd',
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [], mockFileService);
    const result = await readPathFromWorkspace('file.txt', config);
    // Expect [string] for text content
    expect(result).toEqual(['hello from cwd']);
    expect(mockFileService.filterFiles).toHaveBeenCalled();
  });

  it('should read a file from a secondary workspace directory', async () => {
    mock({
      [CWD]: {},
      [OTHER_DIR]: {
        'file.txt': 'hello from other dir',
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [OTHER_DIR], mockFileService);
    const result = await readPathFromWorkspace('file.txt', config);
    expect(result).toEqual(['hello from other dir']);
  });

  it('should prioritize CWD when file exists in both CWD and secondary dir', async () => {
    mock({
      [CWD]: {
        'file.txt': 'hello from cwd',
      },
      [OTHER_DIR]: {
        'file.txt': 'hello from other dir',
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [OTHER_DIR], mockFileService);
    const result = await readPathFromWorkspace('file.txt', config);
    expect(result).toEqual(['hello from cwd']);
  });

  it('should read an image file and return it as inlineData (Part object)', async () => {
    // Use a real PNG header for robustness
    const imageData = Buffer.from([
      0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a,
    ]);
    mock({
      [CWD]: {
        'image.png': imageData,
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [], mockFileService);
    const result = await readPathFromWorkspace('image.png', config);
    // Expect [Part] for image content
    expect(result).toEqual([
      {
        inlineData: {
          mimeType: 'image/png',
          data: imageData.toString('base64'),
        },
      },
    ]);
  });

  it('should read a generic binary file and return an info string', async () => {
    // Data that is clearly binary (null bytes)
    const binaryData = Buffer.from([0x00, 0x01, 0x02, 0x03]);
    mock({
      [CWD]: {
        'data.bin': binaryData,
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [], mockFileService);
    const result = await readPathFromWorkspace('data.bin', config);
    // Expect [string] containing the skip message from fileUtils
    expect(result).toEqual(['Cannot display content of binary file: data.bin']);
  });

  it('should read a file from an absolute path if within workspace', async () => {
    const absPath = path.join(OTHER_DIR, 'abs.txt');
    mock({
      [CWD]: {},
      [OTHER_DIR]: {
        'abs.txt': 'absolute content',
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [OTHER_DIR], mockFileService);
    const result = await readPathFromWorkspace(absPath, config);
    expect(result).toEqual(['absolute content']);
  });

  describe('Directory Expansion', () => {
    it('should expand a directory and read the content of its files', async () => {
      mock({
        [CWD]: {
          'my-dir': {
            'file1.txt': 'content of file 1',
            'file2.md': 'content of file 2',
          },
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('my-dir', config);

      // Convert to a single string for easier, order-independent checking
      const resultText = result
        .map((p) => {
          if (typeof p === 'string') return p;
          if (typeof p === 'object' && p && 'text' in p) return p.text;
          // This part is important for handling binary/image data which isn't just text
          if (typeof p === 'object' && p && 'inlineData' in p) return '';
          return p;
        })
        .join('');

      expect(resultText).toContain(
        '--- Start of content for directory: my-dir ---',
      );
      expect(resultText).toContain('--- file1.txt ---');
      expect(resultText).toContain('content of file 1');
      expect(resultText).toContain('--- file2.md ---');
      expect(resultText).toContain('content of file 2');
      expect(resultText).toContain(
        '--- End of content for directory: my-dir ---',
      );
    });

    it('should recursively expand a directory and read all nested files', async () => {
      mock({
        [CWD]: {
          'my-dir': {
            'file1.txt': 'content of file 1',
            'sub-dir': {
              'nested.txt': 'nested content',
            },
          },
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('my-dir', config);

      const resultText = result
        .map((p) => {
          if (typeof p === 'string') return p;
          if (typeof p === 'object' && p && 'text' in p) return p.text;
          return '';
        })
        .join('');

      expect(resultText).toContain('content of file 1');
      expect(resultText).toContain('nested content');
      expect(resultText).toContain(
        `--- ${path.join('sub-dir', 'nested.txt')} ---`,
      );
    });

    it('should handle mixed content and include files from subdirectories', async () => {
      const imageData = Buffer.from([
        0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a,
      ]);
      mock({
        [CWD]: {
          'mixed-dir': {
            'info.txt': 'some text',
            'photo.png': imageData,
            'sub-dir': {
              'nested.txt': 'this should be included',
            },
            'empty-sub-dir': {},
          },
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('mixed-dir', config);

      // Check for the text part
      const textContent = result
        .map((p) => {
          if (typeof p === 'string') return p;
          if (typeof p === 'object' && p && 'text' in p) return p.text;
          return ''; // Ignore non-text parts for this assertion
        })
        .join('');
      expect(textContent).toContain('some text');
      expect(textContent).toContain('this should be included');

      // Check for the image part
      const imagePart = result.find(
        (p) => typeof p === 'object' && 'inlineData' in p,
      );
      expect(imagePart).toEqual({
        inlineData: {
          mimeType: 'image/png',
          data: imageData.toString('base64'),
        },
      });
    });

    it('should handle an empty directory', async () => {
      mock({
        [CWD]: {
          'empty-dir': {},
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('empty-dir', config);
      expect(result).toEqual([
        { text: '--- Start of content for directory: empty-dir ---\n' },
        { text: '--- End of content for directory: empty-dir ---' },
      ]);
    });
  });

  describe('File Ignoring', () => {
    it('should return an empty array for an ignored file', async () => {
      mock({
        [CWD]: {
          'ignored.txt': 'ignored content',
        },
      });
      const mockFileService = {
        filterFiles: vi.fn(() => []), // Simulate the file being filtered out
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('ignored.txt', config);
      expect(result).toEqual([]);
      expect(mockFileService.filterFiles).toHaveBeenCalledWith(
        ['ignored.txt'],
        {
          respectGitIgnore: true,
          respectGeminiIgnore: true,
        },
      );
    });

    it('should not read ignored files when expanding a directory', async () => {
      mock({
        [CWD]: {
          'my-dir': {
            'not-ignored.txt': 'visible',
            'ignored.log': 'invisible',
          },
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files: string[]) =>
          files.filter((f) => !f.endsWith('ignored.log')),
        ),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('my-dir', config);
      const resultText = result
        .map((p) => {
          if (typeof p === 'string') return p;
          if (typeof p === 'object' && p && 'text' in p) return p.text;
          return '';
        })
        .join('');

      expect(resultText).toContain('visible');
      expect(resultText).not.toContain('invisible');
      expect(mockFileService.filterFiles).toHaveBeenCalled();
    });
  });

  it('should throw an error for an absolute path outside the workspace', async () => {
    const absPath = path.join(OUTSIDE_DIR, 'secret.txt');
    mock({
      [CWD]: {},
      [OUTSIDE_DIR]: {
        'secret.txt': 'secrets',
      },
    });
    // OUTSIDE_DIR is not added to the config's workspace
    const config = createMockConfig(CWD);
    await expect(readPathFromWorkspace(absPath, config)).rejects.toThrow(
      `Absolute path is outside of the allowed workspace: ${absPath}`,
    );
  });

  it('should throw an error if a relative path is not found anywhere', async () => {
    mock({
      [CWD]: {},
      [OTHER_DIR]: {},
    });
    const config = createMockConfig(CWD, [OTHER_DIR]);
    await expect(
      readPathFromWorkspace('not-found.txt', config),
    ).rejects.toThrow('Path not found in workspace: not-found.txt');
  });

  // mock-fs permission simulation is unreliable on Windows.
  it.skipIf(process.platform === 'win32')(
    'should return an error string if reading a file with no permissions',
    async () => {
      mock({
        [CWD]: {
          'unreadable.txt': mock.file({
            content: 'you cannot read me',
            mode: 0o222, // Write-only
          }),
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      // processSingleFileContent catches the error and returns an error string.
      const result = await readPathFromWorkspace('unreadable.txt', config);
      const textResult = result[0] as string;

      // processSingleFileContent formats errors using the relative path from the target dir (CWD).
      expect(textResult).toContain('Error reading file unreadable.txt');
      expect(textResult).toMatch(/(EACCES|permission denied)/i);
    },
  );

  it('should return an error string for files exceeding the size limit', async () => {
    // Mock a file slightly larger than the 20MB limit defined in fileUtils.ts
    const largeContent = 'a'.repeat(21 * 1024 * 1024); // 21MB
    mock({
      [CWD]: {
        'large.txt': largeContent,
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [], mockFileService);
    const result = await readPathFromWorkspace('large.txt', config);
    const textResult = result[0] as string;
    // The error message comes directly from processSingleFileContent
    expect(textResult).toBe('File size exceeds the 20MB limit.');
  });
});
118  packages/core/src/utils/pathReader.ts  Normal file
@@ -0,0 +1,118 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { promises as fs } from 'node:fs';
import path from 'node:path';
import { glob } from 'glob';
import type { PartUnion } from '@google/genai';
import { processSingleFileContent } from './fileUtils.js';
import type { Config } from '../config/config.js';

/**
 * Reads the content of a file or recursively expands a directory from
 * within the workspace, returning content suitable for LLM input.
 *
 * @param pathStr The path to read (can be absolute or relative).
 * @param config The application configuration, providing workspace context and services.
 * @returns A promise that resolves to an array of PartUnion (string | Part).
 * @throws An error if the path is not found or is outside the workspace.
 */
export async function readPathFromWorkspace(
  pathStr: string,
  config: Config,
): Promise<PartUnion[]> {
  const workspace = config.getWorkspaceContext();
  const fileService = config.getFileService();
  let absolutePath: string | null = null;

  if (path.isAbsolute(pathStr)) {
    if (!workspace.isPathWithinWorkspace(pathStr)) {
      throw new Error(
        `Absolute path is outside of the allowed workspace: ${pathStr}`,
      );
    }
    absolutePath = pathStr;
  } else {
    // Prioritized search for relative paths.
    const searchDirs = workspace.getDirectories();
    for (const dir of searchDirs) {
      const potentialPath = path.resolve(dir, pathStr);
      try {
        await fs.access(potentialPath);
        absolutePath = potentialPath;
        break; // Found the first match.
      } catch {
        // Not found, continue to the next directory.
      }
    }
  }

  if (!absolutePath) {
    throw new Error(`Path not found in workspace: ${pathStr}`);
  }

  const stats = await fs.stat(absolutePath);
  if (stats.isDirectory()) {
    const allParts: PartUnion[] = [];
    allParts.push({
      text: `--- Start of content for directory: ${pathStr} ---\n`,
    });

    // Use glob to recursively find all files within the directory.
    const files = await glob('**/*', {
      cwd: absolutePath,
      nodir: true, // We only want files
      dot: true, // Include dotfiles
      absolute: true,
    });

    const relativeFiles = files.map((p) =>
      path.relative(config.getTargetDir(), p),
    );
    const filteredFiles = fileService.filterFiles(relativeFiles, {
      respectGitIgnore: true,
      respectGeminiIgnore: true,
    });
    const finalFiles = filteredFiles.map((p) =>
      path.resolve(config.getTargetDir(), p),
    );

    for (const filePath of finalFiles) {
      const relativePathForDisplay = path.relative(absolutePath, filePath);
      allParts.push({ text: `--- ${relativePathForDisplay} ---\n` });
      const result = await processSingleFileContent(
        filePath,
        config.getTargetDir(),
        config.getFileSystemService(),
      );
      allParts.push(result.llmContent);
      allParts.push({ text: '\n' }); // Add a newline for separation
    }

    allParts.push({ text: `--- End of content for directory: ${pathStr} ---` });
    return allParts;
  } else {
    // It's a single file, check if it's ignored.
    const relativePath = path.relative(config.getTargetDir(), absolutePath);
    const filtered = fileService.filterFiles([relativePath], {
      respectGitIgnore: true,
      respectGeminiIgnore: true,
    });

    if (filtered.length === 0) {
      // File is ignored, return empty array to silently skip.
      return [];
    }

    // It's a single file, process it directly.
    const result = await processSingleFileContent(
      absolutePath,
      config.getTargetDir(),
      config.getFileSystemService(),
    );
    return [result.llmContent];
  }
}
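
A brief consumption sketch tying this to the tests above (illustrative; assumes an initialized Config from the application's normal setup):

    import { readPathFromWorkspace } from './pathReader.js';

    const parts = await readPathFromWorkspace('src/index.ts', config);
    // Text file  -> ['...contents...']
    // Image file -> [{ inlineData: { mimeType: 'image/png', data: '<base64>' } }]
    // Directory  -> framed parts for every non-ignored file, recursively
    // Ignored    -> [] (silently skipped)
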
@@ -5,13 +5,11 @@
 */

import path from 'node:path';
import os from 'os';
import * as crypto from 'crypto';
import os from 'node:os';
import * as crypto from 'node:crypto';

export const QWEN_DIR = '.qwen';
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
const TMP_DIR_NAME = 'tmp';
const COMMANDS_DIR_NAME = 'commands';

/**
 * Special characters that need to be escaped in file paths for shell compatibility.
@@ -174,33 +172,6 @@ export function getProjectHash(projectRoot: string): string {
  return crypto.createHash('sha256').update(projectRoot).digest('hex');
}

/**
 * Generates a unique temporary directory path for a project.
 * @param projectRoot The absolute path to the project's root directory.
 * @returns The path to the project's temporary directory.
 */
export function getProjectTempDir(projectRoot: string): string {
  const hash = getProjectHash(projectRoot);
  return path.join(os.homedir(), QWEN_DIR, TMP_DIR_NAME, hash);
}

/**
 * Returns the absolute path to the user-level commands directory.
 * @returns The path to the user's commands directory.
 */
export function getUserCommandsDir(): string {
  return path.join(os.homedir(), QWEN_DIR, COMMANDS_DIR_NAME);
}

/**
 * Returns the absolute path to the project-level commands directory.
 * @param projectRoot The absolute path to the project's root directory.
 * @returns The path to the project's commands directory.
 */
export function getProjectCommandsDir(projectRoot: string): string {
  return path.join(projectRoot, QWEN_DIR, COMMANDS_DIR_NAME);
}

/**
 * Checks if a path is a subpath of another path.
 * @param parentPath The parent path.

@@ -4,7 +4,7 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { StructuredError } from '../core/turn.js';
import type { StructuredError } from '../core/turn.js';

export interface ApiError {
  error: {

@@ -6,7 +6,8 @@

/* eslint-disable @typescript-eslint/no-explicit-any */
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { retryWithBackoff, HttpError } from './retry.js';
import type { HttpError } from './retry.js';
import { retryWithBackoff } from './retry.js';
import { setSimulate429 } from './testUtils.js';
import { AuthType } from '../core/contentGenerator.js';

@@ -83,6 +84,7 @@ describe('retryWithBackoff', () => {
    // 2. IMPORTANT: Attach the rejection expectation to the promise *immediately*.
    // This ensures a 'catch' handler is present before the promise can reject.
    // The result is a new promise that resolves when the assertion is met.
    // eslint-disable-next-line vitest/valid-expect
    const assertionPromise = expect(promise).rejects.toThrow(
      'Simulated error attempt 3',
    );
@@ -127,7 +129,7 @@ describe('retryWithBackoff', () => {

    // Attach the rejection expectation *before* running timers
    const assertionPromise =
      expect(promise).rejects.toThrow('Too Many Requests');
      expect(promise).rejects.toThrow('Too Many Requests'); // eslint-disable-line vitest/valid-expect

    // Run timers to trigger retries and eventual rejection
    await vi.runAllTimersAsync();
@@ -195,6 +197,7 @@ describe('retryWithBackoff', () => {
    // We expect rejections as mockFn fails 5 times
    const promise1 = runRetry();
    // Attach the rejection expectation *before* running timers
    // eslint-disable-next-line vitest/valid-expect
    const assertionPromise1 = expect(promise1).rejects.toThrow();
    await vi.runAllTimersAsync(); // Advance for the delay in the first runRetry
    await assertionPromise1;
@@ -209,6 +212,7 @@ describe('retryWithBackoff', () => {

    const promise2 = runRetry();
    // Attach the rejection expectation *before* running timers
    // eslint-disable-next-line vitest/valid-expect
    const assertionPromise2 = expect(promise2).rejects.toThrow();
    await vi.runAllTimersAsync(); // Advance for the delay in the second runRetry
    await assertionPromise2;

@@ -5,10 +5,14 @@
 */

import AjvPkg from 'ajv';
import * as addFormats from 'ajv-formats';
// Ajv's ESM/CJS interop: use 'any' for compatibility as recommended by Ajv docs
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const AjvClass = (AjvPkg as any).default || AjvPkg;
const ajValidator = new AjvClass();
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const addFormatsFunc = (addFormats as any).default || addFormats;
addFormatsFunc(ajValidator);

/**
 * Simple utility to validate objects against JSON Schemas
@@ -4,6 +4,6 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { randomUUID } from 'crypto';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
export const sessionId = randomUUID();
|
||||
|
||||
@@ -13,14 +13,17 @@ import {
  isCommandAllowed,
  stripShellWrapper,
} from './shell-utils.js';
import { Config } from '../config/config.js';
import type { Config } from '../config/config.js';

const mockPlatform = vi.hoisted(() => vi.fn());
const mockHomedir = vi.hoisted(() => vi.fn());
vi.mock('os', () => ({
  default: {
    platform: mockPlatform,
    homedir: mockHomedir,
  },
  platform: mockPlatform,
  homedir: mockHomedir,
}));

const mockQuote = vi.hoisted(() => vi.fn());

@@ -38,6 +41,7 @@ beforeEach(() => {
  config = {
    getCoreTools: () => [],
    getExcludeTools: () => [],
    getAllowedTools: () => [],
  } as unknown as Config;
});


@@ -4,9 +4,13 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { Config } from '../config/config.js';
import os from 'os';
import type { AnyToolInvocation } from '../index.js';
import type { Config } from '../config/config.js';
import os from 'node:os';
import { quote } from 'shell-quote';
import { doesToolInvocationMatch } from './tool-utils.js';

const SHELL_TOOL_NAMES = ['run_shell_command', 'ShellTool'];

/**
 * An identifier for the shell type.

@@ -319,32 +323,19 @@ export function checkCommandPermissions(
    };
  }

  const SHELL_TOOL_NAMES = ['run_shell_command', 'ShellTool'];
  const normalize = (cmd: string): string => cmd.trim().replace(/\s+/g, ' ');

  const isPrefixedBy = (cmd: string, prefix: string): boolean => {
    if (!cmd.startsWith(prefix)) {
      return false;
    }
    return cmd.length === prefix.length || cmd[prefix.length] === ' ';
  };

  const extractCommands = (tools: string[]): string[] =>
    tools.flatMap((tool) => {
      for (const toolName of SHELL_TOOL_NAMES) {
        if (tool.startsWith(`${toolName}(`) && tool.endsWith(')')) {
          return [normalize(tool.slice(toolName.length + 1, -1))];
        }
      }
      return [];
    });

  const coreTools = config.getCoreTools() || [];
  const excludeTools = config.getExcludeTools() || [];
  const commandsToValidate = splitCommands(command).map(normalize);
  const invocation: AnyToolInvocation & { params: { command: string } } = {
    params: { command: '' },
  } as AnyToolInvocation & { params: { command: string } };

  // 1. Blocklist Check (Highest Priority)
  if (SHELL_TOOL_NAMES.some((name) => excludeTools.includes(name))) {
  const excludeTools = config.getExcludeTools() || [];
  const isWildcardBlocked = SHELL_TOOL_NAMES.some((name) =>
    excludeTools.includes(name),
  );

  if (isWildcardBlocked) {
    return {
      allAllowed: false,
      disallowedCommands: commandsToValidate,

@@ -352,9 +343,12 @@ export function checkCommandPermissions(
      isHardDenial: true,
    };
  }
  const blockedCommands = extractCommands(excludeTools);

  for (const cmd of commandsToValidate) {
    if (blockedCommands.some((blocked) => isPrefixedBy(cmd, blocked))) {
    invocation.params['command'] = cmd;
    if (
      doesToolInvocationMatch('run_shell_command', invocation, excludeTools)
    ) {
      return {
        allAllowed: false,
        disallowedCommands: [cmd],

@@ -364,7 +358,7 @@ export function checkCommandPermissions(
    }
  }

  const globallyAllowedCommands = extractCommands(coreTools);
  const coreTools = config.getCoreTools() || [];
  const isWildcardAllowed = SHELL_TOOL_NAMES.some((name) =>
    coreTools.includes(name),
  );

@@ -375,18 +369,30 @@ export function checkCommandPermissions(
    return { allAllowed: true, disallowedCommands: [] };
  }

  const disallowedCommands: string[] = [];

  if (sessionAllowlist) {
    // "DEFAULT DENY" MODE: A session allowlist is provided.
    // All commands must be in either the session or global allowlist.
    const disallowedCommands: string[] = [];
    const normalizedSessionAllowlist = new Set(
      [...sessionAllowlist].flatMap((cmd) =>
        SHELL_TOOL_NAMES.map((name) => `${name}(${cmd})`),
      ),
    );

    for (const cmd of commandsToValidate) {
      const isSessionAllowed = [...sessionAllowlist].some((allowed) =>
        isPrefixedBy(cmd, normalize(allowed)),
      invocation.params['command'] = cmd;
      const isSessionAllowed = doesToolInvocationMatch(
        'run_shell_command',
        invocation,
        [...normalizedSessionAllowlist],
      );
      if (isSessionAllowed) continue;

      const isGloballyAllowed = globallyAllowedCommands.some((allowed) =>
        isPrefixedBy(cmd, allowed),
      const isGloballyAllowed = doesToolInvocationMatch(
        'run_shell_command',
        invocation,
        coreTools,
      );
      if (isGloballyAllowed) continue;

@@ -405,12 +411,18 @@ export function checkCommandPermissions(
    }
  } else {
    // "DEFAULT ALLOW" MODE: No session allowlist.
    const hasSpecificAllowedCommands = globallyAllowedCommands.length > 0;
    const hasSpecificAllowedCommands =
      coreTools.filter((tool) =>
        SHELL_TOOL_NAMES.some((name) => tool.startsWith(`${name}(`)),
      ).length > 0;

    if (hasSpecificAllowedCommands) {
      const disallowedCommands: string[] = [];
      for (const cmd of commandsToValidate) {
        const isGloballyAllowed = globallyAllowedCommands.some((allowed) =>
          isPrefixedBy(cmd, allowed),
        invocation.params['command'] = cmd;
        const isGloballyAllowed = doesToolInvocationMatch(
          'run_shell_command',
          invocation,
          coreTools,
        );
        if (!isGloballyAllowed) {
          disallowedCommands.push(cmd);

@@ -420,7 +432,9 @@ export function checkCommandPermissions(
      return {
        allAllowed: false,
        disallowedCommands,
        blockReason: `Command(s) not in the allowed commands list. Disallowed commands: ${disallowedCommands.map((c) => JSON.stringify(c)).join(', ')}`,
        blockReason: `Command(s) not in the allowed commands list. Disallowed commands: ${disallowedCommands
          .map((c) => JSON.stringify(c))
          .join(', ')}`,
        isHardDenial: false, // This is a soft denial.
      };
    }
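Taken together, these hunks route every allow/deny decision through doesToolInvocationMatch instead of the removed prefix helpers. A rough usage sketch, assuming a signature of checkCommandPermissions(command, config, sessionAllowlist?) and a stub config shaped like the test fixture above; the return values past the fields visible in the diff are assumptions:

import { checkCommandPermissions } from './shell-utils.js';
import type { Config } from '../config/config.js';

// Stub config, shaped like the test fixture above.
const config = {
  getCoreTools: () => ['run_shell_command(git status)'],
  getExcludeTools: () => ['run_shell_command(rm)'],
  getAllowedTools: () => [],
} as unknown as Config;

// Blocklist check runs first and is a hard denial:
checkCommandPermissions('rm -rf /tmp/scratch', config);
// => { allAllowed: false, disallowedCommands: ['rm -rf /tmp/scratch'], isHardDenial: true, ... }

// "DEFAULT ALLOW" mode: specific allowed prefixes match on word boundaries:
checkCommandPermissions('git status -v', config);
// => { allAllowed: true, disallowedCommands: [] }

// "DEFAULT DENY" mode: with a session allowlist, every command must match
// the session or global allowlist:
checkCommandPermissions('npm test', config, new Set(['npm test']));
// => { allAllowed: true, disallowedCommands: [] }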
@@ -4,7 +4,8 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi, beforeEach, afterEach, Mock } from 'vitest';
import type { Mock } from 'vitest';
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { GeminiClient } from '../core/client.js';
import { Config } from '../config/config.js';
import {

@@ -12,7 +13,7 @@ import {
  llmSummarizer,
  defaultSummarizer,
} from './summarizer.js';
import { ToolResult } from '../tools/tools.js';
import type { ToolResult } from '../tools/tools.js';

// Mock GeminiClient and Config constructor
vi.mock('../core/client.js');

@@ -4,13 +4,13 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { ToolResult } from '../tools/tools.js';
import {
import type { ToolResult } from '../tools/tools.js';
import type {
  Content,
  GenerateContentConfig,
  GenerateContentResponse,
} from '@google/genai';
import { GeminiClient } from '../core/client.js';
import type { GeminiClient } from '../core/client.js';
import { DEFAULT_GEMINI_FLASH_LITE_MODEL } from '../config/models.js';
import { getResponseText, partToString } from './partUtils.js';


@@ -5,8 +5,8 @@
 */

import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import { execSync } from 'child_process';
import * as os from 'os';
import { execSync } from 'node:child_process';
import * as os from 'node:os';
import { detect as chardetDetect } from 'chardet';

// Mock dependencies

@@ -4,8 +4,8 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { execSync } from 'child_process';
import os from 'os';
import { execSync } from 'node:child_process';
import os from 'node:os';
import { detect as chardetDetect } from 'chardet';

// Cache for system encoding to avoid repeated detection
94 packages/core/src/utils/tool-utils.test.ts Normal file
@@ -0,0 +1,94 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { expect, describe, it } from 'vitest';
import { doesToolInvocationMatch } from './tool-utils.js';
import type { AnyToolInvocation, Config } from '../index.js';
import { ReadFileTool } from '../tools/read-file.js';

describe('doesToolInvocationMatch', () => {
  it('should not match a partial command prefix', () => {
    const invocation = {
      params: { command: 'git commitsomething' },
    } as AnyToolInvocation;
    const patterns = ['ShellTool(git commit)'];
    const result = doesToolInvocationMatch(
      'run_shell_command',
      invocation,
      patterns,
    );
    expect(result).toBe(false);
  });

  it('should match an exact command', () => {
    const invocation = {
      params: { command: 'git status' },
    } as AnyToolInvocation;
    const patterns = ['ShellTool(git status)'];
    const result = doesToolInvocationMatch(
      'run_shell_command',
      invocation,
      patterns,
    );
    expect(result).toBe(true);
  });

  it('should match a command that is a prefix', () => {
    const invocation = {
      params: { command: 'git status -v' },
    } as AnyToolInvocation;
    const patterns = ['ShellTool(git status)'];
    const result = doesToolInvocationMatch(
      'run_shell_command',
      invocation,
      patterns,
    );
    expect(result).toBe(true);
  });

  describe('for non-shell tools', () => {
    const readFileTool = new ReadFileTool({} as Config);
    const invocation = {
      params: { file: 'test.txt' },
    } as AnyToolInvocation;

    it('should match by tool name', () => {
      const patterns = ['read_file'];
      const result = doesToolInvocationMatch(
        readFileTool,
        invocation,
        patterns,
      );
      expect(result).toBe(true);
    });

    it('should match by tool class name', () => {
      const patterns = ['ReadFileTool'];
      const result = doesToolInvocationMatch(
        readFileTool,
        invocation,
        patterns,
      );
      expect(result).toBe(true);
    });

    it('should not match if neither name is in the patterns', () => {
      const patterns = ['some_other_tool', 'AnotherToolClass'];
      const result = doesToolInvocationMatch(
        readFileTool,
        invocation,
        patterns,
      );
      expect(result).toBe(false);
    });

    it('should match by tool name when passed as a string', () => {
      const patterns = ['read_file'];
      const result = doesToolInvocationMatch('read_file', invocation, patterns);
      expect(result).toBe(true);
    });
  });
});
76 packages/core/src/utils/tool-utils.ts Normal file
@@ -0,0 +1,76 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import type { AnyDeclarativeTool, AnyToolInvocation } from '../index.js';
import { isTool } from '../index.js';

const SHELL_TOOL_NAMES = ['run_shell_command', 'ShellTool'];

/**
 * Checks if a tool invocation matches any of a list of patterns.
 *
 * @param toolOrToolName The tool object or the name of the tool being invoked.
 * @param invocation The invocation object for the tool.
 * @param patterns A list of patterns to match against.
 *   Patterns can be:
 *   - A tool name (e.g., "ReadFileTool") to match any invocation of that tool.
 *   - A tool name with a prefix (e.g., "ShellTool(git status)") to match
 *     invocations where the arguments start with that prefix.
 * @returns True if the invocation matches any pattern, false otherwise.
 */
export function doesToolInvocationMatch(
  toolOrToolName: AnyDeclarativeTool | string,
  invocation: AnyToolInvocation,
  patterns: string[],
): boolean {
  let toolNames: string[];
  if (isTool(toolOrToolName)) {
    toolNames = [toolOrToolName.name, toolOrToolName.constructor.name];
  } else {
    toolNames = [toolOrToolName as string];
  }

  if (toolNames.some((name) => SHELL_TOOL_NAMES.includes(name))) {
    toolNames = [...new Set([...toolNames, ...SHELL_TOOL_NAMES])];
  }

  for (const pattern of patterns) {
    const openParen = pattern.indexOf('(');

    if (openParen === -1) {
      // No arguments, just a tool name
      if (toolNames.includes(pattern)) {
        return true;
      }
      continue;
    }

    const patternToolName = pattern.substring(0, openParen);
    if (!toolNames.includes(patternToolName)) {
      continue;
    }

    if (!pattern.endsWith(')')) {
      continue;
    }

    const argPattern = pattern.substring(openParen + 1, pattern.length - 1);

    if (
      'command' in invocation.params &&
      toolNames.includes('run_shell_command')
    ) {
      const argValue = String(
        (invocation.params as { command: string }).command,
      );
      if (argValue === argPattern || argValue.startsWith(argPattern + ' ')) {
        return true;
      }
    }
  }

  return false;
}
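In short: a bare tool name matches any invocation of that tool, while a Name(prefix) pattern matches shell commands on word boundaries. A quick sketch of those semantics, mirroring the tests above:

import { doesToolInvocationMatch } from './tool-utils.js';
import type { AnyToolInvocation } from '../index.js';

const invocation = {
  params: { command: 'git status -v' },
} as AnyToolInvocation;

// Prefix patterns match on a word boundary:
doesToolInvocationMatch('run_shell_command', invocation, ['ShellTool(git status)']); // true

// Partial-word prefixes do not match:
doesToolInvocationMatch(
  'run_shell_command',
  { params: { command: 'git statusx' } } as AnyToolInvocation,
  ['ShellTool(git status)'],
); // false

// A bare tool name matches any invocation of that tool:
doesToolInvocationMatch('read_file', invocation, ['read_file']); // true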
@@ -4,13 +4,9 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import { vi, describe, it, expect, beforeEach, afterEach, Mock } from 'vitest';
import {
  cacheGoogleAccount,
  getCachedGoogleAccount,
  clearCachedGoogleAccount,
  getLifetimeGoogleAccounts,
} from './user_account.js';
import type { Mock } from 'vitest';
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import { UserAccountManager } from './userAccountManager.js';
import * as fs from 'node:fs';
import * as os from 'node:os';
import path from 'node:path';

@@ -23,16 +19,21 @@ vi.mock('os', async (importOriginal) => {
  };
});

describe('user_account', () => {
describe('UserAccountManager', () => {
  let tempHomeDir: string;
  const accountsFile = () =>
    path.join(tempHomeDir, '.qwen', 'google_accounts.json');
  let userAccountManager: UserAccountManager;
  let accountsFile: () => string;

  beforeEach(() => {
    tempHomeDir = fs.mkdtempSync(
      path.join(os.tmpdir(), 'qwen-code-test-home-'),
    );
    (os.homedir as Mock).mockReturnValue(tempHomeDir);
    accountsFile = () =>
      path.join(tempHomeDir, '.qwen', 'google_accounts.json');
    userAccountManager = new UserAccountManager();
  });

  afterEach(() => {
    fs.rmSync(tempHomeDir, { recursive: true, force: true });
    vi.clearAllMocks();

@@ -40,7 +41,7 @@ describe('user_account', () => {

  describe('cacheGoogleAccount', () => {
    it('should create directory and write initial account file', async () => {
      await cacheGoogleAccount('test1@google.com');
      await userAccountManager.cacheGoogleAccount('test1@google.com');

      // Verify Google Account ID was cached
      expect(fs.existsSync(accountsFile())).toBe(true);

@@ -60,7 +61,7 @@ describe('user_account', () => {
        ),
      );

      await cacheGoogleAccount('test3@google.com');
      await userAccountManager.cacheGoogleAccount('test3@google.com');

      expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe(
        JSON.stringify(

@@ -84,8 +85,8 @@ describe('user_account', () => {
          2,
        ),
      );
      await cacheGoogleAccount('test2@google.com');
      await cacheGoogleAccount('test1@google.com');
      await userAccountManager.cacheGoogleAccount('test2@google.com');
      await userAccountManager.cacheGoogleAccount('test1@google.com');

      expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe(
        JSON.stringify(

@@ -103,7 +104,7 @@ describe('user_account', () => {
        .spyOn(console, 'log')
        .mockImplementation(() => {});

      await cacheGoogleAccount('test1@google.com');
      await userAccountManager.cacheGoogleAccount('test1@google.com');

      expect(consoleLogSpy).toHaveBeenCalled();
      expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({

@@ -122,7 +123,7 @@ describe('user_account', () => {
        .spyOn(console, 'log')
        .mockImplementation(() => {});

      await cacheGoogleAccount('test2@google.com');
      await userAccountManager.cacheGoogleAccount('test2@google.com');

      expect(consoleLogSpy).toHaveBeenCalled();
      expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({

@@ -139,19 +140,19 @@ describe('user_account', () => {
        accountsFile(),
        JSON.stringify({ active: 'active@google.com', old: [] }, null, 2),
      );
      const account = getCachedGoogleAccount();
      const account = userAccountManager.getCachedGoogleAccount();
      expect(account).toBe('active@google.com');
    });

    it('should return null if file does not exist', () => {
      const account = getCachedGoogleAccount();
      const account = userAccountManager.getCachedGoogleAccount();
      expect(account).toBeNull();
    });

    it('should return null if file is empty', () => {
      fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
      fs.writeFileSync(accountsFile(), '');
      const account = getCachedGoogleAccount();
      const account = userAccountManager.getCachedGoogleAccount();
      expect(account).toBeNull();
    });

@@ -162,7 +163,7 @@ describe('user_account', () => {
        .spyOn(console, 'log')
        .mockImplementation(() => {});

      const account = getCachedGoogleAccount();
      const account = userAccountManager.getCachedGoogleAccount();

      expect(account).toBeNull();
      expect(consoleLogSpy).toHaveBeenCalled();

@@ -171,7 +172,7 @@ describe('user_account', () => {
    it('should return null if active key is missing', () => {
      fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
      fs.writeFileSync(accountsFile(), JSON.stringify({ old: [] }));
      const account = getCachedGoogleAccount();
      const account = userAccountManager.getCachedGoogleAccount();
      expect(account).toBeNull();
    });
  });

@@ -188,7 +189,7 @@ describe('user_account', () => {
        ),
      );

      await clearCachedGoogleAccount();
      await userAccountManager.clearCachedGoogleAccount();

      const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
      expect(stored.active).toBeNull();

@@ -198,7 +199,7 @@ describe('user_account', () => {
    it('should handle empty file gracefully', async () => {
      fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
      fs.writeFileSync(accountsFile(), '');
      await clearCachedGoogleAccount();
      await userAccountManager.clearCachedGoogleAccount();
      const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
      expect(stored.active).toBeNull();
      expect(stored.old).toEqual([]);

@@ -211,7 +212,7 @@ describe('user_account', () => {
        .spyOn(console, 'log')
        .mockImplementation(() => {});

      await clearCachedGoogleAccount();
      await userAccountManager.clearCachedGoogleAccount();

      expect(consoleLogSpy).toHaveBeenCalled();
      const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));

@@ -226,7 +227,7 @@ describe('user_account', () => {
        JSON.stringify({ active: null, old: ['old1@google.com'] }, null, 2),
      );

      await clearCachedGoogleAccount();
      await userAccountManager.clearCachedGoogleAccount();

      const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
      expect(stored.active).toBeNull();

@@ -247,7 +248,7 @@ describe('user_account', () => {
        ),
      );

      await clearCachedGoogleAccount();
      await userAccountManager.clearCachedGoogleAccount();

      const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
      expect(stored.active).toBeNull();

@@ -257,24 +258,24 @@ describe('user_account', () => {

  describe('getLifetimeGoogleAccounts', () => {
    it('should return 0 if the file does not exist', () => {
      expect(getLifetimeGoogleAccounts()).toBe(0);
      expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
    });

    it('should return 0 if the file is empty', () => {
      fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
      fs.writeFileSync(accountsFile(), '');
      expect(getLifetimeGoogleAccounts()).toBe(0);
      expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
    });

    it('should return 0 if the file is corrupted', () => {
      fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
      fs.writeFileSync(accountsFile(), 'invalid json');
      const consoleLogSpy = vi
      const consoleDebugSpy = vi
        .spyOn(console, 'log')
        .mockImplementation(() => {});

      expect(getLifetimeGoogleAccounts()).toBe(0);
      expect(consoleLogSpy).toHaveBeenCalled();
      expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
      expect(consoleDebugSpy).toHaveBeenCalled();
    });

    it('should return 1 if there is only an active account', () => {

@@ -283,7 +284,7 @@ describe('user_account', () => {
        accountsFile(),
        JSON.stringify({ active: 'test1@google.com', old: [] }),
      );
      expect(getLifetimeGoogleAccounts()).toBe(1);
      expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(1);
    });

    it('should correctly count old accounts when active is null', () => {

@@ -295,7 +296,7 @@ describe('user_account', () => {
          old: ['test1@google.com', 'test2@google.com'],
        }),
      );
      expect(getLifetimeGoogleAccounts()).toBe(2);
      expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(2);
    });

    it('should correctly count both active and old accounts', () => {

@@ -307,7 +308,7 @@ describe('user_account', () => {
          old: ['test1@google.com', 'test2@google.com'],
        }),
      );
      expect(getLifetimeGoogleAccounts()).toBe(3);
      expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(3);
    });

    it('should handle valid JSON with incorrect schema by returning 0', () => {

@@ -320,7 +321,7 @@ describe('user_account', () => {
        .spyOn(console, 'log')
        .mockImplementation(() => {});

      expect(getLifetimeGoogleAccounts()).toBe(0);
      expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
      expect(consoleLogSpy).toHaveBeenCalled();
    });

@@ -333,7 +334,7 @@ describe('user_account', () => {
          old: ['test1@google.com', 'test2@google.com'],
        }),
      );
      expect(getLifetimeGoogleAccounts()).toBe(2);
      expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(2);
    });
  });
});
140 packages/core/src/utils/userAccountManager.ts Normal file
@@ -0,0 +1,140 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import path from 'node:path';
import { promises as fsp, readFileSync } from 'node:fs';
import { Storage } from '../config/storage.js';

interface UserAccounts {
  active: string | null;
  old: string[];
}

export class UserAccountManager {
  private getGoogleAccountsCachePath(): string {
    return Storage.getGoogleAccountsPath();
  }

  /**
   * Parses and validates the string content of an accounts file.
   * @param content The raw string content from the file.
   * @returns A valid UserAccounts object.
   */
  private parseAndValidateAccounts(content: string): UserAccounts {
    const defaultState = { active: null, old: [] };
    if (!content.trim()) {
      return defaultState;
    }

    const parsed = JSON.parse(content);

    // Inlined validation logic
    if (typeof parsed !== 'object' || parsed === null) {
      console.log('Invalid accounts file schema, starting fresh.');
      return defaultState;
    }
    const { active, old } = parsed as Partial<UserAccounts>;
    const isValid =
      (active === undefined || active === null || typeof active === 'string') &&
      (old === undefined ||
        (Array.isArray(old) && old.every((i) => typeof i === 'string')));

    if (!isValid) {
      console.log('Invalid accounts file schema, starting fresh.');
      return defaultState;
    }

    return {
      active: parsed.active ?? null,
      old: parsed.old ?? [],
    };
  }

  private readAccountsSync(filePath: string): UserAccounts {
    const defaultState = { active: null, old: [] };
    try {
      const content = readFileSync(filePath, 'utf-8');
      return this.parseAndValidateAccounts(content);
    } catch (error) {
      if (
        error instanceof Error &&
        'code' in error &&
        error.code === 'ENOENT'
      ) {
        return defaultState;
      }
      console.log('Error during sync read of accounts, starting fresh.', error);
      return defaultState;
    }
  }

  private async readAccounts(filePath: string): Promise<UserAccounts> {
    const defaultState = { active: null, old: [] };
    try {
      const content = await fsp.readFile(filePath, 'utf-8');
      return this.parseAndValidateAccounts(content);
    } catch (error) {
      if (
        error instanceof Error &&
        'code' in error &&
        error.code === 'ENOENT'
      ) {
        return defaultState;
      }
      console.log('Could not parse accounts file, starting fresh.', error);
      return defaultState;
    }
  }

  async cacheGoogleAccount(email: string): Promise<void> {
    const filePath = this.getGoogleAccountsCachePath();
    await fsp.mkdir(path.dirname(filePath), { recursive: true });

    const accounts = await this.readAccounts(filePath);

    if (accounts.active && accounts.active !== email) {
      if (!accounts.old.includes(accounts.active)) {
        accounts.old.push(accounts.active);
      }
    }

    // If the new email was in the old list, remove it
    accounts.old = accounts.old.filter((oldEmail) => oldEmail !== email);

    accounts.active = email;
    await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
  }

  getCachedGoogleAccount(): string | null {
    const filePath = this.getGoogleAccountsCachePath();
    const accounts = this.readAccountsSync(filePath);
    return accounts.active;
  }

  getLifetimeGoogleAccounts(): number {
    const filePath = this.getGoogleAccountsCachePath();
    const accounts = this.readAccountsSync(filePath);
    const allAccounts = new Set(accounts.old);
    if (accounts.active) {
      allAccounts.add(accounts.active);
    }
    return allAccounts.size;
  }

  async clearCachedGoogleAccount(): Promise<void> {
    const filePath = this.getGoogleAccountsCachePath();
    const accounts = await this.readAccounts(filePath);

    if (accounts.active) {
      if (!accounts.old.includes(accounts.active)) {
        accounts.old.push(accounts.active);
      }
      accounts.active = null;
    }

    await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
  }
}
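A short usage sketch of the class above, run inside an async context; the email addresses are illustrative:

import { UserAccountManager } from './userAccountManager.js';

const manager = new UserAccountManager();

await manager.cacheGoogleAccount('one@example.com'); // becomes the active account
await manager.cacheGoogleAccount('two@example.com'); // 'one@...' is pushed onto the old list
manager.getCachedGoogleAccount(); // 'two@example.com'
manager.getLifetimeGoogleAccounts(); // 2: active plus old, de-duplicated
await manager.clearCachedGoogleAccount(); // active becomes null and joins the old list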
@@ -1,131 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import path from 'node:path';
import { promises as fsp, readFileSync } from 'node:fs';
import * as os from 'os';
import { QWEN_DIR, GOOGLE_ACCOUNTS_FILENAME } from './paths.js';

interface UserAccounts {
  active: string | null;
  old: string[];
}

function getGoogleAccountsCachePath(): string {
  return path.join(os.homedir(), QWEN_DIR, GOOGLE_ACCOUNTS_FILENAME);
}

/**
 * Parses and validates the string content of an accounts file.
 * @param content The raw string content from the file.
 * @returns A valid UserAccounts object.
 */
function parseAndValidateAccounts(content: string): UserAccounts {
  const defaultState = { active: null, old: [] };
  if (!content.trim()) {
    return defaultState;
  }

  const parsed = JSON.parse(content);

  // Inlined validation logic
  if (typeof parsed !== 'object' || parsed === null) {
    console.log('Invalid accounts file schema, starting fresh.');
    return defaultState;
  }
  const { active, old } = parsed as Partial<UserAccounts>;
  const isValid =
    (active === undefined || active === null || typeof active === 'string') &&
    (old === undefined ||
      (Array.isArray(old) && old.every((i) => typeof i === 'string')));

  if (!isValid) {
    console.log('Invalid accounts file schema, starting fresh.');
    return defaultState;
  }

  return {
    active: parsed.active ?? null,
    old: parsed.old ?? [],
  };
}

function readAccountsSync(filePath: string): UserAccounts {
  const defaultState = { active: null, old: [] };
  try {
    const content = readFileSync(filePath, 'utf-8');
    return parseAndValidateAccounts(content);
  } catch (error) {
    if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
      return defaultState;
    }
    console.log('Error during sync read of accounts, starting fresh.', error);
    return defaultState;
  }
}

async function readAccounts(filePath: string): Promise<UserAccounts> {
  const defaultState = { active: null, old: [] };
  try {
    const content = await fsp.readFile(filePath, 'utf-8');
    return parseAndValidateAccounts(content);
  } catch (error) {
    if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
      return defaultState;
    }
    console.log('Could not parse accounts file, starting fresh.', error);
    return defaultState;
  }
}

export async function cacheGoogleAccount(email: string): Promise<void> {
  const filePath = getGoogleAccountsCachePath();
  await fsp.mkdir(path.dirname(filePath), { recursive: true });

  const accounts = await readAccounts(filePath);

  if (accounts.active && accounts.active !== email) {
    if (!accounts.old.includes(accounts.active)) {
      accounts.old.push(accounts.active);
    }
  }

  // If the new email was in the old list, remove it
  accounts.old = accounts.old.filter((oldEmail) => oldEmail !== email);

  accounts.active = email;
  await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
}

export function getCachedGoogleAccount(): string | null {
  const filePath = getGoogleAccountsCachePath();
  const accounts = readAccountsSync(filePath);
  return accounts.active;
}

export function getLifetimeGoogleAccounts(): number {
  const filePath = getGoogleAccountsCachePath();
  const accounts = readAccountsSync(filePath);
  const allAccounts = new Set(accounts.old);
  if (accounts.active) {
    allAccounts.add(accounts.active);
  }
  return allAccounts.size;
}

export async function clearCachedGoogleAccount(): Promise<void> {
  const filePath = getGoogleAccountsCachePath();
  const accounts = await readAccounts(filePath);

  if (accounts.active) {
    if (!accounts.old.includes(accounts.active)) {
      accounts.old.push(accounts.active);
    }
    accounts.active = null;
  }

  await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
}
@@ -1,24 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect } from 'vitest';
import { getInstallationId } from './user_id.js';

describe('user_id', () => {
  describe('getInstallationId', () => {
    it('should return a valid UUID format string', () => {
      const installationId = getInstallationId();

      expect(installationId).toBeDefined();
      expect(typeof installationId).toBe('string');
      expect(installationId.length).toBeGreaterThan(0);

      // Should return the same ID on subsequent calls (consistent)
      const secondCall = getInstallationId();
      expect(secondCall).toBe(installationId);
    });
  });
});

@@ -1,58 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import * as os from 'os';
import * as fs from 'fs';
import * as path from 'path';
import { randomUUID } from 'crypto';
import { QWEN_DIR } from './paths.js';

const homeDir = os.homedir() ?? '';
const geminiDir = path.join(homeDir, QWEN_DIR);
const installationIdFile = path.join(geminiDir, 'installation_id');

function ensureGeminiDirExists() {
  if (!fs.existsSync(geminiDir)) {
    fs.mkdirSync(geminiDir, { recursive: true });
  }
}

function readInstallationIdFromFile(): string | null {
  if (fs.existsSync(installationIdFile)) {
    const installationid = fs.readFileSync(installationIdFile, 'utf-8').trim();
    return installationid || null;
  }
  return null;
}

function writeInstallationIdToFile(installationId: string) {
  fs.writeFileSync(installationIdFile, installationId, 'utf-8');
}

/**
 * Retrieves the installation ID from a file, creating it if it doesn't exist.
 * This ID is used for unique user installation tracking.
 * @returns A UUID string for the user.
 */
export function getInstallationId(): string {
  try {
    ensureGeminiDirExists();
    let installationId = readInstallationIdFromFile();

    if (!installationId) {
      installationId = randomUUID();
      writeInstallationIdToFile(installationId);
    }

    return installationId;
  } catch (error) {
    console.error(
      'Error accessing installation ID file, generating ephemeral ID:',
      error,
    );
    return '123456789';
  }
}
@@ -5,9 +5,9 @@
 */

import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import { WorkspaceContext } from './workspaceContext.js';

describe('WorkspaceContext with real filesystem', () => {

@@ -48,13 +48,6 @@ describe('WorkspaceContext with real filesystem', () => {
    expect(directories).toEqual([cwd, otherDir]);
  });

  it('should reject non-existent directories', () => {
    const nonExistentDir = path.join(tempDir, 'does-not-exist');
    expect(() => {
      new WorkspaceContext(cwd, [nonExistentDir]);
    }).toThrow('Directory does not exist');
  });

  it('should handle empty initialization', () => {
    const workspaceContext = new WorkspaceContext(cwd, []);
    const directories = workspaceContext.getDirectories();

@@ -81,15 +74,6 @@ describe('WorkspaceContext with real filesystem', () => {
    expect(directories).toEqual([cwd, otherDir]);
  });

  it('should reject non-existent directories', () => {
    const nonExistentDir = path.join(tempDir, 'does-not-exist');
    const workspaceContext = new WorkspaceContext(cwd);

    expect(() => {
      workspaceContext.addDirectory(nonExistentDir);
    }).toThrow('Directory does not exist');
  });

  it('should prevent duplicate directories', () => {
    const workspaceContext = new WorkspaceContext(cwd);
    workspaceContext.addDirectory(otherDir);

@@ -387,3 +371,52 @@ describe('WorkspaceContext with real filesystem', () => {
    });
  });
});

describe('WorkspaceContext with optional directories', () => {
  let tempDir: string;
  let cwd: string;
  let existingDir1: string;
  let existingDir2: string;
  let nonExistentDir: string;

  beforeEach(() => {
    tempDir = fs.realpathSync(
      fs.mkdtempSync(path.join(os.tmpdir(), 'workspace-context-optional-')),
    );
    cwd = path.join(tempDir, 'project');
    existingDir1 = path.join(tempDir, 'existing-dir-1');
    existingDir2 = path.join(tempDir, 'existing-dir-2');
    nonExistentDir = path.join(tempDir, 'non-existent-dir');

    fs.mkdirSync(cwd, { recursive: true });
    fs.mkdirSync(existingDir1, { recursive: true });
    fs.mkdirSync(existingDir2, { recursive: true });

    vi.spyOn(console, 'warn').mockImplementation(() => {});
  });

  afterEach(() => {
    fs.rmSync(tempDir, { recursive: true, force: true });
    vi.restoreAllMocks();
  });

  it('should skip a missing optional directory and log a warning', () => {
    const workspaceContext = new WorkspaceContext(cwd, [
      nonExistentDir,
      existingDir1,
    ]);
    const directories = workspaceContext.getDirectories();
    expect(directories).toEqual([cwd, existingDir1]);
    expect(console.warn).toHaveBeenCalledTimes(1);
    expect(console.warn).toHaveBeenCalledWith(
      `[WARN] Skipping unreadable directory: ${nonExistentDir} (Directory does not exist: ${nonExistentDir})`,
    );
  });

  it('should include an existing optional directory', () => {
    const workspaceContext = new WorkspaceContext(cwd, [existingDir1]);
    const directories = workspaceContext.getDirectories();
    expect(directories).toEqual([cwd, existingDir1]);
    expect(console.warn).not.toHaveBeenCalled();
  });
});
22 packages/core/src/utils/workspaceContext.ts Normal file → Executable file
@@ -5,8 +5,9 @@
 */

import { isNodeError } from '../utils/errors.js';
import * as fs from 'fs';
import * as path from 'path';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as process from 'node:process';

export type Unsubscribe = () => void;

@@ -30,7 +31,6 @@ export class WorkspaceContext {
    for (const additionalDirectory of additionalDirectories) {
      this.addDirectory(additionalDirectory);
    }

    this.initialDirectories = new Set(this.directories);
  }

@@ -64,12 +64,18 @@ export class WorkspaceContext {
   * @param basePath Optional base path for resolving relative paths (defaults to cwd)
   */
  addDirectory(directory: string, basePath: string = process.cwd()): void {
    const resolved = this.resolveAndValidateDir(directory, basePath);
    if (this.directories.has(resolved)) {
      return;
    try {
      const resolved = this.resolveAndValidateDir(directory, basePath);
      if (this.directories.has(resolved)) {
        return;
      }
      this.directories.add(resolved);
      this.notifyDirectoriesChanged();
    } catch (err) {
      console.warn(
        `[WARN] Skipping unreadable directory: ${directory} (${err instanceof Error ? err.message : String(err)})`,
      );
    }
    this.directories.add(resolved);
    this.notifyDirectoriesChanged();
  }

  private resolveAndValidateDir(