chore: sync gemini-cli v0.1.19

This commit is contained in:
tanzhenxin
2025-08-18 19:55:46 +08:00
244 changed files with 19407 additions and 5030 deletions

View File

@@ -26,6 +26,17 @@ describe('CrawlCache', () => {
const key2 = getCacheKey('/foo', 'baz');
expect(key1).not.toBe(key2);
});
it('should generate a different hash for different maxDepth values', () => {
  const keys = [
    getCacheKey('/foo', 'bar', 1),
    getCacheKey('/foo', 'bar', 2),
    getCacheKey('/foo', 'bar', undefined),
    getCacheKey('/foo', 'bar'),
  ];
  // Each distinct depth (1, 2, undefined) must yield a distinct key.
  expect(new Set(keys.slice(0, 3)).size).toBe(3);
  // Passing undefined explicitly is equivalent to omitting the argument.
  expect(keys[2]).toBe(keys[3]);
});
});
describe('in-memory cache operations', () => {

View File

@@ -17,10 +17,14 @@ const cacheTimers = new Map<string, NodeJS.Timeout>();
/**
 * Computes a stable cache key for a crawl result.
 *
 * The key is a SHA-256 digest of the crawl directory, the ignore-rule
 * fingerprint, and (when provided) the maximum crawl depth, so changing any
 * of these inputs invalidates the cached file list.
 *
 * A NUL separator is hashed between fields so adjacent values cannot blur
 * together: without it, ('/a', 'b2') and ('/a', 'b', 2) — or ('/ab', 'c')
 * and ('/a', 'bc') — would hash identically and cause false cache hits.
 *
 * @param directory Absolute directory the crawl starts from.
 * @param ignoreContent Fingerprint of the active ignore rules.
 * @param maxDepth Optional crawl depth limit; omitting it and passing
 *   `undefined` produce the same key.
 * @returns Hex-encoded SHA-256 digest.
 */
export const getCacheKey = (
  directory: string,
  ignoreContent: string,
  maxDepth?: number,
): string => {
  const hash = crypto.createHash('sha256');
  hash.update(directory);
  hash.update('\0'); // field separator: prevents cross-field collisions
  hash.update(ignoreContent);
  if (maxDepth !== undefined) {
    hash.update('\0');
    hash.update(String(maxDepth));
  }
  return hash.digest('hex');
};

View File

@@ -290,6 +290,30 @@ describe('FileSearch', () => {
expect(results).toEqual(['src/file1.js', 'src/file2.js']); // Assuming alphabetical sort
});
it('should use fzf for fuzzy matching when pattern does not contain wildcards', async () => {
tmpDir = await createTmpDir({
src: {
'main.js': '',
'util.ts': '',
'style.css': '',
},
});
const fileSearch = new FileSearch({
projectRoot: tmpDir,
useGitignore: false,
useGeminiignore: false,
ignoreDirs: [],
cache: false,
cacheTtl: 0,
});
await fileSearch.initialize();
const results = await fileSearch.search('sst');
expect(results).toEqual(['src/style.css']);
});
it('should return empty array when no matches are found', async () => {
tmpDir = await createTmpDir({
src: ['file1.js'],
@@ -446,6 +470,46 @@ describe('FileSearch', () => {
expect(crawlSpy).toHaveBeenCalledTimes(1);
});
it('should miss the cache when maxDepth changes', async () => {
tmpDir = await createTmpDir({ 'file1.js': '' });
const getOptions = (maxDepth?: number) => ({
projectRoot: tmpDir,
useGitignore: false,
useGeminiignore: false,
ignoreDirs: [],
cache: true,
cacheTtl: 10000,
maxDepth,
});
// 1. First search with maxDepth: 1, should trigger a crawl.
const fs1 = new FileSearch(getOptions(1));
const crawlSpy1 = vi.spyOn(
fs1 as FileSearchWithPrivateMethods,
'performCrawl',
);
await fs1.initialize();
expect(crawlSpy1).toHaveBeenCalledTimes(1);
// 2. Second search with maxDepth: 2, should be a cache miss and trigger a crawl.
const fs2 = new FileSearch(getOptions(2));
const crawlSpy2 = vi.spyOn(
fs2 as FileSearchWithPrivateMethods,
'performCrawl',
);
await fs2.initialize();
expect(crawlSpy2).toHaveBeenCalledTimes(1);
// 3. Third search with maxDepth: 1 again, should be a cache hit.
const fs3 = new FileSearch(getOptions(1));
const crawlSpy3 = vi.spyOn(
fs3 as FileSearchWithPrivateMethods,
'performCrawl',
);
await fs3.initialize();
expect(crawlSpy3).not.toHaveBeenCalled();
});
});
it('should handle empty or commented-only ignore files', async () => {
@@ -639,4 +703,109 @@ describe('FileSearch', () => {
// 3. Assert that the maxResults limit was respected, even with a cache hit.
expect(limitedResults).toEqual(['file1.js', 'file2.js']);
});
describe('with maxDepth', () => {
  // Creates and initializes a FileSearch over tmpDir limited to `maxDepth`;
  // all tests in this suite differ only in the depth they pass.
  const createSearch = async (maxDepth?: number): Promise<FileSearch> => {
    const fileSearch = new FileSearch({
      projectRoot: tmpDir,
      useGitignore: false,
      useGeminiignore: false,
      ignoreDirs: [],
      cache: false,
      cacheTtl: 0,
      maxDepth,
    });
    await fileSearch.initialize();
    return fileSearch;
  };

  beforeEach(async () => {
    // Three nested directory levels below the root, one file per level.
    tmpDir = await createTmpDir({
      'file-root.txt': '',
      level1: {
        'file-level1.txt': '',
        level2: {
          'file-level2.txt': '',
          level3: {
            'file-level3.txt': '',
          },
        },
      },
    });
  });

  it('should only search top-level files when maxDepth is 0', async () => {
    const fileSearch = await createSearch(0);
    expect(await fileSearch.search('')).toEqual(['level1/', 'file-root.txt']);
  });

  it('should search one level deep when maxDepth is 1', async () => {
    const fileSearch = await createSearch(1);
    expect(await fileSearch.search('')).toEqual([
      'level1/',
      'level1/level2/',
      'file-root.txt',
      'level1/file-level1.txt',
    ]);
  });

  it('should search two levels deep when maxDepth is 2', async () => {
    const fileSearch = await createSearch(2);
    expect(await fileSearch.search('')).toEqual([
      'level1/',
      'level1/level2/',
      'level1/level2/level3/',
      'file-root.txt',
      'level1/file-level1.txt',
      'level1/level2/file-level2.txt',
    ]);
  });

  it('should perform a full recursive search when maxDepth is undefined', async () => {
    const fileSearch = await createSearch(undefined); // explicitly undefined
    expect(await fileSearch.search('')).toEqual([
      'level1/',
      'level1/level2/',
      'level1/level2/level3/',
      'file-root.txt',
      'level1/file-level1.txt',
      'level1/level2/file-level2.txt',
      'level1/level2/level3/file-level3.txt',
    ]);
  });
});
});

View File

@@ -11,6 +11,7 @@ import picomatch from 'picomatch';
import { Ignore } from './ignore.js';
import { ResultCache } from './result-cache.js';
import * as cache from './crawlCache.js';
import { AsyncFzf, FzfResultItem } from 'fzf';
export type FileSearchOptions = {
projectRoot: string;
@@ -19,6 +20,7 @@ export type FileSearchOptions = {
useGeminiignore: boolean;
cache: boolean;
cacheTtl: number;
maxDepth?: number;
};
export class AbortError extends Error {
@@ -91,6 +93,7 @@ export class FileSearch {
private readonly ignore: Ignore = new Ignore();
private resultCache: ResultCache | undefined;
private allFiles: string[] = [];
private fzf: AsyncFzf<string[]> | undefined;
/**
* Constructs a new `FileSearch` instance.
@@ -122,22 +125,38 @@ export class FileSearch {
pattern: string,
options: SearchOptions = {},
): Promise<string[]> {
if (!this.resultCache) {
if (!this.resultCache || !this.fzf) {
throw new Error('Engine not initialized. Call initialize() first.');
}
pattern = pattern || '*';
let filteredCandidates;
const { files: candidates, isExactMatch } =
await this.resultCache!.get(pattern);
let filteredCandidates;
if (isExactMatch) {
// Use the cached result.
filteredCandidates = candidates;
} else {
// Apply the user's picomatch pattern filter
filteredCandidates = await filter(candidates, pattern, options.signal);
this.resultCache!.set(pattern, filteredCandidates);
let shouldCache = true;
if (pattern.includes('*')) {
filteredCandidates = await filter(candidates, pattern, options.signal);
} else {
filteredCandidates = await this.fzf
.find(pattern)
.then((results: Array<FzfResultItem<string>>) =>
results.map((entry: FzfResultItem<string>) => entry.item),
)
.catch(() => {
shouldCache = false;
return [];
});
}
if (shouldCache) {
this.resultCache!.set(pattern, filteredCandidates);
}
}
// Trade-off: We apply a two-stage filtering process.
@@ -215,6 +234,7 @@ export class FileSearch {
const cacheKey = cache.getCacheKey(
this.absoluteDir,
this.ignore.getFingerprint(),
this.options.maxDepth,
);
const cachedResults = cache.read(cacheKey);
@@ -230,6 +250,7 @@ export class FileSearch {
const cacheKey = cache.getCacheKey(
this.absoluteDir,
this.ignore.getFingerprint(),
this.options.maxDepth,
);
cache.write(cacheKey, this.allFiles, this.options.cacheTtl * 1000);
}
@@ -257,6 +278,10 @@ export class FileSearch {
return dirFilter(`${relativePath}/`);
});
if (this.options.maxDepth !== undefined) {
api.withMaxDepth(this.options.maxDepth);
}
return api.crawl(this.absoluteDir).withPromise();
}
@@ -265,5 +290,11 @@ export class FileSearch {
*/
private buildResultCache(): void {
  // Pattern-keyed cache so repeated searches reuse earlier filtering work.
  this.resultCache = new ResultCache(this.allFiles, this.absoluteDir);

  // Pick the fzf matching algorithm by search-space size: v1 only looks at
  // the first occurrence of the pattern, which makes it much faster, so we
  // use it above ~20k files where the higher-quality v2 is just too slow.
  const fuzzy = this.allFiles.length > 20000 ? 'v1' : 'v2';
  this.fzf = new AsyncFzf(this.allFiles, { fuzzy });
}
}