Mirror of https://github.com/QwenLM/qwen-code.git (synced 2025-12-20 16:57:46 +00:00)
feat(commands): Enable @file processing in TOML commands (#6716)
@@ -6,7 +6,6 @@

import { describe, it, expect } from 'vitest';
import {
  getResponseText,
  getResponseTextFromParts,
  getFunctionCalls,
  getFunctionCallsFromParts,
@@ -69,45 +68,6 @@ const minimalMockResponse = (
});

describe('generateContentResponseUtilities', () => {
  describe('getResponseText', () => {
    it('should return undefined for no candidates', () => {
      expect(getResponseText(minimalMockResponse(undefined))).toBeUndefined();
    });
    it('should return undefined for empty candidates array', () => {
      expect(getResponseText(minimalMockResponse([]))).toBeUndefined();
    });
    it('should return undefined for no parts', () => {
      const response = mockResponse([]);
      expect(getResponseText(response)).toBeUndefined();
    });
    it('should extract text from a single text part', () => {
      const response = mockResponse([mockTextPart('Hello')]);
      expect(getResponseText(response)).toBe('Hello');
    });
    it('should concatenate text from multiple text parts', () => {
      const response = mockResponse([
        mockTextPart('Hello '),
        mockTextPart('World'),
      ]);
      expect(getResponseText(response)).toBe('Hello World');
    });
    it('should ignore function call parts', () => {
      const response = mockResponse([
        mockTextPart('Hello '),
        mockFunctionCallPart('testFunc'),
        mockTextPart('World'),
      ]);
      expect(getResponseText(response)).toBe('Hello World');
    });
    it('should return undefined if only function call parts exist', () => {
      const response = mockResponse([
        mockFunctionCallPart('testFunc'),
        mockFunctionCallPart('anotherFunc'),
      ]);
      expect(getResponseText(response)).toBeUndefined();
    });
  });

  describe('getResponseTextFromParts', () => {
    it('should return undefined for no parts', () => {
      expect(getResponseTextFromParts([])).toBeUndefined();
@@ -9,23 +9,7 @@ import type {
  Part,
  FunctionCall,
} from '@google/genai';

export function getResponseText(
  response: GenerateContentResponse,
): string | undefined {
  const parts = response.candidates?.[0]?.content?.parts;
  if (!parts) {
    return undefined;
  }
  const textSegments = parts
    .map((part) => part.text)
    .filter((text): text is string => typeof text === 'string');

  if (textSegments.length === 0) {
    return undefined;
  }
  return textSegments.join('');
}
import { getResponseText } from './partUtils.js';

export function getResponseTextFromParts(parts: Part[]): string | undefined {
  if (!parts) {
@@ -5,8 +5,13 @@
 */

import { describe, it, expect } from 'vitest';
import { partToString, getResponseText } from './partUtils.js';
import type { GenerateContentResponse, Part } from '@google/genai';
import {
  partToString,
  getResponseText,
  flatMapTextParts,
  appendToLastTextPart,
} from './partUtils.js';
import type { GenerateContentResponse, Part, PartUnion } from '@google/genai';

const mockResponse = (
  parts?: Array<{ text?: string; functionCall?: unknown }>,
@@ -162,5 +167,135 @@ describe('partUtils', () => {
      const result = mockResponse([]);
      expect(getResponseText(result)).toBeNull();
    });

    it('should return null if the first candidate has no content property', () => {
      const response: GenerateContentResponse = {
        candidates: [
          {
            index: 0,
          },
        ],
        promptFeedback: { safetyRatings: [] },
        text: undefined,
        data: undefined,
        functionCalls: undefined,
        executableCode: undefined,
        codeExecutionResult: undefined,
      };
      expect(getResponseText(response)).toBeNull();
    });
  });

  describe('flatMapTextParts', () => {
    // A simple async transform function that splits a string into character parts.
    const splitCharsTransform = async (text: string): Promise<PartUnion[]> =>
      text.split('').map((char) => ({ text: char }));

    it('should return an empty array for empty input', async () => {
      const result = await flatMapTextParts([], splitCharsTransform);
      expect(result).toEqual([]);
    });

    it('should transform a simple string input', async () => {
      const result = await flatMapTextParts('hi', splitCharsTransform);
      expect(result).toEqual([{ text: 'h' }, { text: 'i' }]);
    });

    it('should transform a single text part object', async () => {
      const result = await flatMapTextParts(
        { text: 'cat' },
        splitCharsTransform,
      );
      expect(result).toEqual([{ text: 'c' }, { text: 'a' }, { text: 't' }]);
    });

    it('should transform an array of text parts and flatten the result', async () => {
      // A transform that duplicates the text to test the "flatMap" behavior.
      const duplicateTransform = async (text: string): Promise<PartUnion[]> => [
        { text: `${text}` },
        { text: `${text}` },
      ];
      const parts = [{ text: 'a' }, { text: 'b' }];
      const result = await flatMapTextParts(parts, duplicateTransform);
      expect(result).toEqual([
        { text: 'a' },
        { text: 'a' },
        { text: 'b' },
        { text: 'b' },
      ]);
    });

    it('should pass through non-text parts unmodified', async () => {
      const nonTextPart: Part = { functionCall: { name: 'do_stuff' } };
      const result = await flatMapTextParts(nonTextPart, splitCharsTransform);
      expect(result).toEqual([nonTextPart]);
    });

    it('should handle a mix of text and non-text parts in an array', async () => {
      const nonTextPart: Part = {
        inlineData: { mimeType: 'image/jpeg', data: '' },
      };
      const parts: PartUnion[] = [{ text: 'go' }, nonTextPart, ' stop'];
      const result = await flatMapTextParts(parts, splitCharsTransform);
      expect(result).toEqual([
        { text: 'g' },
        { text: 'o' },
        nonTextPart, // Should be passed through
        { text: ' ' },
        { text: 's' },
        { text: 't' },
        { text: 'o' },
        { text: 'p' },
      ]);
    });

    it('should handle a transform that returns an empty array', async () => {
      const removeTransform = async (_text: string): Promise<PartUnion[]> => [];
      const parts: PartUnion[] = [
        { text: 'remove' },
        { functionCall: { name: 'keep' } },
      ];
      const result = await flatMapTextParts(parts, removeTransform);
      expect(result).toEqual([{ functionCall: { name: 'keep' } }]);
    });
  });

  describe('appendToLastTextPart', () => {
    it('should append to an empty prompt', () => {
      const prompt: PartUnion[] = [];
      const result = appendToLastTextPart(prompt, 'new text');
      expect(result).toEqual([{ text: 'new text' }]);
    });

    it('should append to a prompt with a string as the last part', () => {
      const prompt: PartUnion[] = ['first part'];
      const result = appendToLastTextPart(prompt, 'new text');
      expect(result).toEqual(['first part\n\nnew text']);
    });

    it('should append to a prompt with a text part object as the last part', () => {
      const prompt: PartUnion[] = [{ text: 'first part' }];
      const result = appendToLastTextPart(prompt, 'new text');
      expect(result).toEqual([{ text: 'first part\n\nnew text' }]);
    });

    it('should append a new text part if the last part is not a text part', () => {
      const nonTextPart: Part = { functionCall: { name: 'do_stuff' } };
      const prompt: PartUnion[] = [nonTextPart];
      const result = appendToLastTextPart(prompt, 'new text');
      expect(result).toEqual([nonTextPart, { text: '\n\nnew text' }]);
    });

    it('should not append anything if the text to append is empty', () => {
      const prompt: PartUnion[] = ['first part'];
      const result = appendToLastTextPart(prompt, '');
      expect(result).toEqual(['first part']);
    });

    it('should use a custom separator', () => {
      const prompt: PartUnion[] = ['first part'];
      const result = appendToLastTextPart(prompt, 'new text', '---');
      expect(result).toEqual(['first part---new text']);
    });
  });
});
@@ -8,6 +8,7 @@ import type {
  GenerateContentResponse,
  PartListUnion,
  Part,
  PartUnion,
} from '@google/genai';

/**
@@ -87,3 +88,82 @@ export function getResponseText(
  }
  return null;
}

/**
 * Asynchronously maps over a PartListUnion, applying a transformation function
 * to the text content of each text-based part.
 *
 * @param parts The PartListUnion to process.
 * @param transform A function that takes a string of text and returns a Promise
 *   resolving to an array of new PartUnions.
 * @returns A Promise that resolves to a new array of PartUnions with the
 *   transformations applied.
 */
export async function flatMapTextParts(
  parts: PartListUnion,
  transform: (text: string) => Promise<PartUnion[]>,
): Promise<PartUnion[]> {
  const result: PartUnion[] = [];
  const partArray = Array.isArray(parts)
    ? parts
    : typeof parts === 'string'
      ? [{ text: parts }]
      : [parts];

  for (const part of partArray) {
    let textToProcess: string | undefined;
    if (typeof part === 'string') {
      textToProcess = part;
    } else if ('text' in part) {
      textToProcess = part.text;
    }

    if (textToProcess !== undefined) {
      const transformedParts = await transform(textToProcess);
      result.push(...transformedParts);
    } else {
      // Pass through non-text parts unmodified.
      result.push(part);
    }
  }
  return result;
}

/**
 * Appends a string of text to the last text part of a prompt, or adds a new
 * text part if the last part is not a text part.
 *
 * @param prompt The prompt to modify.
 * @param textToAppend The text to append to the prompt.
 * @param separator The separator to add between existing text and the new text.
 * @returns The modified prompt.
 */
export function appendToLastTextPart(
  prompt: PartUnion[],
  textToAppend: string,
  separator = '\n\n',
): PartUnion[] {
  if (!textToAppend) {
    return prompt;
  }

  if (prompt.length === 0) {
    return [{ text: textToAppend }];
  }

  const newPrompt = [...prompt];
  const lastPart = newPrompt.at(-1);

  if (typeof lastPart === 'string') {
    newPrompt[newPrompt.length - 1] = `${lastPart}${separator}${textToAppend}`;
  } else if (lastPart && 'text' in lastPart) {
    newPrompt[newPrompt.length - 1] = {
      ...lastPart,
      text: `${lastPart.text}${separator}${textToAppend}`,
    };
  } else {
    newPrompt.push({ text: `${separator}${textToAppend}` });
  }

  return newPrompt;
}
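The two helpers above are generic plumbing for prompt assembly: flatMapTextParts runs every text part of a prompt through an async transform (non-text parts pass through untouched), and appendToLastTextPart tacks extra text onto the end of a prompt. A minimal usage sketch follows; the names expandFileReference and buildPrompt and the import path are illustrative assumptions, not part of this change.

import { flatMapTextParts, appendToLastTextPart } from './partUtils.js';
import type { PartUnion } from '@google/genai';

// Hypothetical transform: a part whose text is an @-reference becomes a
// placeholder for that file's contents; any other text is kept as-is.
const expandFileReference = async (text: string): Promise<PartUnion[]> =>
  text.startsWith('@') ? [{ text: `<contents of ${text.slice(1)}>` }] : [{ text }];

async function buildPrompt(userText: string): Promise<PartUnion[]> {
  // Expand @file references inside every text part of the prompt.
  const expanded = await flatMapTextParts([{ text: userText }], expandFileReference);
  // Append trailing instructions to the last text part (or as a new part).
  return appendToLastTextPart(expanded, 'Respond concisely.');
}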
packages/core/src/utils/pathReader.test.ts (new file, 407 lines)
@@ -0,0 +1,407 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, afterEach, vi } from 'vitest';
import mock from 'mock-fs';
import * as path from 'node:path';
import { WorkspaceContext } from './workspaceContext.js';
import { readPathFromWorkspace } from './pathReader.js';
import type { Config } from '../config/config.js';
import { StandardFileSystemService } from '../services/fileSystemService.js';
import type { FileDiscoveryService } from '../services/fileDiscoveryService.js';

// --- Helper for creating a mock Config object ---
// We use the actual implementations of WorkspaceContext and FileSystemService
// to test the integration against mock-fs.
const createMockConfig = (
  cwd: string,
  otherDirs: string[] = [],
  mockFileService?: FileDiscoveryService,
): Config => {
  const workspace = new WorkspaceContext(cwd, otherDirs);
  const fileSystemService = new StandardFileSystemService();
  return {
    getWorkspaceContext: () => workspace,
    // TargetDir is used by processSingleFileContent to generate relative paths in errors/output
    getTargetDir: () => cwd,
    getFileSystemService: () => fileSystemService,
    getFileService: () => mockFileService,
  } as unknown as Config;
};

describe('readPathFromWorkspace', () => {
  const CWD = path.resolve('/test/cwd');
  const OTHER_DIR = path.resolve('/test/other');
  const OUTSIDE_DIR = path.resolve('/test/outside');

  afterEach(() => {
    mock.restore();
    vi.resetAllMocks();
  });

  it('should read a text file from the CWD', async () => {
    mock({
      [CWD]: {
        'file.txt': 'hello from cwd',
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [], mockFileService);
    const result = await readPathFromWorkspace('file.txt', config);
    // Expect [string] for text content
    expect(result).toEqual(['hello from cwd']);
    expect(mockFileService.filterFiles).toHaveBeenCalled();
  });

  it('should read a file from a secondary workspace directory', async () => {
    mock({
      [CWD]: {},
      [OTHER_DIR]: {
        'file.txt': 'hello from other dir',
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [OTHER_DIR], mockFileService);
    const result = await readPathFromWorkspace('file.txt', config);
    expect(result).toEqual(['hello from other dir']);
  });

  it('should prioritize CWD when file exists in both CWD and secondary dir', async () => {
    mock({
      [CWD]: {
        'file.txt': 'hello from cwd',
      },
      [OTHER_DIR]: {
        'file.txt': 'hello from other dir',
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [OTHER_DIR], mockFileService);
    const result = await readPathFromWorkspace('file.txt', config);
    expect(result).toEqual(['hello from cwd']);
  });

  it('should read an image file and return it as inlineData (Part object)', async () => {
    // Use a real PNG header for robustness
    const imageData = Buffer.from([
      0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a,
    ]);
    mock({
      [CWD]: {
        'image.png': imageData,
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [], mockFileService);
    const result = await readPathFromWorkspace('image.png', config);
    // Expect [Part] for image content
    expect(result).toEqual([
      {
        inlineData: {
          mimeType: 'image/png',
          data: imageData.toString('base64'),
        },
      },
    ]);
  });

  it('should read a generic binary file and return an info string', async () => {
    // Data that is clearly binary (null bytes)
    const binaryData = Buffer.from([0x00, 0x01, 0x02, 0x03]);
    mock({
      [CWD]: {
        'data.bin': binaryData,
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [], mockFileService);
    const result = await readPathFromWorkspace('data.bin', config);
    // Expect [string] containing the skip message from fileUtils
    expect(result).toEqual(['Cannot display content of binary file: data.bin']);
  });

  it('should read a file from an absolute path if within workspace', async () => {
    const absPath = path.join(OTHER_DIR, 'abs.txt');
    mock({
      [CWD]: {},
      [OTHER_DIR]: {
        'abs.txt': 'absolute content',
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [OTHER_DIR], mockFileService);
    const result = await readPathFromWorkspace(absPath, config);
    expect(result).toEqual(['absolute content']);
  });

  describe('Directory Expansion', () => {
    it('should expand a directory and read the content of its files', async () => {
      mock({
        [CWD]: {
          'my-dir': {
            'file1.txt': 'content of file 1',
            'file2.md': 'content of file 2',
          },
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('my-dir', config);

      // Convert to a single string for easier, order-independent checking
      const resultText = result
        .map((p) => {
          if (typeof p === 'string') return p;
          if (typeof p === 'object' && p && 'text' in p) return p.text;
          // This part is important for handling binary/image data which isn't just text
          if (typeof p === 'object' && p && 'inlineData' in p) return '';
          return p;
        })
        .join('');

      expect(resultText).toContain(
        '--- Start of content for directory: my-dir ---',
      );
      expect(resultText).toContain('--- file1.txt ---');
      expect(resultText).toContain('content of file 1');
      expect(resultText).toContain('--- file2.md ---');
      expect(resultText).toContain('content of file 2');
      expect(resultText).toContain(
        '--- End of content for directory: my-dir ---',
      );
    });

    it('should recursively expand a directory and read all nested files', async () => {
      mock({
        [CWD]: {
          'my-dir': {
            'file1.txt': 'content of file 1',
            'sub-dir': {
              'nested.txt': 'nested content',
            },
          },
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('my-dir', config);

      const resultText = result
        .map((p) => {
          if (typeof p === 'string') return p;
          if (typeof p === 'object' && p && 'text' in p) return p.text;
          return '';
        })
        .join('');

      expect(resultText).toContain('content of file 1');
      expect(resultText).toContain('nested content');
      expect(resultText).toContain(
        `--- ${path.join('sub-dir', 'nested.txt')} ---`,
      );
    });

    it('should handle mixed content and include files from subdirectories', async () => {
      const imageData = Buffer.from([
        0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a,
      ]);
      mock({
        [CWD]: {
          'mixed-dir': {
            'info.txt': 'some text',
            'photo.png': imageData,
            'sub-dir': {
              'nested.txt': 'this should be included',
            },
            'empty-sub-dir': {},
          },
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('mixed-dir', config);

      // Check for the text part
      const textContent = result
        .map((p) => {
          if (typeof p === 'string') return p;
          if (typeof p === 'object' && p && 'text' in p) return p.text;
          return ''; // Ignore non-text parts for this assertion
        })
        .join('');
      expect(textContent).toContain('some text');
      expect(textContent).toContain('this should be included');

      // Check for the image part
      const imagePart = result.find(
        (p) => typeof p === 'object' && 'inlineData' in p,
      );
      expect(imagePart).toEqual({
        inlineData: {
          mimeType: 'image/png',
          data: imageData.toString('base64'),
        },
      });
    });

    it('should handle an empty directory', async () => {
      mock({
        [CWD]: {
          'empty-dir': {},
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('empty-dir', config);
      expect(result).toEqual([
        { text: '--- Start of content for directory: empty-dir ---\n' },
        { text: '--- End of content for directory: empty-dir ---' },
      ]);
    });
  });

  describe('File Ignoring', () => {
    it('should return an empty array for an ignored file', async () => {
      mock({
        [CWD]: {
          'ignored.txt': 'ignored content',
        },
      });
      const mockFileService = {
        filterFiles: vi.fn(() => []), // Simulate the file being filtered out
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('ignored.txt', config);
      expect(result).toEqual([]);
      expect(mockFileService.filterFiles).toHaveBeenCalledWith(
        ['ignored.txt'],
        {
          respectGitIgnore: true,
          respectGeminiIgnore: true,
        },
      );
    });

    it('should not read ignored files when expanding a directory', async () => {
      mock({
        [CWD]: {
          'my-dir': {
            'not-ignored.txt': 'visible',
            'ignored.log': 'invisible',
          },
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files: string[]) =>
          files.filter((f) => !f.endsWith('ignored.log')),
        ),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      const result = await readPathFromWorkspace('my-dir', config);
      const resultText = result
        .map((p) => {
          if (typeof p === 'string') return p;
          if (typeof p === 'object' && p && 'text' in p) return p.text;
          return '';
        })
        .join('');

      expect(resultText).toContain('visible');
      expect(resultText).not.toContain('invisible');
      expect(mockFileService.filterFiles).toHaveBeenCalled();
    });
  });

  it('should throw an error for an absolute path outside the workspace', async () => {
    const absPath = path.join(OUTSIDE_DIR, 'secret.txt');
    mock({
      [CWD]: {},
      [OUTSIDE_DIR]: {
        'secret.txt': 'secrets',
      },
    });
    // OUTSIDE_DIR is not added to the config's workspace
    const config = createMockConfig(CWD);
    await expect(readPathFromWorkspace(absPath, config)).rejects.toThrow(
      `Absolute path is outside of the allowed workspace: ${absPath}`,
    );
  });

  it('should throw an error if a relative path is not found anywhere', async () => {
    mock({
      [CWD]: {},
      [OTHER_DIR]: {},
    });
    const config = createMockConfig(CWD, [OTHER_DIR]);
    await expect(
      readPathFromWorkspace('not-found.txt', config),
    ).rejects.toThrow('Path not found in workspace: not-found.txt');
  });

  // mock-fs permission simulation is unreliable on Windows.
  it.skipIf(process.platform === 'win32')(
    'should return an error string if reading a file with no permissions',
    async () => {
      mock({
        [CWD]: {
          'unreadable.txt': mock.file({
            content: 'you cannot read me',
            mode: 0o222, // Write-only
          }),
        },
      });
      const mockFileService = {
        filterFiles: vi.fn((files) => files),
      } as unknown as FileDiscoveryService;
      const config = createMockConfig(CWD, [], mockFileService);
      // processSingleFileContent catches the error and returns an error string.
      const result = await readPathFromWorkspace('unreadable.txt', config);
      const textResult = result[0] as string;

      // processSingleFileContent formats errors using the relative path from the target dir (CWD).
      expect(textResult).toContain('Error reading file unreadable.txt');
      expect(textResult).toMatch(/(EACCES|permission denied)/i);
    },
  );

  it('should return an error string for files exceeding the size limit', async () => {
    // Mock a file slightly larger than the 20MB limit defined in fileUtils.ts
    const largeContent = 'a'.repeat(21 * 1024 * 1024); // 21MB
    mock({
      [CWD]: {
        'large.txt': largeContent,
      },
    });
    const mockFileService = {
      filterFiles: vi.fn((files) => files),
    } as unknown as FileDiscoveryService;
    const config = createMockConfig(CWD, [], mockFileService);
    const result = await readPathFromWorkspace('large.txt', config);
    const textResult = result[0] as string;
    // The error message comes directly from processSingleFileContent
    expect(textResult).toBe('File size exceeds the 20MB limit.');
  });
});
packages/core/src/utils/pathReader.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { promises as fs } from 'node:fs';
import path from 'node:path';
import { glob } from 'glob';
import type { PartUnion } from '@google/genai';
import { processSingleFileContent } from './fileUtils.js';
import type { Config } from '../config/config.js';

/**
 * Reads the content of a file or recursively expands a directory from
 * within the workspace, returning content suitable for LLM input.
 *
 * @param pathStr The path to read (can be absolute or relative).
 * @param config The application configuration, providing workspace context and services.
 * @returns A promise that resolves to an array of PartUnion (string | Part).
 * @throws An error if the path is not found or is outside the workspace.
 */
export async function readPathFromWorkspace(
  pathStr: string,
  config: Config,
): Promise<PartUnion[]> {
  const workspace = config.getWorkspaceContext();
  const fileService = config.getFileService();
  let absolutePath: string | null = null;

  if (path.isAbsolute(pathStr)) {
    if (!workspace.isPathWithinWorkspace(pathStr)) {
      throw new Error(
        `Absolute path is outside of the allowed workspace: ${pathStr}`,
      );
    }
    absolutePath = pathStr;
  } else {
    // Prioritized search for relative paths.
    const searchDirs = workspace.getDirectories();
    for (const dir of searchDirs) {
      const potentialPath = path.resolve(dir, pathStr);
      try {
        await fs.access(potentialPath);
        absolutePath = potentialPath;
        break; // Found the first match.
      } catch {
        // Not found, continue to the next directory.
      }
    }
  }

  if (!absolutePath) {
    throw new Error(`Path not found in workspace: ${pathStr}`);
  }

  const stats = await fs.stat(absolutePath);
  if (stats.isDirectory()) {
    const allParts: PartUnion[] = [];
    allParts.push({
      text: `--- Start of content for directory: ${pathStr} ---\n`,
    });

    // Use glob to recursively find all files within the directory.
    const files = await glob('**/*', {
      cwd: absolutePath,
      nodir: true, // We only want files
      dot: true, // Include dotfiles
      absolute: true,
    });

    const relativeFiles = files.map((p) =>
      path.relative(config.getTargetDir(), p),
    );
    const filteredFiles = fileService.filterFiles(relativeFiles, {
      respectGitIgnore: true,
      respectGeminiIgnore: true,
    });
    const finalFiles = filteredFiles.map((p) =>
      path.resolve(config.getTargetDir(), p),
    );

    for (const filePath of finalFiles) {
      const relativePathForDisplay = path.relative(absolutePath, filePath);
      allParts.push({ text: `--- ${relativePathForDisplay} ---\n` });
      const result = await processSingleFileContent(
        filePath,
        config.getTargetDir(),
        config.getFileSystemService(),
      );
      allParts.push(result.llmContent);
      allParts.push({ text: '\n' }); // Add a newline for separation
    }

    allParts.push({ text: `--- End of content for directory: ${pathStr} ---` });
    return allParts;
  } else {
    // It's a single file, check if it's ignored.
    const relativePath = path.relative(config.getTargetDir(), absolutePath);
    const filtered = fileService.filterFiles([relativePath], {
      respectGitIgnore: true,
      respectGeminiIgnore: true,
    });

    if (filtered.length === 0) {
      // File is ignored, return empty array to silently skip.
      return [];
    }

    // It's a single file, process it directly.
    const result = await processSingleFileContent(
      absolutePath,
      config.getTargetDir(),
      config.getFileSystemService(),
    );
    return [result.llmContent];
  }
}
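readPathFromWorkspace is the piece that resolves an @file argument: it returns [string] for a readable text file, [{ inlineData }] for an image, an empty array for an ignored file, a sequence of parts for a directory, and throws for missing or out-of-workspace paths. A minimal caller sketch, assuming an already initialized Config; loadAtFileArgument and the import paths are illustrative assumptions, not APIs introduced by this change.

import type { PartUnion } from '@google/genai';
import type { Config } from '../config/config.js';
import { readPathFromWorkspace } from './pathReader.js';

async function loadAtFileArgument(
  pathArg: string,
  config: Config,
): Promise<PartUnion[]> {
  try {
    // Ignored files come back as [], so callers can silently skip them.
    return await readPathFromWorkspace(pathArg, config);
  } catch (err) {
    // Missing paths and paths outside the workspace throw; surface that as text.
    return [{ text: `Could not read ${pathArg}: ${(err as Error).message}` }];
  }
}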