🔧 Miscellaneous Improvements and Refactoring (#466)
@@ -52,10 +52,7 @@ jobs:
{
"maxSessionTurns": 25,
"coreTools": [
"run_shell_command(echo)",
"run_shell_command(gh label list)",
"run_shell_command(gh issue edit)",
"run_shell_command(gh issue list)"
"run_shell_command"
],
"sandbox": false
}

@@ -209,7 +209,7 @@ npm run lint

### Coding Conventions

- Please adhere to the coding style, patterns, and conventions used throughout the existing codebase.
- Consult [GEMINI.md](https://github.com/google-gemini/gemini-cli/blob/main/GEMINI.md) (typically found in the project root) for specific instructions related to AI-assisted development, including conventions for React, comments, and Git usage.
- Consult [QWEN.md](https://github.com/QwenLM/qwen-code/blob/main/QWEN.md) (typically found in the project root) for specific instructions related to AI-assisted development, including conventions for React, comments, and Git usage.
- **Imports:** Pay special attention to import paths. The project uses ESLint to enforce restrictions on relative imports between packages.
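For orientation only: a minimal sketch of the kind of cross-package import restriction described above, using ESLint's stock `no-restricted-imports` rule. The file globs, patterns, and message are assumptions for illustration, not this repository's actual lint configuration.

```ts
// Hypothetical ESLint flat-config entry: block deep relative imports that
// reach from one package's source tree into another's.
export default [
  {
    files: ['packages/cli/src/**/*.ts'],
    rules: {
      'no-restricted-imports': [
        'error',
        {
          patterns: [
            {
              group: ['../../core/**', '**/packages/core/src/**'],
              message:
                'Import through the core package public entry point instead of a relative path into another package.',
            },
          ],
        },
      ],
    },
  },
];
```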

### Project Structure

@@ -577,6 +577,7 @@ export async function loadCliConfig(
'SYSTEM_TEMPLATE:{"name":"qwen3_coder","params":{"is_git_repository":{RUNTIME_VARS_IS_GIT_REPO},"sandbox":"{RUNTIME_VARS_SANDBOX}"}}',
},
]) as ConfigParameters['systemPromptMappings'],
authType: settings.selectedAuthType,
contentGenerator: settings.contentGenerator,
cliVersion,
tavilyApiKey:

@@ -91,15 +91,13 @@ export const directoryCommand: SlashCommand = {
}
}

if (added.length > 0) {
try {
if (config.shouldLoadMemoryFromIncludeDirectories()) {
const { memoryContent, fileCount } =
await loadServerHierarchicalMemory(
config.getWorkingDir(),
[
...config.getWorkspaceContext().getDirectories(),
...pathsToAdd,
],
[...config.getWorkspaceContext().getDirectories()],
config.getDebugMode(),
config.getFileService(),
config.getExtensionContextFilePaths(),
@@ -114,13 +112,14 @@ export const directoryCommand: SlashCommand = {
addItem(
{
type: MessageType.INFO,
text: `Successfully added GEMINI.md files from the following directories if there are:\n- ${added.join('\n- ')}`,
text: `Successfully added memory files from the following directories if there are:\n- ${added.join('\n- ')}`,
},
Date.now(),
);
} catch (error) {
errors.push(`Error refreshing memory: ${(error as Error).message}`);
}
}

if (added.length > 0) {
const gemini = config.getGeminiClient();

@@ -27,7 +27,7 @@ const renderWithWidth = (
describe('<ContextSummaryDisplay />', () => {
const baseProps = {
geminiMdFileCount: 1,
contextFileNames: ['GEMINI.md'],
contextFileNames: ['QWEN.md'],
mcpServers: { 'test-server': { command: 'test' } },
showToolDescriptions: false,
ideContext: {
@@ -41,7 +41,7 @@ describe('<ContextSummaryDisplay />', () => {
const { lastFrame } = renderWithWidth(120, baseProps);
const output = lastFrame();
expect(output).toContain(
'Using: 1 open file (ctrl+e to view) | 1 GEMINI.md file | 1 MCP server (ctrl+t to view)',
'Using: 1 open file (ctrl+e to view) | 1 QWEN.md file | 1 MCP server (ctrl+t to view)',
);
// Check for absence of newlines
expect(output.includes('\n')).toBe(false);
@@ -53,7 +53,7 @@ describe('<ContextSummaryDisplay />', () => {
const expectedLines = [
'Using:',
' - 1 open file (ctrl+e to view)',
' - 1 GEMINI.md file',
' - 1 QWEN.md file',
' - 1 MCP server (ctrl+t to view)',
];
const actualLines = output.split('\n');

@@ -208,6 +208,7 @@ export interface ConfigParameters {
modelNames: string[];
template: string;
}>;
authType?: AuthType;
contentGenerator?: {
timeout?: number;
maxRetries?: number;
@@ -288,6 +289,7 @@ export class Config {
private readonly summarizeToolOutput:
| Record<string, SummarizeToolOutputSettings>
| undefined;
private authType?: AuthType;
private readonly enableOpenAILogging: boolean;
private readonly contentGenerator?: {
timeout?: number;
@@ -368,6 +370,7 @@ export class Config {
this.ideMode = params.ideMode ?? false;
this.ideClient = IdeClient.getInstance();
this.systemPromptMappings = params.systemPromptMappings;
this.authType = params.authType;
this.enableOpenAILogging = params.enableOpenAILogging ?? false;
this.contentGenerator = params.contentGenerator;
this.cliVersion = params.cliVersion;
@@ -451,6 +454,8 @@ export class Config {

// Reset the session flag since we're explicitly changing auth and using default model
this.inFallbackMode = false;

this.authType = authMethod;
}

getSessionId(): string {
@@ -545,6 +550,7 @@ export class Config {
getDebugMode(): boolean {
return this.debugMode;
}

getQuestion(): string | undefined {
return this.question;
}
@@ -763,6 +769,10 @@ export class Config {
}
}

getAuthType(): AuthType | undefined {
return this.authType;
}

getEnableOpenAILogging(): boolean {
return this.enableOpenAILogging;
}

@@ -3410,7 +3410,10 @@ describe('OpenAIContentGenerator', () => {
model: 'qwen-turbo',
};

await dashscopeGenerator.generateContent(request, 'dashscope-prompt-id');
await dashscopeGenerator.generateContentStream(
request,
'dashscope-prompt-id',
);

// Should include cache control in last message
expect(mockOpenAIClient.chat.completions.create).toHaveBeenCalledWith(
@@ -3422,7 +3425,6 @@ describe('OpenAIContentGenerator', () => {
expect.objectContaining({
type: 'text',
text: 'Hello, how are you?',
cache_control: { type: 'ephemeral' },
}),
]),
}),

@@ -130,6 +130,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
? {
'X-DashScope-CacheControl': 'enable',
'X-DashScope-UserAgent': userAgent,
'X-DashScope-AuthType': contentGeneratorConfig.authType,
}
: {}),
};
@@ -235,8 +236,18 @@ export class OpenAIContentGenerator implements ContentGenerator {
private async buildCreateParams(
request: GenerateContentParameters,
userPromptId: string,
streaming: boolean = false,
): Promise<Parameters<typeof this.client.chat.completions.create>[0]> {
const messages = this.convertToOpenAIFormat(request);
let messages = this.convertToOpenAIFormat(request);

// Add cache control to system and last messages for DashScope providers
// Only add cache control to system message for non-streaming requests
if (this.isDashScopeProvider()) {
messages = this.addDashScopeCacheControl(
messages,
streaming ? 'both' : 'system',
);
}

// Build sampling parameters with clear priority:
// 1. Request-level parameters (highest priority)
@@ -259,6 +270,11 @@ export class OpenAIContentGenerator implements ContentGenerator {
);
}

if (streaming) {
createParams.stream = true;
createParams.stream_options = { include_usage: true };
}

return createParams;
}

@@ -267,7 +283,11 @@ export class OpenAIContentGenerator implements ContentGenerator {
userPromptId: string,
): Promise<GenerateContentResponse> {
const startTime = Date.now();
const createParams = await this.buildCreateParams(request, userPromptId);
const createParams = await this.buildCreateParams(
request,
userPromptId,
false,
);

try {
const completion = (await this.client.chat.completions.create(
@@ -358,10 +378,11 @@ export class OpenAIContentGenerator implements ContentGenerator {
userPromptId: string,
): Promise<AsyncGenerator<GenerateContentResponse>> {
const startTime = Date.now();
const createParams = await this.buildCreateParams(request, userPromptId);

createParams.stream = true;
createParams.stream_options = { include_usage: true };
const createParams = await this.buildCreateParams(
request,
userPromptId,
true,
);

try {
const stream = (await this.client.chat.completions.create(

@@ -942,14 +963,13 @@ export class OpenAIContentGenerator implements ContentGenerator {
const mergedMessages =
this.mergeConsecutiveAssistantMessages(cleanedMessages);

// Add cache control to system and last messages for DashScope providers
return this.addCacheControlFlag(mergedMessages, 'both');
return mergedMessages;
}

/**
* Add cache control flag to specified message(s) for DashScope providers
*/
private addCacheControlFlag(
private addDashScopeCacheControl(
messages: OpenAI.Chat.ChatCompletionMessageParam[],
target: 'system' | 'last' | 'both' = 'both',
): OpenAI.Chat.ChatCompletionMessageParam[] {

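The body of `addDashScopeCacheControl` is not part of this hunk. For orientation, here is a small self-contained sketch of what a transform with this signature shape could do, based only on what the test above expects (a `cache_control: { type: 'ephemeral' }` flag on a text part). The message types and the normalization of string content into text parts are assumptions, not the repository's actual implementation.

```ts
// Illustrative sketch only: mark the system and/or last message with an
// ephemeral cache-control flag, the shape the DashScope test asserts on.
type CacheTarget = 'system' | 'last' | 'both';

interface TextPart {
  type: 'text';
  text: string;
  cache_control?: { type: 'ephemeral' };
}

interface ChatMessage {
  role: 'system' | 'user' | 'assistant' | 'tool';
  content: string | TextPart[];
}

function addCacheControl(
  messages: ChatMessage[],
  target: CacheTarget = 'both',
): ChatMessage[] {
  const mark = (msg: ChatMessage): ChatMessage => {
    // Normalize string content into a text-part array so the flag has somewhere to live.
    const parts: TextPart[] =
      typeof msg.content === 'string'
        ? [{ type: 'text', text: msg.content }]
        : [...msg.content];
    if (parts.length > 0) {
      parts[parts.length - 1] = {
        ...parts[parts.length - 1],
        cache_control: { type: 'ephemeral' },
      };
    }
    return { ...msg, content: parts };
  };

  return messages.map((msg, i) => {
    const isSystem = msg.role === 'system';
    const isLast = i === messages.length - 1;
    if ((target === 'system' || target === 'both') && isSystem) return mark(msg);
    if ((target === 'last' || target === 'both') && isLast) return mark(msg);
    return msg;
  });
}
```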
@@ -8,12 +8,23 @@
* Integration test to verify circular reference handling with proxy agents
*/

import { describe, it, expect } from 'vitest';
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { QwenLogger } from './qwen-logger/qwen-logger.js';
import { RumEvent } from './qwen-logger/event-types.js';
import { Config } from '../config/config.js';

describe('Circular Reference Integration Test', () => {
beforeEach(() => {
// Clear singleton instance before each test
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(QwenLogger as any).instance = undefined;
});

afterEach(() => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(QwenLogger as any).instance = undefined;
});

it('should handle HttpsProxyAgent-like circular references in qwen logging', () => {
// Create a mock config with proxy
const mockConfig = {
@@ -64,4 +75,36 @@ describe('Circular Reference Integration Test', () => {
logger?.enqueueLogEvent(problematicEvent);
}).not.toThrow();
});

it('should handle event overflow without memory leaks', () => {
const mockConfig = {
getTelemetryEnabled: () => true,
getUsageStatisticsEnabled: () => true,
getSessionId: () => 'test-session',
getDebugMode: () => true,
} as unknown as Config;

const logger = QwenLogger.getInstance(mockConfig);

// Add more events than the maximum capacity
for (let i = 0; i < 1100; i++) {
logger?.enqueueLogEvent({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: `overflow-test-${i}`,
});
}

// Logger should still be functional
expect(logger).toBeDefined();
expect(() => {
logger?.enqueueLogEvent({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: 'final-test',
});
}).not.toThrow();
});
});

@@ -8,7 +8,6 @@ import { LogAttributes, LogRecord, logs } from '@opentelemetry/api-logs';
import { SemanticAttributes } from '@opentelemetry/semantic-conventions';
import { Config } from '../config/config.js';
import { safeJsonStringify } from '../utils/safeJsonStringify.js';
import { ClearcutLogger } from './clearcut-logger/clearcut-logger.js';
import {
EVENT_API_ERROR,
EVENT_API_REQUEST,
@@ -150,7 +149,7 @@ export function logToolCall(config: Config, event: ToolCallEvent): void {
}

export function logApiRequest(config: Config, event: ApiRequestEvent): void {
QwenLogger.getInstance(config)?.logApiRequestEvent(event);
// QwenLogger.getInstance(config)?.logApiRequestEvent(event);
if (!isTelemetrySdkInitialized()) return;

const attributes: LogAttributes = {
@@ -364,6 +363,7 @@ export function logIdeConnection(
config: Config,
event: IdeConnectionEvent,
): void {
QwenLogger.getInstance(config)?.logIdeConnectionEvent(event);
if (!isTelemetrySdkInitialized()) return;

const attributes: LogAttributes = {
@@ -384,7 +384,7 @@ export function logKittySequenceOverflow(
config: Config,
event: KittySequenceOverflowEvent,
): void {
ClearcutLogger.getInstance(config)?.logKittySequenceOverflowEvent(event);
QwenLogger.getInstance(config)?.logKittySequenceOverflowEvent(event);
if (!isTelemetrySdkInitialized()) return;
const attributes: LogAttributes = {
...getCommonAttributes(config),

@@ -79,5 +79,6 @@ export interface RumPayload {
session: RumSession;
view: RumView;
events: RumEvent[];
properties?: Record<string, unknown>;
_v: string;
}

packages/core/src/telemetry/qwen-logger/qwen-logger.test.ts (new file, 407 lines)
@@ -0,0 +1,407 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import {
describe,
it,
expect,
vi,
beforeEach,
afterEach,
afterAll,
} from 'vitest';
import { QwenLogger, TEST_ONLY } from './qwen-logger.js';
import { Config } from '../../config/config.js';
import {
StartSessionEvent,
EndSessionEvent,
IdeConnectionEvent,
KittySequenceOverflowEvent,
IdeConnectionType,
} from '../types.js';
import { RumEvent } from './event-types.js';

// Mock dependencies
vi.mock('../../utils/user_id.js', () => ({
getInstallationId: vi.fn(() => 'test-installation-id'),
}));

vi.mock('../../utils/safeJsonStringify.js', () => ({
safeJsonStringify: vi.fn((obj) => JSON.stringify(obj)),
}));

// Mock https module
vi.mock('https', () => ({
request: vi.fn(),
}));

const makeFakeConfig = (overrides: Partial<Config> = {}): Config => {
const defaults = {
getUsageStatisticsEnabled: () => true,
getDebugMode: () => false,
getSessionId: () => 'test-session-id',
getCliVersion: () => '1.0.0',
getProxy: () => undefined,
getContentGeneratorConfig: () => ({ authType: 'test-auth' }),
getMcpServers: () => ({}),
getModel: () => 'test-model',
getEmbeddingModel: () => 'test-embedding',
getSandbox: () => false,
getCoreTools: () => [],
getApprovalMode: () => 'auto',
getTelemetryEnabled: () => true,
getTelemetryLogPromptsEnabled: () => false,
getFileFilteringRespectGitIgnore: () => true,
...overrides,
};
return defaults as Config;
};

describe('QwenLogger', () => {
let mockConfig: Config;

beforeEach(() => {
vi.useFakeTimers();
vi.setSystemTime(new Date('2025-01-01T12:00:00.000Z'));
mockConfig = makeFakeConfig();
// Clear singleton instance
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(QwenLogger as any).instance = undefined;
});

afterEach(() => {
vi.useRealTimers();
vi.restoreAllMocks();
});

afterAll(() => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(QwenLogger as any).instance = undefined;
});

describe('getInstance', () => {
it('returns undefined when usage statistics are disabled', () => {
const config = makeFakeConfig({ getUsageStatisticsEnabled: () => false });
const logger = QwenLogger.getInstance(config);
expect(logger).toBeUndefined();
});

it('returns an instance when usage statistics are enabled', () => {
const logger = QwenLogger.getInstance(mockConfig);
expect(logger).toBeInstanceOf(QwenLogger);
});

it('is a singleton', () => {
const logger1 = QwenLogger.getInstance(mockConfig);
const logger2 = QwenLogger.getInstance(mockConfig);
expect(logger1).toBe(logger2);
});
});

describe('event queue management', () => {
it('should handle event overflow gracefully', () => {
const debugConfig = makeFakeConfig({ getDebugMode: () => true });
const logger = QwenLogger.getInstance(debugConfig)!;
const consoleSpy = vi
.spyOn(console, 'debug')
.mockImplementation(() => {});

// Fill the queue beyond capacity
for (let i = 0; i < TEST_ONLY.MAX_EVENTS + 10; i++) {
logger.enqueueLogEvent({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: `test-event-${i}`,
});
}

// Should have logged debug messages about dropping events
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining(
'QwenLogger: Dropped old event to prevent memory leak',
),
);
});

it('should handle enqueue errors gracefully', () => {
const debugConfig = makeFakeConfig({ getDebugMode: () => true });
const logger = QwenLogger.getInstance(debugConfig)!;
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});

// Mock the events deque to throw an error
const originalPush = logger['events'].push;
logger['events'].push = vi.fn(() => {
throw new Error('Test error');
});

logger.enqueueLogEvent({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: 'test-event',
});

expect(consoleSpy).toHaveBeenCalledWith(
'QwenLogger: Failed to enqueue log event.',
expect.any(Error),
);

// Restore original method
logger['events'].push = originalPush;
});
});

describe('concurrent flush protection', () => {
it('should handle concurrent flush requests', () => {
const debugConfig = makeFakeConfig({ getDebugMode: () => true });
const logger = QwenLogger.getInstance(debugConfig)!;
const consoleSpy = vi
.spyOn(console, 'debug')
.mockImplementation(() => {});

// Manually set the flush in progress flag to simulate concurrent access
logger['isFlushInProgress'] = true;

// Try to flush while another flush is in progress
const result = logger.flushToRum();

// Should have logged about pending flush
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining(
'QwenLogger: Flush already in progress, marking pending flush',
),
);

// Should return a resolved promise
expect(result).toBeInstanceOf(Promise);

// Reset the flag
logger['isFlushInProgress'] = false;
});
});

describe('failed event retry mechanism', () => {
it('should requeue failed events with size limits', () => {
const debugConfig = makeFakeConfig({ getDebugMode: () => true });
const logger = QwenLogger.getInstance(debugConfig)!;
const consoleSpy = vi
.spyOn(console, 'debug')
.mockImplementation(() => {});

const failedEvents: RumEvent[] = [];
for (let i = 0; i < TEST_ONLY.MAX_RETRY_EVENTS + 50; i++) {
failedEvents.push({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: `failed-event-${i}`,
});
}

// Call the private method using bracket notation
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logger as any).requeueFailedEvents(failedEvents);

// Should have logged about dropping events due to retry limit
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('QwenLogger: Re-queued'),
);
});

it('should handle empty retry queue gracefully', () => {
const debugConfig = makeFakeConfig({ getDebugMode: () => true });
const logger = QwenLogger.getInstance(debugConfig)!;
const consoleSpy = vi
.spyOn(console, 'debug')
.mockImplementation(() => {});

// Fill the queue to capacity first
for (let i = 0; i < TEST_ONLY.MAX_EVENTS; i++) {
logger.enqueueLogEvent({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: `event-${i}`,
});
}

// Try to requeue when no space is available
const failedEvents: RumEvent[] = [
{
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: 'failed-event',
},
];

// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logger as any).requeueFailedEvents(failedEvents);

expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('QwenLogger: No events re-queued'),
);
});
});

describe('event handlers', () => {
it('should log IDE connection events', () => {
const logger = QwenLogger.getInstance(mockConfig)!;
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');

const event = new IdeConnectionEvent(IdeConnectionType.SESSION);

logger.logIdeConnectionEvent(event);

expect(enqueueSpy).toHaveBeenCalledWith(
expect.objectContaining({
event_type: 'action',
type: 'connection',
name: 'ide_connection',
snapshots: JSON.stringify({
connection_type: IdeConnectionType.SESSION,
}),
}),
);
});

it('should log Kitty sequence overflow events', () => {
const logger = QwenLogger.getInstance(mockConfig)!;
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');

const event = new KittySequenceOverflowEvent(1024, 'truncated...');

logger.logKittySequenceOverflowEvent(event);

expect(enqueueSpy).toHaveBeenCalledWith(
expect.objectContaining({
event_type: 'exception',
type: 'overflow',
name: 'kitty_sequence_overflow',
subtype: 'kitty_sequence_overflow',
snapshots: JSON.stringify({
sequence_length: 1024,
truncated_sequence: 'truncated...',
}),
}),
);
});

it('should flush start session events immediately', async () => {
const logger = QwenLogger.getInstance(mockConfig)!;
const flushSpy = vi.spyOn(logger, 'flushToRum').mockResolvedValue({});

const testConfig = makeFakeConfig({
getModel: () => 'test-model',
getEmbeddingModel: () => 'test-embedding',
});
const event = new StartSessionEvent(testConfig);

logger.logStartSessionEvent(event);

expect(flushSpy).toHaveBeenCalled();
});

it('should flush end session events immediately', async () => {
const logger = QwenLogger.getInstance(mockConfig)!;
const flushSpy = vi.spyOn(logger, 'flushToRum').mockResolvedValue({});

const event = new EndSessionEvent(mockConfig);

logger.logEndSessionEvent(event);

expect(flushSpy).toHaveBeenCalled();
});
});

describe('flush timing', () => {
it('should not flush if interval has not passed', () => {
const logger = QwenLogger.getInstance(mockConfig)!;
const flushSpy = vi.spyOn(logger, 'flushToRum');

// Add an event and try to flush immediately
logger.enqueueLogEvent({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: 'test-event',
});

logger.flushIfNeeded();

expect(flushSpy).not.toHaveBeenCalled();
});

it('should flush when interval has passed', () => {
const logger = QwenLogger.getInstance(mockConfig)!;
const flushSpy = vi.spyOn(logger, 'flushToRum').mockResolvedValue({});

// Add an event
logger.enqueueLogEvent({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: 'test-event',
});

// Advance time beyond flush interval
vi.advanceTimersByTime(TEST_ONLY.FLUSH_INTERVAL_MS + 1000);

logger.flushIfNeeded();

expect(flushSpy).toHaveBeenCalled();
});
});

describe('error handling', () => {
it('should handle flush errors gracefully with debug mode', async () => {
const debugConfig = makeFakeConfig({ getDebugMode: () => true });
const logger = QwenLogger.getInstance(debugConfig)!;
const consoleSpy = vi
.spyOn(console, 'debug')
.mockImplementation(() => {});

// Add an event first
logger.enqueueLogEvent({
timestamp: Date.now(),
event_type: 'action',
type: 'test',
name: 'test-event',
});

// Mock flushToRum to throw an error
const originalFlush = logger.flushToRum.bind(logger);
logger.flushToRum = vi.fn().mockRejectedValue(new Error('Network error'));

// Advance time to trigger flush
vi.advanceTimersByTime(TEST_ONLY.FLUSH_INTERVAL_MS + 1000);

logger.flushIfNeeded();

// Wait for async operations
await vi.runAllTimersAsync();

expect(consoleSpy).toHaveBeenCalledWith(
'Error flushing to RUM:',
expect.any(Error),
);

// Restore original method
logger.flushToRum = originalFlush;
});
});

describe('constants export', () => {
it('should export test constants', () => {
expect(TEST_ONLY.MAX_EVENTS).toBe(1000);
expect(TEST_ONLY.MAX_RETRY_EVENTS).toBe(100);
expect(TEST_ONLY.FLUSH_INTERVAL_MS).toBe(60000);
});
});
});

@@ -7,7 +7,6 @@
import { Buffer } from 'buffer';
import * as https from 'https';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { randomUUID } from 'crypto';

import {
StartSessionEvent,
@@ -22,6 +21,8 @@ import {
NextSpeakerCheckEvent,
SlashCommandEvent,
MalformedJsonResponseEvent,
IdeConnectionEvent,
KittySequenceOverflowEvent,
} from '../types.js';
import {
RumEvent,
@@ -31,12 +32,12 @@ import {
RumExceptionEvent,
RumPayload,
} from './event-types.js';
// Removed unused EventMetadataKey import
import { Config } from '../../config/config.js';
import { safeJsonStringify } from '../../utils/safeJsonStringify.js';
// Removed unused import
import { HttpError, retryWithBackoff } from '../../utils/retry.js';
import { getInstallationId } from '../../utils/user_id.js';
import { FixedDeque } from 'mnemonist';
import { AuthType } from '../../core/contentGenerator.js';

// Usage statistics collection endpoint
const USAGE_STATS_HOSTNAME = 'gb4w8c3ygj-default-sea.rum.aliyuncs.com';
@@ -44,6 +45,23 @@ const USAGE_STATS_PATH = '/';

const RUN_APP_ID = 'gb4w8c3ygj@851d5d500f08f92';

/**
* Interval in which buffered events are sent to RUM.
*/
const FLUSH_INTERVAL_MS = 1000 * 60;

/**
* Maximum amount of events to keep in memory. Events added after this amount
* are dropped until the next flush to RUM, which happens periodically as
* defined by {@link FLUSH_INTERVAL_MS}.
*/
const MAX_EVENTS = 1000;

/**
* Maximum events to retry after a failed RUM flush
*/
const MAX_RETRY_EVENTS = 100;

export interface LogResponse {
nextRequestWaitMs?: number;
}

@@ -53,23 +71,42 @@ export interface LogResponse {
export class QwenLogger {
private static instance: QwenLogger;
private config?: Config;
private readonly events: RumEvent[] = [];
private last_flush_time: number = Date.now();
private flush_interval_ms: number = 1000 * 60; // Wait at least a minute before flushing events.

/**
* Queue of pending events that need to be flushed to the server. New events
* are added to this queue and then flushed on demand (via `flushToRum`)
*/
private readonly events: FixedDeque<RumEvent>;

/**
* The last time that the events were successfully flushed to the server.
*/
private lastFlushTime: number = Date.now();

private userId: string;
private sessionId: string;
private viewId: string;

/**
* The value is true when there is a pending flush happening. This prevents
* concurrent flush operations.
*/
private isFlushInProgress: boolean = false;

/**
* This value is true when a flush was requested during an ongoing flush.
*/
private pendingFlush: boolean = false;

private isShutdown: boolean = false;

private constructor(config?: Config) {
this.config = config;
this.events = new FixedDeque<RumEvent>(Array, MAX_EVENTS);
this.userId = this.generateUserId();
this.sessionId =
typeof this.config?.getSessionId === 'function'
? this.config.getSessionId()
: '';
this.viewId = randomUUID();
}

private generateUserId(): string {
@@ -92,7 +129,26 @@ export class QwenLogger {
}

enqueueLogEvent(event: RumEvent): void {
try {
// Manually handle overflow for FixedDeque, which throws when full.
const wasAtCapacity = this.events.size >= MAX_EVENTS;

if (wasAtCapacity) {
this.events.shift(); // Evict oldest element to make space.
}

this.events.push(event);

if (wasAtCapacity && this.config?.getDebugMode()) {
console.debug(
`QwenLogger: Dropped old event to prevent memory leak (queue size: ${this.events.size})`,
);
}
} catch (error) {
if (this.config?.getDebugMode()) {
console.error('QwenLogger: Failed to enqueue log event.', error);
}
}
}

createRumEvent(

@@ -143,6 +199,7 @@ export class QwenLogger {
}

async createRumPayload(): Promise<RumPayload> {
const authType = this.config?.getAuthType();
const version = this.config?.getCliVersion() || 'unknown';

return {
@@ -159,40 +216,59 @@ export class QwenLogger {
id: this.sessionId,
},
view: {
id: this.viewId,
id: this.sessionId,
name: 'qwen-code-cli',
},
events: [...this.events],

events: this.events.toArray() as RumEvent[],
properties: {
auth_type: authType,
model: this.config?.getModel(),
base_url:
authType === AuthType.USE_OPENAI ? process.env.OPENAI_BASE_URL : '',
},
_v: `qwen-code@${version}`,
};
}

flushIfNeeded(): void {
if (Date.now() - this.last_flush_time < this.flush_interval_ms) {
return;
}

// Prevent concurrent flush operations
if (this.isFlushInProgress) {
if (Date.now() - this.lastFlushTime < FLUSH_INTERVAL_MS) {
return;
}

this.flushToRum().catch((error) => {
if (this.config?.getDebugMode()) {
console.debug('Error flushing to RUM:', error);
}
});
}

async flushToRum(): Promise<LogResponse> {
if (this.isFlushInProgress) {
if (this.config?.getDebugMode()) {
console.debug(
'QwenLogger: Flush already in progress, marking pending flush.',
);
}
this.pendingFlush = true;
return Promise.resolve({});
}
this.isFlushInProgress = true;

if (this.config?.getDebugMode()) {
console.log('Flushing log events to RUM.');
}
if (this.events.length === 0) {
if (this.events.size === 0) {
this.isFlushInProgress = false;
return {};
}

this.isFlushInProgress = true;
const eventsToSend = this.events.toArray() as RumEvent[];
this.events.clear();

const rumPayload = await this.createRumPayload();
// Override events with the ones we're sending
rumPayload.events = eventsToSend;
const flushFn = () =>
new Promise<Buffer>((resolve, reject) => {
const body = safeJsonStringify(rumPayload);
@@ -246,16 +322,29 @@ export class QwenLogger {
},
});

this.events.splice(0, this.events.length);
this.last_flush_time = Date.now();
this.lastFlushTime = Date.now();
return {};
} catch (error) {
if (this.config?.getDebugMode()) {
console.error('RUM flush failed after multiple retries.', error);
}

// Re-queue failed events for retry
this.requeueFailedEvents(eventsToSend);
return {};
} finally {
this.isFlushInProgress = false;

// If a flush was requested while we were flushing, flush again
if (this.pendingFlush) {
this.pendingFlush = false;
// Fire and forget the pending flush
this.flushToRum().catch((error) => {
if (this.config?.getDebugMode()) {
console.debug('Error in pending flush to RUM:', error);
}
});
}
}
}

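The flush path above combines a single-flight guard (`isFlushInProgress`) with a `pendingFlush` marker, so a flush requested while another is running is deferred and executed once afterwards. Below is a minimal, self-contained sketch of that pattern, detached from the RUM specifics; the class and the `send` callback are illustrative, not the repository's API.

```ts
// Illustrative sketch of the single-flight-with-pending-flush pattern used above.
class SingleFlightFlusher<T> {
  private queue: T[] = [];
  private inFlight = false;
  private pending = false;

  constructor(private readonly send: (batch: T[]) => Promise<void>) {}

  enqueue(item: T): void {
    this.queue.push(item);
  }

  async flush(): Promise<void> {
    if (this.inFlight) {
      // A flush is already running; remember that another one was requested.
      this.pending = true;
      return;
    }
    this.inFlight = true;
    try {
      const batch = this.queue.splice(0, this.queue.length);
      if (batch.length > 0) {
        await this.send(batch);
      }
    } finally {
      this.inFlight = false;
      if (this.pending) {
        this.pending = false;
        // Run the deferred flush; errors are ignored here for brevity.
        void this.flush().catch(() => {});
      }
    }
  }
}
```

The logger additionally re-queues a failed batch with a size cap (see `requeueFailedEvents` below), which this sketch omits.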
@@ -282,7 +371,9 @@ export class QwenLogger {
// Flush start event immediately
this.enqueueLogEvent(applicationEvent);
this.flushToRum().catch((error: unknown) => {
if (this.config?.getDebugMode()) {
console.debug('Error flushing to RUM:', error);
}
});
}

@@ -451,13 +542,41 @@ export class QwenLogger {
this.flushIfNeeded();
}

logIdeConnectionEvent(event: IdeConnectionEvent): void {
const rumEvent = this.createActionEvent('connection', 'ide_connection', {
snapshots: JSON.stringify({ connection_type: event.connection_type }),
});

this.enqueueLogEvent(rumEvent);
this.flushIfNeeded();
}

logKittySequenceOverflowEvent(event: KittySequenceOverflowEvent): void {
const rumEvent = this.createExceptionEvent(
'overflow',
'kitty_sequence_overflow',
{
subtype: 'kitty_sequence_overflow',
snapshots: JSON.stringify({
sequence_length: event.sequence_length,
truncated_sequence: event.truncated_sequence,
}),
},
);

this.enqueueLogEvent(rumEvent);
this.flushIfNeeded();
}

logEndSessionEvent(_event: EndSessionEvent): void {
const applicationEvent = this.createViewEvent('session', 'session_end', {});

// Flush immediately on session end.
this.enqueueLogEvent(applicationEvent);
this.flushToRum().catch((error: unknown) => {
if (this.config?.getDebugMode()) {
console.debug('Error flushing to RUM:', error);
}
});
}

@@ -480,4 +599,60 @@ export class QwenLogger {
const event = new EndSessionEvent(this.config);
this.logEndSessionEvent(event);
}

private requeueFailedEvents(eventsToSend: RumEvent[]): void {
// Add the events back to the front of the queue to be retried, but limit retry queue size
const eventsToRetry = eventsToSend.slice(-MAX_RETRY_EVENTS); // Keep only the most recent events

// Log a warning if we're dropping events
if (eventsToSend.length > MAX_RETRY_EVENTS && this.config?.getDebugMode()) {
console.warn(
`QwenLogger: Dropping ${
eventsToSend.length - MAX_RETRY_EVENTS
} events due to retry queue limit. Total events: ${
eventsToSend.length
}, keeping: ${MAX_RETRY_EVENTS}`,
);
}

// Determine how many events can be re-queued
const availableSpace = MAX_EVENTS - this.events.size;
const numEventsToRequeue = Math.min(eventsToRetry.length, availableSpace);

if (numEventsToRequeue === 0) {
if (this.config?.getDebugMode()) {
console.debug(
`QwenLogger: No events re-queued (queue size: ${this.events.size})`,
);
}
return;
}

// Get the most recent events to re-queue
const eventsToRequeue = eventsToRetry.slice(
eventsToRetry.length - numEventsToRequeue,
);

// Prepend events to the front of the deque to be retried first.
// We iterate backwards to maintain the original order of the failed events.
for (let i = eventsToRequeue.length - 1; i >= 0; i--) {
this.events.unshift(eventsToRequeue[i]);
}
// Clear any potential overflow
while (this.events.size > MAX_EVENTS) {
this.events.pop();
}

if (this.config?.getDebugMode()) {
console.debug(
`QwenLogger: Re-queued ${numEventsToRequeue} events for retry (queue size: ${this.events.size})`,
);
}
}
}

export const TEST_ONLY = {
MAX_RETRY_EVENTS,
MAX_EVENTS,
FLUSH_INTERVAL_MS,
};

@@ -210,16 +210,16 @@ describe('bfsFileSearch', () => {
for (let i = 0; i < numTargetDirs; i++) {
// Add target files in some directories
fileCreationPromises.push(
createTestFile('content', `dir${i}`, 'GEMINI.md'),
createTestFile('content', `dir${i}`, 'QWEN.md'),
);
fileCreationPromises.push(
createTestFile('content', `dir${i}`, 'subdir1', 'GEMINI.md'),
createTestFile('content', `dir${i}`, 'subdir1', 'QWEN.md'),
);
}
const expectedFiles = await Promise.all(fileCreationPromises);

const result = await bfsFileSearch(testRootDir, {
fileName: 'GEMINI.md',
fileName: 'QWEN.md',
// Provide a generous maxDirs limit to ensure it doesn't prematurely stop
// in this large test case. Total dirs created is 200.
maxDirs: 250,

@@ -143,9 +143,28 @@ async function getGeminiMdFilePathsInternalForEachDir(
// It's okay if it's not found.
}

// Handle the case where we're in the home directory (dir is empty string or home path)
const resolvedDir = dir ? path.resolve(dir) : resolvedHome;
const isHomeDirectory = resolvedDir === resolvedHome;

if (isHomeDirectory) {
// For home directory, only check for QWEN.md directly in the home directory
const homeContextPath = path.join(resolvedHome, geminiMdFilename);
try {
await fs.access(homeContextPath, fsSync.constants.R_OK);
if (homeContextPath !== globalMemoryPath) {
allPaths.add(homeContextPath);
if (debugMode)
logger.debug(
`Found readable home ${geminiMdFilename}: ${homeContextPath}`,
);
}
} catch {
// Not found, which is okay
}
} else if (dir) {
// FIX: Only perform the workspace search (upward and downward scans)
// if a valid currentWorkingDirectory is provided.
if (dir) {
// if a valid currentWorkingDirectory is provided and it's not the home directory.
const resolvedCwd = path.resolve(dir);
if (debugMode)
logger.debug(