mirror of https://github.com/QwenLM/qwen-code.git
Refac: Centralize storage file management (#4078)
Co-authored-by: Taylor Mullen <ntaylormullen@google.com>
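A minimal sketch of the pattern this commit applies, assuming only the APIs shown in the diff below; call sites stop joining '~/.gemini' paths by hand and ask a single Storage class instead:

// Before: each module built its own path.
// const credsPath = path.join(os.homedir(), '.gemini', 'oauth_creds.json');

// After: the path layout lives in packages/core/src/config/storage.ts.
import { Storage } from './config/storage.js';

const storage = new Storage(process.cwd());      // project-scoped paths
const credsPath = Storage.getOAuthCredsPath();   // ~/.gemini/oauth_creds.json
const tempDir = storage.getProjectTempDir();     // ~/.gemini/tmp/<project hash>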
@@ -11,7 +11,7 @@ import {
clearCachedCredentialFile,
clearOauthClientCache,
} from './oauth2.js';
import { getCachedGoogleAccount } from '../utils/user_account.js';
import { UserAccountManager } from '../utils/userAccountManager.js';
import { OAuth2Client, Compute } from 'google-auth-library';
import * as fs from 'fs';
import * as path from 'path';
@@ -180,7 +180,10 @@ describe('oauth2', () => {
});

// Verify the getCachedGoogleAccount function works
expect(getCachedGoogleAccount()).toBe('test-google-account@gmail.com');
const userAccountManager = new UserAccountManager();
expect(userAccountManager.getCachedGoogleAccount()).toBe(
'test-google-account@gmail.com',
);
});

it('should perform login with user code', async () => {
@@ -533,14 +536,17 @@ describe('oauth2', () => {
googleAccountPath,
JSON.stringify(accountData),
);
const userAccountManager = new UserAccountManager();

expect(fs.existsSync(credsPath)).toBe(true);
expect(fs.existsSync(googleAccountPath)).toBe(true);
expect(getCachedGoogleAccount()).toBe('test@example.com');
expect(userAccountManager.getCachedGoogleAccount()).toBe(
'test@example.com',
);

await clearCachedCredentialFile();
expect(fs.existsSync(credsPath)).toBe(false);
expect(getCachedGoogleAccount()).toBeNull();
expect(userAccountManager.getCachedGoogleAccount()).toBeNull();
const updatedAccountData = JSON.parse(
fs.readFileSync(googleAccountPath, 'utf-8'),
);
@@ -17,16 +17,14 @@ import * as net from 'net';
import open from 'open';
import path from 'node:path';
import { promises as fs } from 'node:fs';
import * as os from 'os';
import { Config } from '../config/config.js';
import { getErrorMessage } from '../utils/errors.js';
import {
cacheGoogleAccount,
getCachedGoogleAccount,
clearCachedGoogleAccount,
} from '../utils/user_account.js';
import { UserAccountManager } from '../utils/userAccountManager.js';
import { AuthType } from '../core/contentGenerator.js';
import readline from 'node:readline';
import { Storage } from '../config/storage.js';

const userAccountManager = new UserAccountManager();

// OAuth Client ID used to initiate OAuth2Client class.
const OAUTH_CLIENT_ID =
@@ -53,9 +51,6 @@ const SIGN_IN_SUCCESS_URL =
const SIGN_IN_FAILURE_URL =
'https://developers.google.com/gemini-code-assist/auth_failure_gemini';

const GEMINI_DIR = '.gemini';
const CREDENTIAL_FILENAME = 'oauth_creds.json';

/**
* An Authentication URL for updating the credentials of a Oauth2Client
* as well as a promise that will resolve when the credentials have
@@ -99,7 +94,7 @@ async function initOauthClient(
if (await loadCachedCredentials(client)) {
// Found valid cached credentials.
// Check if we need to retrieve Google Account ID or Email
if (!getCachedGoogleAccount()) {
if (!userAccountManager.getCachedGoogleAccount()) {
try {
await fetchAndCacheUserInfo(client);
} catch {
@@ -352,7 +347,7 @@ export function getAvailablePort(): Promise<number> {

async function loadCachedCredentials(client: OAuth2Client): Promise<boolean> {
const pathsToTry = [
getCachedCredentialPath(),
Storage.getOAuthCredsPath(),
process.env['GOOGLE_APPLICATION_CREDENTIALS'],
].filter((p): p is string => !!p);

@@ -380,26 +375,22 @@ async function loadCachedCredentials(client: OAuth2Client): Promise<boolean> {
}

async function cacheCredentials(credentials: Credentials) {
const filePath = getCachedCredentialPath();
const filePath = Storage.getOAuthCredsPath();
await fs.mkdir(path.dirname(filePath), { recursive: true });

const credString = JSON.stringify(credentials, null, 2);
await fs.writeFile(filePath, credString, { mode: 0o600 });
}

function getCachedCredentialPath(): string {
return path.join(os.homedir(), GEMINI_DIR, CREDENTIAL_FILENAME);
}

export function clearOauthClientCache() {
oauthClientPromises.clear();
}

export async function clearCachedCredentialFile() {
try {
await fs.rm(getCachedCredentialPath(), { force: true });
await fs.rm(Storage.getOAuthCredsPath(), { force: true });
// Clear the Google Account ID cache when credentials are cleared
await clearCachedGoogleAccount();
await userAccountManager.clearCachedGoogleAccount();
// Clear the in-memory OAuth client cache to force re-authentication
clearOauthClientCache();
} catch (e) {
@@ -433,9 +424,7 @@ async function fetchAndCacheUserInfo(client: OAuth2Client): Promise<void> {
}

const userInfo = await response.json();
if (userInfo.email) {
await cacheGoogleAccount(userInfo.email);
}
await userAccountManager.cacheGoogleAccount(userInfo.email);
} catch (error) {
console.error('Error retrieving user info:', error);
}
@@ -22,16 +22,11 @@ import { ShellTool } from '../tools/shell.js';
import { WriteFileTool } from '../tools/write-file.js';
import { WebFetchTool } from '../tools/web-fetch.js';
import { ReadManyFilesTool } from '../tools/read-many-files.js';
import {
MemoryTool,
setGeminiMdFilename,
GEMINI_CONFIG_DIR as GEMINI_DIR,
} from '../tools/memoryTool.js';
import { MemoryTool, setGeminiMdFilename } from '../tools/memoryTool.js';
import { WebSearchTool } from '../tools/web-search.js';
import { GeminiClient } from '../core/client.js';
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
import { GitService } from '../services/gitService.js';
import { getProjectTempDir } from '../utils/paths.js';
import {
initializeTelemetry,
DEFAULT_TELEMETRY_TARGET,
@@ -57,6 +52,7 @@ import { IdeConnectionEvent, IdeConnectionType } from '../telemetry/types.js';
// Re-export OAuth config type
export type { MCPOAuthConfig };
import { WorkspaceContext } from '../utils/workspaceContext.js';
import { Storage } from './storage.js';

export enum ApprovalMode {
DEFAULT = 'default',
@@ -272,6 +268,7 @@ export class Config {
private readonly shouldUseNodePtyShell: boolean;
private readonly skipNextSpeakerCheck: boolean;
private initialized: boolean = false;
readonly storage: Storage;

constructor(params: ConfigParameters) {
this.sessionId = params.sessionId;
@@ -340,6 +337,7 @@ export class Config {
this.trustedFolder = params.trustedFolder;
this.shouldUseNodePtyShell = params.shouldUseNodePtyShell ?? false;
this.skipNextSpeakerCheck = params.skipNextSpeakerCheck ?? false;
this.storage = new Storage(this.targetDir);

if (params.contextFileName) {
setGeminiMdFilename(params.contextFileName);
@@ -591,14 +589,6 @@ export class Config {
return this.geminiClient;
}

getGeminiDir(): string {
return path.join(this.targetDir, GEMINI_DIR);
}

getProjectTempDir(): string {
return getProjectTempDir(this.getProjectRoot());
}

getEnableRecursiveFileSearch(): boolean {
return this.fileFiltering.enableRecursiveFileSearch;
}
@@ -744,7 +734,7 @@ export class Config {

async getGitService(): Promise<GitService> {
if (!this.gitService) {
this.gitService = new GitService(this.targetDir);
this.gitService = new GitService(this.targetDir, this.storage);
await this.gitService.initialize();
}
return this.gitService;
packages/core/src/config/storage.test.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import { describe, it, expect, vi } from 'vitest';
import * as os from 'os';
import * as path from 'node:path';

vi.mock('fs', async (importOriginal) => {
const actual = await importOriginal<typeof import('fs')>();
return {
...actual,
mkdirSync: vi.fn(),
};
});

import { Storage } from './storage.js';

describe('Storage – getGlobalSettingsPath', () => {
it('returns path to ~/.gemini/settings.json', () => {
const expected = path.join(os.homedir(), '.gemini', 'settings.json');
expect(Storage.getGlobalSettingsPath()).toBe(expected);
});
});

describe('Storage – additional helpers', () => {
const projectRoot = '/tmp/project';
const storage = new Storage(projectRoot);

it('getWorkspaceSettingsPath returns project/.gemini/settings.json', () => {
const expected = path.join(projectRoot, '.gemini', 'settings.json');
expect(storage.getWorkspaceSettingsPath()).toBe(expected);
});

it('getUserCommandsDir returns ~/.gemini/commands', () => {
const expected = path.join(os.homedir(), '.gemini', 'commands');
expect(Storage.getUserCommandsDir()).toBe(expected);
});

it('getProjectCommandsDir returns project/.gemini/commands', () => {
const expected = path.join(projectRoot, '.gemini', 'commands');
expect(storage.getProjectCommandsDir()).toBe(expected);
});

it('getMcpOAuthTokensPath returns ~/.gemini/mcp-oauth-tokens.json', () => {
const expected = path.join(
os.homedir(),
'.gemini',
'mcp-oauth-tokens.json',
);
expect(Storage.getMcpOAuthTokensPath()).toBe(expected);
});
});
packages/core/src/config/storage.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import * as path from 'node:path';
import * as os from 'os';
import * as crypto from 'crypto';
import * as fs from 'fs';

export const GEMINI_DIR = '.gemini';
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
const TMP_DIR_NAME = 'tmp';

export class Storage {
private readonly targetDir: string;

constructor(targetDir: string) {
this.targetDir = targetDir;
}

static getGlobalGeminiDir(): string {
const homeDir = os.homedir();
if (!homeDir) {
return path.join(os.tmpdir(), '.gemini');
}
return path.join(homeDir, GEMINI_DIR);
}

static getMcpOAuthTokensPath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'mcp-oauth-tokens.json');
}

static getGlobalSettingsPath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'settings.json');
}

static getInstallationIdPath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'installation_id');
}

static getGoogleAccountsPath(): string {
return path.join(Storage.getGlobalGeminiDir(), GOOGLE_ACCOUNTS_FILENAME);
}

static getUserCommandsDir(): string {
return path.join(Storage.getGlobalGeminiDir(), 'commands');
}

static getGlobalMemoryFilePath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'memory.md');
}

static getGlobalTempDir(): string {
return path.join(Storage.getGlobalGeminiDir(), TMP_DIR_NAME);
}

getGeminiDir(): string {
return path.join(this.targetDir, GEMINI_DIR);
}

getProjectTempDir(): string {
const hash = this.getFilePathHash(this.getProjectRoot());
const tempDir = Storage.getGlobalTempDir();
return path.join(tempDir, hash);
}

ensureProjectTempDirExists(): void {
fs.mkdirSync(this.getProjectTempDir(), { recursive: true });
}

static getOAuthCredsPath(): string {
return path.join(Storage.getGlobalGeminiDir(), 'oauth_creds.json');
}

getProjectRoot(): string {
return this.targetDir;
}

private getFilePathHash(filePath: string): string {
return crypto.createHash('sha256').update(filePath).digest('hex');
}

getHistoryDir(): string {
const hash = this.getFilePathHash(this.getProjectRoot());
const historyDir = path.join(Storage.getGlobalGeminiDir(), 'history');
return path.join(historyDir, hash);
}

getWorkspaceSettingsPath(): string {
return path.join(this.getGeminiDir(), 'settings.json');
}

getProjectCommandsDir(): string {
return path.join(this.getGeminiDir(), 'commands');
}

getProjectTempCheckpointsDir(): string {
return path.join(this.getProjectTempDir(), 'checkpoints');
}

getExtensionsDir(): string {
return path.join(this.getGeminiDir(), 'extensions');
}

getExtensionsConfigPath(): string {
return path.join(this.getExtensionsDir(), 'gemini-extension.json');
}

getHistoryFilePath(): string {
return path.join(this.getProjectTempDir(), 'shell_history');
}
}
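The class above splits global, home-directory paths (static helpers) from project-scoped paths (instance methods keyed off targetDir). An illustrative sketch, using only methods defined in the file above:

import { Storage } from './storage.js';

// Global paths are independent of any project:
Storage.getGlobalSettingsPath();    // ~/.gemini/settings.json
Storage.getUserCommandsDir();       // ~/.gemini/commands

// Project paths hang off the directory passed to the constructor:
const storage = new Storage('/path/to/project');
storage.getWorkspaceSettingsPath(); // /path/to/project/.gemini/settings.json
storage.getProjectTempDir();        // ~/.gemini/tmp/<sha256 of the project root>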
@@ -19,7 +19,7 @@ import { Config } from '../config/config.js';

import { UserTierId } from '../code_assist/types.js';
import { LoggingContentGenerator } from './loggingContentGenerator.js';
import { getInstallationId } from '../utils/user_id.js';
import { InstallationManager } from '../utils/installationManager.js';

/**
* Interface abstracting the core functionalities for generating content and counting tokens.
@@ -136,7 +136,8 @@ export async function createContentGenerator(
) {
let headers: Record<string, string> = { ...baseHeaders };
if (gcConfig?.getUsageStatisticsEnabled()) {
const installationId = getInstallationId();
const installationManager = new InstallationManager();
const installationId = installationManager.getInstallationId();
headers = {
...headers,
'x-gemini-api-privileged-user-id': `${installationId}`,
@@ -20,6 +20,7 @@ import {
encodeTagName,
decodeTagName,
} from './logger.js';
import { Storage } from '../config/storage.js';
import { promises as fs, existsSync } from 'node:fs';
import path from 'node:path';
import { Content } from '@google/genai';
@@ -83,7 +84,7 @@ describe('Logger', () => {
await cleanupLogAndCheckpointFiles();
// Ensure the directory exists for the test
await fs.mkdir(TEST_GEMINI_DIR, { recursive: true });
logger = new Logger(testSessionId);
logger = new Logger(testSessionId, new Storage(process.cwd()));
await logger.initialize();
});

@@ -150,7 +151,10 @@ describe('Logger', () => {
TEST_LOG_FILE_PATH,
JSON.stringify(existingLogs, null, 2),
);
const newLogger = new Logger(currentSessionId);
const newLogger = new Logger(
currentSessionId,
new Storage(process.cwd()),
);
await newLogger.initialize();
expect(newLogger['messageId']).toBe(2);
expect(newLogger['logs']).toEqual(existingLogs);
@@ -171,7 +175,7 @@ describe('Logger', () => {
TEST_LOG_FILE_PATH,
JSON.stringify(existingLogs, null, 2),
);
const newLogger = new Logger('a-new-session');
const newLogger = new Logger('a-new-session', new Storage(process.cwd()));
await newLogger.initialize();
expect(newLogger['messageId']).toBe(0);
newLogger.close();
@@ -196,7 +200,7 @@ describe('Logger', () => {
.spyOn(console, 'debug')
.mockImplementation(() => {});

const newLogger = new Logger(testSessionId);
const newLogger = new Logger(testSessionId, new Storage(process.cwd()));
await newLogger.initialize();

expect(consoleDebugSpy).toHaveBeenCalledWith(
@@ -224,7 +228,7 @@ describe('Logger', () => {
.spyOn(console, 'debug')
.mockImplementation(() => {});

const newLogger = new Logger(testSessionId);
const newLogger = new Logger(testSessionId, new Storage(process.cwd()));
await newLogger.initialize();

expect(consoleDebugSpy).toHaveBeenCalledWith(
@@ -274,7 +278,10 @@ describe('Logger', () => {
});

it('should handle logger not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId);
const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close(); // Ensure it's treated as uninitialized
const consoleDebugSpy = vi
.spyOn(console, 'debug')
@@ -289,10 +296,16 @@ describe('Logger', () => {

it('should simulate concurrent writes from different logger instances to the same file', async () => {
const concurrentSessionId = 'concurrent-session';
const logger1 = new Logger(concurrentSessionId);
const logger1 = new Logger(
concurrentSessionId,
new Storage(process.cwd()),
);
await logger1.initialize();

const logger2 = new Logger(concurrentSessionId);
const logger2 = new Logger(
concurrentSessionId,
new Storage(process.cwd()),
);
await logger2.initialize();
expect(logger2['sessionId']).toEqual(logger1['sessionId']);

@@ -345,14 +358,14 @@ describe('Logger', () => {

describe('getPreviousUserMessages', () => {
it('should retrieve all user messages from logs, sorted newest first', async () => {
const loggerSort = new Logger('session-1');
const loggerSort = new Logger('session-1', new Storage(process.cwd()));
await loggerSort.initialize();
await loggerSort.logMessage(MessageSenderType.USER, 'S1M0_ts100000');
vi.advanceTimersByTime(1000);
await loggerSort.logMessage(MessageSenderType.USER, 'S1M1_ts101000');
vi.advanceTimersByTime(1000);
// Switch to a different session to log
const loggerSort2 = new Logger('session-2');
const loggerSort2 = new Logger('session-2', new Storage(process.cwd()));
await loggerSort2.initialize();
await loggerSort2.logMessage(MessageSenderType.USER, 'S2M0_ts102000');
vi.advanceTimersByTime(1000);
@@ -365,7 +378,10 @@ describe('Logger', () => {
loggerSort.close();
loggerSort2.close();

const finalLogger = new Logger('final-session');
const finalLogger = new Logger(
'final-session',
new Storage(process.cwd()),
);
await finalLogger.initialize();

const messages = await finalLogger.getPreviousUserMessages();
@@ -385,7 +401,10 @@ describe('Logger', () => {
});

it('should return empty array if logger not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId);
const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close();
const messages = await uninitializedLogger.getPreviousUserMessages();
expect(messages).toEqual([]);
@@ -428,7 +447,10 @@ describe('Logger', () => {
});

it('should not throw if logger is not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId);
const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close();
const consoleErrorSpy = vi
.spyOn(console, 'error')
@@ -525,7 +547,10 @@ describe('Logger', () => {
});

it('should return an empty array if logger is not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId);
const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close();
const consoleErrorSpy = vi
.spyOn(console, 'error')
@@ -613,7 +638,10 @@ describe('Logger', () => {
});

it('should return false if logger is not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId);
const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close();
const consoleErrorSpy = vi
.spyOn(console, 'error')
@@ -651,7 +679,10 @@ describe('Logger', () => {
});

it('should throw an error if logger is not initialized', async () => {
const uninitializedLogger = new Logger(testSessionId);
const uninitializedLogger = new Logger(
testSessionId,
new Storage(process.cwd()),
);
uninitializedLogger.close();

await expect(uninitializedLogger.checkpointExists(tag)).rejects.toThrow(

@@ -7,7 +7,7 @@
import path from 'node:path';
import { promises as fs } from 'node:fs';
import { Content } from '@google/genai';
import { getProjectTempDir } from '../utils/paths.js';
import { Storage } from '../config/storage.js';

const LOG_FILE_NAME = 'logs.json';

@@ -67,7 +67,10 @@ export class Logger {
private initialized = false;
private logs: LogEntry[] = []; // In-memory cache, ideally reflects the last known state of the file

constructor(sessionId: string) {
constructor(
sessionId: string,
private readonly storage: Storage,
) {
this.sessionId = sessionId;
}

@@ -130,7 +133,7 @@ export class Logger {
return;
}

this.geminiDir = getProjectTempDir(process.cwd());
this.geminiDir = this.storage.getProjectTempDir();
this.logFilePath = path.join(this.geminiDir, LOG_FILE_NAME);

try {
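With this change the Logger no longer derives its directory from getProjectTempDir(process.cwd()); the caller supplies a Storage instance. A hedged sketch of the new construction, mirroring the test updates above:

import { Logger } from './logger.js';
import { Storage } from '../config/storage.js';

const logger = new Logger(sessionId, new Storage(process.cwd()));
await logger.initialize(); // resolves logs.json under storage.getProjectTempDir()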
@@ -101,3 +101,4 @@ export { OAuthUtils } from './mcp/oauth-utils.js';
export * from './telemetry/index.js';
export { sessionId } from './utils/session.js';
export * from './utils/browser.js';
export { Storage } from './config/storage.js';

@@ -21,6 +21,7 @@ vi.mock('node:fs', () => ({
mkdir: vi.fn(),
unlink: vi.fn(),
},
mkdirSync: vi.fn(),
}));

vi.mock('node:os', () => ({

@@ -6,7 +6,7 @@

import { promises as fs } from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { Storage } from '../config/storage.js';
import { getErrorMessage } from '../utils/errors.js';

/**
@@ -36,17 +36,13 @@ export interface MCPOAuthCredentials {
* Class for managing MCP OAuth token storage and retrieval.
*/
export class MCPOAuthTokenStorage {
private static readonly TOKEN_FILE = 'mcp-oauth-tokens.json';
private static readonly CONFIG_DIR = '.gemini';

/**
* Get the path to the token storage file.
*
* @returns The full path to the token storage file
*/
private static getTokenFilePath(): string {
const homeDir = os.homedir();
return path.join(homeDir, this.CONFIG_DIR, this.TOKEN_FILE);
return Storage.getMcpOAuthTokensPath();
}

/**

@@ -40,9 +40,11 @@ describe('ChatRecordingService', () => {
mockConfig = {
getSessionId: vi.fn().mockReturnValue('test-session-id'),
getProjectRoot: vi.fn().mockReturnValue('/test/project/root'),
getProjectTempDir: vi
.fn()
.mockReturnValue('/test/project/root/.gemini/tmp'),
storage: {
getProjectTempDir: vi
.fn()
.mockReturnValue('/test/project/root/.gemini/tmp'),
},
getModel: vi.fn().mockReturnValue('gemini-pro'),
getDebugMode: vi.fn().mockReturnValue(false),
} as unknown as Config;

@@ -136,7 +136,10 @@ export class ChatRecordingService {
this.cachedLastConvData = null;
} else {
// Create new session
const chatsDir = path.join(this.config.getProjectTempDir(), 'chats');
const chatsDir = path.join(
this.config.storage.getProjectTempDir(),
'chats',
);
fs.mkdirSync(chatsDir, { recursive: true });

const timestamp = new Date()
@@ -422,7 +425,10 @@ export class ChatRecordingService {
*/
deleteSession(sessionId: string): void {
try {
const chatsDir = path.join(this.config.getProjectTempDir(), 'chats');
const chatsDir = path.join(
this.config.storage.getProjectTempDir(),
'chats',
);
const sessionPath = path.join(chatsDir, `${sessionId}.json`);
fs.unlinkSync(sessionPath);
} catch (error) {

@@ -6,6 +6,7 @@

import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { GitService } from './gitService.js';
import { Storage } from '../config/storage.js';
import * as path from 'path';
import * as fs from 'fs/promises';
import * as os from 'os';
@@ -55,6 +56,7 @@ describe('GitService', () => {
let projectRoot: string;
let homedir: string;
let hash: string;
let storage: Storage;

beforeEach(async () => {
testRootDir = await fs.mkdtemp(path.join(os.tmpdir(), 'git-service-test-'));
@@ -100,6 +102,7 @@ describe('GitService', () => {
hoistedMockCommit.mockResolvedValue({
commit: 'initial',
});
storage = new Storage(projectRoot);
});

afterEach(async () => {
@@ -109,13 +112,13 @@ describe('GitService', () => {

describe('constructor', () => {
it('should successfully create an instance', () => {
expect(() => new GitService(projectRoot)).not.toThrow();
expect(() => new GitService(projectRoot, storage)).not.toThrow();
});
});

describe('verifyGitAvailability', () => {
it('should resolve true if git --version command succeeds', async () => {
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await expect(service.verifyGitAvailability()).resolves.toBe(true);
});

@@ -124,7 +127,7 @@ describe('GitService', () => {
callback(new Error('git not found'));
return {} as ChildProcess;
});
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await expect(service.verifyGitAvailability()).resolves.toBe(false);
});
});
@@ -135,14 +138,14 @@ describe('GitService', () => {
callback(new Error('git not found'));
return {} as ChildProcess;
});
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await expect(service.initialize()).rejects.toThrow(
'Checkpointing is enabled, but Git is not installed. Please install Git or disable checkpointing to continue.',
);
});

it('should call setupShadowGitRepository if Git is available', async () => {
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
const setupSpy = vi
.spyOn(service, 'setupShadowGitRepository')
.mockResolvedValue(undefined);
@@ -162,14 +165,14 @@ describe('GitService', () => {
});

it('should create history and repository directories', async () => {
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository();
const stats = await fs.stat(repoDir);
expect(stats.isDirectory()).toBe(true);
});

it('should create a .gitconfig file with the correct content', async () => {
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository();

const expectedConfigContent =
@@ -180,7 +183,7 @@ describe('GitService', () => {

it('should initialize git repo in historyDir if not already initialized', async () => {
hoistedMockCheckIsRepo.mockResolvedValue(false);
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository();
expect(hoistedMockSimpleGit).toHaveBeenCalledWith(repoDir);
expect(hoistedMockInit).toHaveBeenCalled();
@@ -188,7 +191,7 @@ describe('GitService', () => {

it('should not initialize git repo if already initialized', async () => {
hoistedMockCheckIsRepo.mockResolvedValue(true);
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository();
expect(hoistedMockInit).not.toHaveBeenCalled();
});
@@ -198,7 +201,7 @@ describe('GitService', () => {
const visibleGitIgnorePath = path.join(projectRoot, '.gitignore');
await fs.writeFile(visibleGitIgnorePath, gitignoreContent);

const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository();

const hiddenGitIgnorePath = path.join(repoDir, '.gitignore');
@@ -207,7 +210,7 @@ describe('GitService', () => {
});

it('should not create a .gitignore in shadow repo if project .gitignore does not exist', async () => {
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository();

const hiddenGitIgnorePath = path.join(repoDir, '.gitignore');
@@ -221,7 +224,7 @@ describe('GitService', () => {
// Create a directory instead of a file to cause a read error
await fs.mkdir(visibleGitIgnorePath);

const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
// EISDIR is the expected error code on Unix-like systems
await expect(service.setupShadowGitRepository()).rejects.toThrow(
/EISDIR: illegal operation on a directory, read|EBUSY: resource busy or locked, read/,
@@ -230,7 +233,7 @@ describe('GitService', () => {

it('should make an initial commit if no commits exist in history repo', async () => {
hoistedMockCheckIsRepo.mockResolvedValue(false);
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository();
expect(hoistedMockCommit).toHaveBeenCalledWith('Initial commit', {
'--allow-empty': null,
@@ -239,7 +242,7 @@ describe('GitService', () => {

it('should not make an initial commit if commits already exist', async () => {
hoistedMockCheckIsRepo.mockResolvedValue(true);
const service = new GitService(projectRoot);
const service = new GitService(projectRoot, storage);
await service.setupShadowGitRepository();
expect(hoistedMockCommit).not.toHaveBeenCalled();
});

@@ -6,22 +6,22 @@

import * as fs from 'fs/promises';
import * as path from 'path';
import * as os from 'os';
import { isNodeError } from '../utils/errors.js';
import { exec } from 'node:child_process';
import { simpleGit, SimpleGit, CheckRepoActions } from 'simple-git';
import { getProjectHash, GEMINI_DIR } from '../utils/paths.js';
import { Storage } from '../config/storage.js';

export class GitService {
private projectRoot: string;
private storage: Storage;

constructor(projectRoot: string) {
constructor(projectRoot: string, storage: Storage) {
this.projectRoot = path.resolve(projectRoot);
this.storage = storage;
}

private getHistoryDir(): string {
const hash = getProjectHash(this.projectRoot);
return path.join(os.homedir(), GEMINI_DIR, 'history', hash);
return this.storage.getHistoryDir();
}

async initialize(): Promise<void> {
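GitService now receives the same Storage instance that Config owns, so its shadow-repo history directory comes from storage.getHistoryDir() instead of a hand-built ~/.gemini/history/<hash> path. A small sketch of the updated wiring, matching the constructor change above:

import { GitService } from './gitService.js';
import { Storage } from '../config/storage.js';

const storage = new Storage(projectRoot);
const gitService = new GitService(projectRoot, storage);
await gitService.initialize();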
@@ -22,13 +22,13 @@ import {
TEST_ONLY,
} from './clearcut-logger.js';
import { ConfigParameters } from '../../config/config.js';
import * as userAccount from '../../utils/user_account.js';
import * as userId from '../../utils/user_id.js';
import { EventMetadataKey } from './event-metadata-key.js';
import { makeFakeConfig } from '../../test-utils/config.js';
import { http, HttpResponse } from 'msw';
import { server } from '../../mocks/msw.js';
import { makeChatCompressionEvent } from '../types.js';
import { UserAccountManager } from '../../utils/userAccountManager.js';
import { InstallationManager } from '../../utils/installationManager.js';

interface CustomMatchers<R = unknown> {
toHaveMetadataValue: ([key, value]: [EventMetadataKey, string]) => R;
@@ -71,11 +71,11 @@ expect.extend({
},
});

vi.mock('../../utils/user_account');
vi.mock('../../utils/user_id');
vi.mock('../../utils/userAccountManager.js');
vi.mock('../../utils/installationManager.js');

const mockUserAccount = vi.mocked(userAccount);
const mockUserId = vi.mocked(userId);
const mockUserAccount = vi.mocked(UserAccountManager.prototype);
const mockInstallMgr = vi.mocked(InstallationManager.prototype);

// TODO(richieforeman): Consider moving this to test setup globally.
beforeAll(() => {
@@ -113,7 +113,6 @@ describe('ClearcutLogger', () => {
config = {} as Partial<ConfigParameters>,
lifetimeGoogleAccounts = 1,
cachedGoogleAccount = 'test@google.com',
installationId = 'test-installation-id',
} = {}) {
server.resetHandlers(
http.post(CLEARCUT_URL, () => HttpResponse.text(EXAMPLE_RESPONSE)),
@@ -131,7 +130,9 @@ describe('ClearcutLogger', () => {
mockUserAccount.getLifetimeGoogleAccounts.mockReturnValue(
lifetimeGoogleAccounts,
);
mockUserId.getInstallationId.mockReturnValue(installationId);
mockInstallMgr.getInstallationId = vi
.fn()
.mockReturnValue('test-installation-id');

const logger = ClearcutLogger.getInstance(loggerConfig);

@@ -22,12 +22,9 @@ import {
} from '../types.js';
import { EventMetadataKey } from './event-metadata-key.js';
import { Config } from '../../config/config.js';
import { InstallationManager } from '../../utils/installationManager.js';
import { UserAccountManager } from '../../utils/userAccountManager.js';
import { safeJsonStringify } from '../../utils/safeJsonStringify.js';
import {
getCachedGoogleAccount,
getLifetimeGoogleAccounts,
} from '../../utils/user_account.js';
import { getInstallationId } from '../../utils/user_id.js';
import { FixedDeque } from 'mnemonist';
import { GIT_COMMIT_INFO, CLI_VERSION } from '../../generated/git-commit.js';
import { DetectedIde, detectIde } from '../../ide/detect-ide.js';
@@ -129,6 +126,8 @@ export class ClearcutLogger {
private config?: Config;
private sessionData: EventValue[] = [];
private promptId: string = '';
private readonly installationManager: InstallationManager;
private readonly userAccountManager: UserAccountManager;

/**
* Queue of pending events that need to be flushed to the server. New events
@@ -152,10 +151,12 @@ export class ClearcutLogger {
*/
private pendingFlush: boolean = false;

private constructor(config?: Config) {
private constructor(config: Config) {
this.config = config;
this.events = new FixedDeque<LogEventEntry[]>(Array, MAX_EVENTS);
this.promptId = config?.getSessionId() ?? '';
this.installationManager = new InstallationManager();
this.userAccountManager = new UserAccountManager();
}

static getInstance(config?: Config): ClearcutLogger | undefined {
@@ -202,12 +203,14 @@ export class ClearcutLogger {
}

createLogEvent(eventName: EventNames, data: EventValue[] = []): LogEvent {
const email = getCachedGoogleAccount();
const email = this.userAccountManager.getCachedGoogleAccount();

if (eventName !== EventNames.START_SESSION) {
data.push(...this.sessionData);
}
data = this.addDefaultFields(data);
const totalAccounts = this.userAccountManager.getLifetimeGoogleAccounts();

data = this.addDefaultFields(data, totalAccounts);

const logEvent: LogEvent = {
console_type: 'GEMINI_CLI',
@@ -220,7 +223,7 @@ export class ClearcutLogger {
if (email) {
logEvent.client_email = email;
} else {
logEvent.client_install_id = getInstallationId();
logEvent.client_install_id = this.installationManager.getInstallationId();
}

return logEvent;
@@ -679,8 +682,7 @@ export class ClearcutLogger {
* Adds default fields to data, and returns a new data array. This fields
* should exist on all log events.
*/
addDefaultFields(data: EventValue[]): EventValue[] {
const totalAccounts = getLifetimeGoogleAccounts();
addDefaultFields(data: EventValue[], totalAccounts: number): EventValue[] {
const surface = determineSurface();

const defaultLogMetadata: EventValue[] = [

@@ -17,6 +17,7 @@ vi.mock('fs', () => ({
},
statSync: vi.fn(),
readdirSync: vi.fn(),
mkdirSync: vi.fn(),
}));
import { LSTool } from './ls.js';
import { Config } from '../config/config.js';

@@ -18,7 +18,19 @@ import * as os from 'os';
import { ToolConfirmationOutcome } from './tools.js';

// Mock dependencies
vi.mock('fs/promises');
vi.mock(import('fs/promises'), async (importOriginal) => {
const actual = await importOriginal();
return {
...actual,
mkdir: vi.fn(),
readFile: vi.fn(),
};
});

vi.mock('fs', () => ({
mkdirSync: vi.fn(),
}));

vi.mock('os');

const MEMORY_SECTION_HEADER = '## Gemini Added Memories';

@@ -15,7 +15,7 @@ import {
import { FunctionDeclaration } from '@google/genai';
import * as fs from 'fs/promises';
import * as path from 'path';
import { homedir } from 'os';
import { Storage } from '../config/storage.js';
import * as Diff from 'diff';
import { DEFAULT_DIFF_OPTIONS } from './diffOptions.js';
import { tildeifyPath } from '../utils/paths.js';
@@ -96,7 +96,7 @@ interface SaveMemoryParams {
}

function getGlobalMemoryFilePath(): string {
return path.join(homedir(), GEMINI_CONFIG_DIR, getCurrentGeminiMdFilename());
return path.join(Storage.getGlobalGeminiDir(), getCurrentGeminiMdFilename());
}

/**

@@ -27,6 +27,7 @@ let mockSendMessageStream: any;

vi.mock('fs', () => ({
statSync: vi.fn(),
mkdirSync: vi.fn(),
}));

vi.mock('../core/client.js', () => ({
packages/core/src/utils/installationManager.test.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import { vi, describe, it, expect, beforeEach, afterEach, Mock } from 'vitest';
import { InstallationManager } from './installationManager.js';
import * as fs from 'node:fs';
import * as os from 'node:os';
import path from 'node:path';
import { randomUUID } from 'crypto';

vi.mock('node:fs', async (importOriginal) => {
const actual = await importOriginal<typeof import('node:fs')>();
return {
...actual,
readFileSync: vi.fn(actual.readFileSync),
existsSync: vi.fn(actual.existsSync),
} as typeof actual;
});

vi.mock('os', async (importOriginal) => {
const os = await importOriginal<typeof import('os')>();
return {
...os,
homedir: vi.fn(),
};
});

vi.mock('crypto', async (importOriginal) => {
const crypto = await importOriginal<typeof import('crypto')>();
return {
...crypto,
randomUUID: vi.fn(),
};
});

describe('InstallationManager', () => {
let tempHomeDir: string;
let installationManager: InstallationManager;
const installationIdFile = () =>
path.join(tempHomeDir, '.gemini', 'installation_id');

beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
);
(os.homedir as Mock).mockReturnValue(tempHomeDir);
installationManager = new InstallationManager();
});

afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.clearAllMocks();
});

describe('getInstallationId', () => {
it('should create and write a new installation ID if one does not exist', () => {
const newId = 'new-uuid-123';
(randomUUID as Mock).mockReturnValue(newId);

const installationId = installationManager.getInstallationId();

expect(installationId).toBe(newId);
expect(fs.existsSync(installationIdFile())).toBe(true);
expect(fs.readFileSync(installationIdFile(), 'utf-8')).toBe(newId);
});

it('should read an existing installation ID from a file', () => {
const existingId = 'existing-uuid-123';
fs.mkdirSync(path.dirname(installationIdFile()), { recursive: true });
fs.writeFileSync(installationIdFile(), existingId);

const installationId = installationManager.getInstallationId();

expect(installationId).toBe(existingId);
});

it('should return the same ID on subsequent calls', () => {
const firstId = installationManager.getInstallationId();
const secondId = installationManager.getInstallationId();
expect(secondId).toBe(firstId);
});

it('should handle read errors and return a fallback ID', () => {
vi.mocked(fs.existsSync).mockReturnValueOnce(true);
const readSpy = vi.mocked(fs.readFileSync);
readSpy.mockImplementationOnce(() => {
throw new Error('Read error');
});
const consoleErrorSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});

const id = installationManager.getInstallationId();

expect(id).toBe('123456789');
expect(consoleErrorSpy).toHaveBeenCalled();
});
});
});
packages/core/src/utils/installationManager.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import * as fs from 'fs';
import { randomUUID } from 'crypto';
import * as path from 'node:path';
import { Storage } from '../config/storage.js';

export class InstallationManager {
private getInstallationIdPath(): string {
return Storage.getInstallationIdPath();
}

private readInstallationIdFromFile(): string | null {
const installationIdFile = this.getInstallationIdPath();
if (fs.existsSync(installationIdFile)) {
const installationid = fs
.readFileSync(installationIdFile, 'utf-8')
.trim();
return installationid || null;
}
return null;
}

private writeInstallationIdToFile(installationId: string) {
const installationIdFile = this.getInstallationIdPath();
const dir = path.dirname(installationIdFile);
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(installationIdFile, installationId, 'utf-8');
}

/**
* Retrieves the installation ID from a file, creating it if it doesn't exist.
* This ID is used for unique user installation tracking.
* @returns A UUID string for the user.
*/
getInstallationId(): string {
try {
let installationId = this.readInstallationIdFromFile();

if (!installationId) {
installationId = randomUUID();
this.writeInstallationIdToFile(installationId);
}

return installationId;
} catch (error) {
console.error(
'Error accessing installation ID file, generating ephemeral ID:',
error,
);
return '123456789';
}
}
}
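A brief usage sketch of the new class, assuming only the methods defined in the file above:

import { InstallationManager } from './installationManager.js';

const manager = new InstallationManager();
const id = manager.getInstallationId(); // reads ~/.gemini/installation_id, creating it on first run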
@@ -10,8 +10,6 @@ import * as crypto from 'crypto';

export const GEMINI_DIR = '.gemini';
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
const TMP_DIR_NAME = 'tmp';
const COMMANDS_DIR_NAME = 'commands';

/**
* Special characters that need to be escaped in file paths for shell compatibility.
@@ -174,33 +172,6 @@ export function getProjectHash(projectRoot: string): string {
return crypto.createHash('sha256').update(projectRoot).digest('hex');
}

/**
* Generates a unique temporary directory path for a project.
* @param projectRoot The absolute path to the project's root directory.
* @returns The path to the project's temporary directory.
*/
export function getProjectTempDir(projectRoot: string): string {
const hash = getProjectHash(projectRoot);
return path.join(os.homedir(), GEMINI_DIR, TMP_DIR_NAME, hash);
}

/**
* Returns the absolute path to the user-level commands directory.
* @returns The path to the user's commands directory.
*/
export function getUserCommandsDir(): string {
return path.join(os.homedir(), GEMINI_DIR, COMMANDS_DIR_NAME);
}

/**
* Returns the absolute path to the project-level commands directory.
* @param projectRoot The absolute path to the project's root directory.
* @returns The path to the project's commands directory.
*/
export function getProjectCommandsDir(projectRoot: string): string {
return path.join(projectRoot, GEMINI_DIR, COMMANDS_DIR_NAME);
}

/**
* Checks if a path is a subpath of another path.
* @param parentPath The parent path.

@@ -5,12 +5,7 @@
*/

import { vi, describe, it, expect, beforeEach, afterEach, Mock } from 'vitest';
import {
cacheGoogleAccount,
getCachedGoogleAccount,
clearCachedGoogleAccount,
getLifetimeGoogleAccounts,
} from './user_account.js';
import { UserAccountManager } from './userAccountManager.js';
import * as fs from 'node:fs';
import * as os from 'node:os';
import path from 'node:path';
@@ -23,16 +18,21 @@ vi.mock('os', async (importOriginal) => {
};
});

describe('user_account', () => {
describe('UserAccountManager', () => {
let tempHomeDir: string;
const accountsFile = () =>
path.join(tempHomeDir, '.gemini', 'google_accounts.json');
let userAccountManager: UserAccountManager;
let accountsFile: () => string;

beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
);
(os.homedir as Mock).mockReturnValue(tempHomeDir);
accountsFile = () =>
path.join(tempHomeDir, '.gemini', 'google_accounts.json');
userAccountManager = new UserAccountManager();
});

afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.clearAllMocks();
@@ -40,7 +40,7 @@ describe('user_account', () => {

describe('cacheGoogleAccount', () => {
it('should create directory and write initial account file', async () => {
await cacheGoogleAccount('test1@google.com');
await userAccountManager.cacheGoogleAccount('test1@google.com');

// Verify Google Account ID was cached
expect(fs.existsSync(accountsFile())).toBe(true);
@@ -60,7 +60,7 @@ describe('user_account', () => {
),
);

await cacheGoogleAccount('test3@google.com');
await userAccountManager.cacheGoogleAccount('test3@google.com');

expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe(
JSON.stringify(
@@ -84,8 +84,8 @@ describe('user_account', () => {
2,
),
);
await cacheGoogleAccount('test2@google.com');
await cacheGoogleAccount('test1@google.com');
await userAccountManager.cacheGoogleAccount('test2@google.com');
await userAccountManager.cacheGoogleAccount('test1@google.com');

expect(fs.readFileSync(accountsFile(), 'utf-8')).toBe(
JSON.stringify(
@@ -103,7 +103,7 @@ describe('user_account', () => {
.spyOn(console, 'log')
.mockImplementation(() => {});

await cacheGoogleAccount('test1@google.com');
await userAccountManager.cacheGoogleAccount('test1@google.com');

expect(consoleLogSpy).toHaveBeenCalled();
expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({
@@ -122,7 +122,7 @@ describe('user_account', () => {
.spyOn(console, 'log')
.mockImplementation(() => {});

await cacheGoogleAccount('test2@google.com');
await userAccountManager.cacheGoogleAccount('test2@google.com');

expect(consoleLogSpy).toHaveBeenCalled();
expect(JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'))).toEqual({
@@ -139,19 +139,19 @@ describe('user_account', () => {
accountsFile(),
JSON.stringify({ active: 'active@google.com', old: [] }, null, 2),
);
const account = getCachedGoogleAccount();
const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBe('active@google.com');
});

it('should return null if file does not exist', () => {
const account = getCachedGoogleAccount();
const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBeNull();
});

it('should return null if file is empty', () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), '');
const account = getCachedGoogleAccount();
const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBeNull();
});

@@ -162,7 +162,7 @@ describe('user_account', () => {
.spyOn(console, 'log')
.mockImplementation(() => {});

const account = getCachedGoogleAccount();
const account = userAccountManager.getCachedGoogleAccount();

expect(account).toBeNull();
expect(consoleLogSpy).toHaveBeenCalled();
@@ -171,7 +171,7 @@ describe('user_account', () => {
it('should return null if active key is missing', () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), JSON.stringify({ old: [] }));
const account = getCachedGoogleAccount();
const account = userAccountManager.getCachedGoogleAccount();
expect(account).toBeNull();
});
});
@@ -188,7 +188,7 @@ describe('user_account', () => {
),
);

await clearCachedGoogleAccount();
await userAccountManager.clearCachedGoogleAccount();

const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
expect(stored.active).toBeNull();
@@ -198,7 +198,7 @@ describe('user_account', () => {
it('should handle empty file gracefully', async () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), '');
await clearCachedGoogleAccount();
await userAccountManager.clearCachedGoogleAccount();
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
expect(stored.active).toBeNull();
expect(stored.old).toEqual([]);
@@ -211,7 +211,7 @@ describe('user_account', () => {
.spyOn(console, 'log')
.mockImplementation(() => {});

await clearCachedGoogleAccount();
await userAccountManager.clearCachedGoogleAccount();

expect(consoleLogSpy).toHaveBeenCalled();
const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
@@ -226,7 +226,7 @@ describe('user_account', () => {
JSON.stringify({ active: null, old: ['old1@google.com'] }, null, 2),
);

await clearCachedGoogleAccount();
await userAccountManager.clearCachedGoogleAccount();

const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
expect(stored.active).toBeNull();
@@ -247,7 +247,7 @@ describe('user_account', () => {
),
);

await clearCachedGoogleAccount();
await userAccountManager.clearCachedGoogleAccount();

const stored = JSON.parse(fs.readFileSync(accountsFile(), 'utf-8'));
expect(stored.active).toBeNull();
@@ -257,24 +257,24 @@ describe('user_account', () => {

describe('getLifetimeGoogleAccounts', () => {
it('should return 0 if the file does not exist', () => {
expect(getLifetimeGoogleAccounts()).toBe(0);
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
});

it('should return 0 if the file is empty', () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), '');
expect(getLifetimeGoogleAccounts()).toBe(0);
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
});

it('should return 0 if the file is corrupted', () => {
fs.mkdirSync(path.dirname(accountsFile()), { recursive: true });
fs.writeFileSync(accountsFile(), 'invalid json');
const consoleLogSpy = vi
const consoleDebugSpy = vi
.spyOn(console, 'log')
.mockImplementation(() => {});

expect(getLifetimeGoogleAccounts()).toBe(0);
expect(consoleLogSpy).toHaveBeenCalled();
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
expect(consoleDebugSpy).toHaveBeenCalled();
});

it('should return 1 if there is only an active account', () => {
@@ -283,7 +283,7 @@ describe('user_account', () => {
accountsFile(),
JSON.stringify({ active: 'test1@google.com', old: [] }),
);
expect(getLifetimeGoogleAccounts()).toBe(1);
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(1);
});

it('should correctly count old accounts when active is null', () => {
@@ -295,7 +295,7 @@ describe('user_account', () => {
old: ['test1@google.com', 'test2@google.com'],
}),
);
expect(getLifetimeGoogleAccounts()).toBe(2);
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(2);
});

it('should correctly count both active and old accounts', () => {
@@ -307,7 +307,7 @@ describe('user_account', () => {
old: ['test1@google.com', 'test2@google.com'],
}),
);
expect(getLifetimeGoogleAccounts()).toBe(3);
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(3);
});

it('should handle valid JSON with incorrect schema by returning 0', () => {
@@ -320,7 +320,7 @@ describe('user_account', () => {
.spyOn(console, 'log')
.mockImplementation(() => {});

expect(getLifetimeGoogleAccounts()).toBe(0);
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(0);
expect(consoleLogSpy).toHaveBeenCalled();
});

@@ -333,7 +333,7 @@ describe('user_account', () => {
old: ['test1@google.com', 'test2@google.com'],
}),
);
expect(getLifetimeGoogleAccounts()).toBe(2);
expect(userAccountManager.getLifetimeGoogleAccounts()).toBe(2);
});
});
});
140
packages/core/src/utils/userAccountManager.ts
Normal file
@@ -0,0 +1,140 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import path from 'node:path';
import { promises as fsp, readFileSync } from 'node:fs';
import { Storage } from '../config/storage.js';

interface UserAccounts {
  active: string | null;
  old: string[];
}

export class UserAccountManager {
  private getGoogleAccountsCachePath(): string {
    return Storage.getGoogleAccountsPath();
  }

  /**
   * Parses and validates the string content of an accounts file.
   * @param content The raw string content from the file.
   * @returns A valid UserAccounts object.
   */
  private parseAndValidateAccounts(content: string): UserAccounts {
    const defaultState = { active: null, old: [] };
    if (!content.trim()) {
      return defaultState;
    }

    const parsed = JSON.parse(content);

    // Inlined validation logic
    if (typeof parsed !== 'object' || parsed === null) {
      console.log('Invalid accounts file schema, starting fresh.');
      return defaultState;
    }
    const { active, old } = parsed as Partial<UserAccounts>;
    const isValid =
      (active === undefined || active === null || typeof active === 'string') &&
      (old === undefined ||
        (Array.isArray(old) && old.every((i) => typeof i === 'string')));

    if (!isValid) {
      console.log('Invalid accounts file schema, starting fresh.');
      return defaultState;
    }

    return {
      active: parsed.active ?? null,
      old: parsed.old ?? [],
    };
  }

  private readAccountsSync(filePath: string): UserAccounts {
    const defaultState = { active: null, old: [] };
    try {
      const content = readFileSync(filePath, 'utf-8');
      return this.parseAndValidateAccounts(content);
    } catch (error) {
      if (
        error instanceof Error &&
        'code' in error &&
        error.code === 'ENOENT'
      ) {
        return defaultState;
      }
      console.log('Error during sync read of accounts, starting fresh.', error);
      return defaultState;
    }
  }

  private async readAccounts(filePath: string): Promise<UserAccounts> {
    const defaultState = { active: null, old: [] };
    try {
      const content = await fsp.readFile(filePath, 'utf-8');
      return this.parseAndValidateAccounts(content);
    } catch (error) {
      if (
        error instanceof Error &&
        'code' in error &&
        error.code === 'ENOENT'
      ) {
        return defaultState;
      }
      console.log('Could not parse accounts file, starting fresh.', error);
      return defaultState;
    }
  }

  async cacheGoogleAccount(email: string): Promise<void> {
    const filePath = this.getGoogleAccountsCachePath();
    await fsp.mkdir(path.dirname(filePath), { recursive: true });

    const accounts = await this.readAccounts(filePath);

    if (accounts.active && accounts.active !== email) {
      if (!accounts.old.includes(accounts.active)) {
        accounts.old.push(accounts.active);
      }
    }

    // If the new email was in the old list, remove it
    accounts.old = accounts.old.filter((oldEmail) => oldEmail !== email);

    accounts.active = email;
    await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
  }

  getCachedGoogleAccount(): string | null {
    const filePath = this.getGoogleAccountsCachePath();
    const accounts = this.readAccountsSync(filePath);
    return accounts.active;
  }

  getLifetimeGoogleAccounts(): number {
    const filePath = this.getGoogleAccountsCachePath();
    const accounts = this.readAccountsSync(filePath);
    const allAccounts = new Set(accounts.old);
    if (accounts.active) {
      allAccounts.add(accounts.active);
    }
    return allAccounts.size;
  }

  async clearCachedGoogleAccount(): Promise<void> {
    const filePath = this.getGoogleAccountsCachePath();
    const accounts = await this.readAccounts(filePath);

    if (accounts.active) {
      if (!accounts.old.includes(accounts.active)) {
        accounts.old.push(accounts.active);
      }
      accounts.active = null;
    }

    await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
  }
}
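For reference, a minimal usage sketch of the UserAccountManager API introduced above; the import path and the example function are illustrative only and are not part of this change:

// Sketch only: exercises the UserAccountManager methods added in this commit.
import { UserAccountManager } from './utils/userAccountManager.js';

async function exampleUsage(): Promise<void> {
  const userAccountManager = new UserAccountManager();

  // Persist the active account; a previously active, different account is moved to `old`.
  await userAccountManager.cacheGoogleAccount('user@example.com');

  // Synchronous read of the currently active account (null if none is cached).
  console.log(userAccountManager.getCachedGoogleAccount());

  // Count of distinct accounts ever cached (active plus old).
  console.log(userAccountManager.getLifetimeGoogleAccounts());

  // Demote the active account into `old` and clear the active slot.
  await userAccountManager.clearCachedGoogleAccount();
}

void exampleUsage();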
@@ -1,131 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import path from 'node:path';
import { promises as fsp, readFileSync } from 'node:fs';
import * as os from 'os';
import { GEMINI_DIR, GOOGLE_ACCOUNTS_FILENAME } from './paths.js';

interface UserAccounts {
  active: string | null;
  old: string[];
}

function getGoogleAccountsCachePath(): string {
  return path.join(os.homedir(), GEMINI_DIR, GOOGLE_ACCOUNTS_FILENAME);
}

/**
 * Parses and validates the string content of an accounts file.
 * @param content The raw string content from the file.
 * @returns A valid UserAccounts object.
 */
function parseAndValidateAccounts(content: string): UserAccounts {
  const defaultState = { active: null, old: [] };
  if (!content.trim()) {
    return defaultState;
  }

  const parsed = JSON.parse(content);

  // Inlined validation logic
  if (typeof parsed !== 'object' || parsed === null) {
    console.log('Invalid accounts file schema, starting fresh.');
    return defaultState;
  }
  const { active, old } = parsed as Partial<UserAccounts>;
  const isValid =
    (active === undefined || active === null || typeof active === 'string') &&
    (old === undefined ||
      (Array.isArray(old) && old.every((i) => typeof i === 'string')));

  if (!isValid) {
    console.log('Invalid accounts file schema, starting fresh.');
    return defaultState;
  }

  return {
    active: parsed.active ?? null,
    old: parsed.old ?? [],
  };
}

function readAccountsSync(filePath: string): UserAccounts {
  const defaultState = { active: null, old: [] };
  try {
    const content = readFileSync(filePath, 'utf-8');
    return parseAndValidateAccounts(content);
  } catch (error) {
    if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
      return defaultState;
    }
    console.log('Error during sync read of accounts, starting fresh.', error);
    return defaultState;
  }
}

async function readAccounts(filePath: string): Promise<UserAccounts> {
  const defaultState = { active: null, old: [] };
  try {
    const content = await fsp.readFile(filePath, 'utf-8');
    return parseAndValidateAccounts(content);
  } catch (error) {
    if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
      return defaultState;
    }
    console.log('Could not parse accounts file, starting fresh.', error);
    return defaultState;
  }
}

export async function cacheGoogleAccount(email: string): Promise<void> {
  const filePath = getGoogleAccountsCachePath();
  await fsp.mkdir(path.dirname(filePath), { recursive: true });

  const accounts = await readAccounts(filePath);

  if (accounts.active && accounts.active !== email) {
    if (!accounts.old.includes(accounts.active)) {
      accounts.old.push(accounts.active);
    }
  }

  // If the new email was in the old list, remove it
  accounts.old = accounts.old.filter((oldEmail) => oldEmail !== email);

  accounts.active = email;
  await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
}

export function getCachedGoogleAccount(): string | null {
  const filePath = getGoogleAccountsCachePath();
  const accounts = readAccountsSync(filePath);
  return accounts.active;
}

export function getLifetimeGoogleAccounts(): number {
  const filePath = getGoogleAccountsCachePath();
  const accounts = readAccountsSync(filePath);
  const allAccounts = new Set(accounts.old);
  if (accounts.active) {
    allAccounts.add(accounts.active);
  }
  return allAccounts.size;
}

export async function clearCachedGoogleAccount(): Promise<void> {
  const filePath = getGoogleAccountsCachePath();
  const accounts = await readAccounts(filePath);

  if (accounts.active) {
    if (!accounts.old.includes(accounts.active)) {
      accounts.old.push(accounts.active);
    }
    accounts.active = null;
  }

  await fsp.writeFile(filePath, JSON.stringify(accounts, null, 2), 'utf-8');
}
@@ -1,24 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect } from 'vitest';
import { getInstallationId } from './user_id.js';

describe('user_id', () => {
  describe('getInstallationId', () => {
    it('should return a valid UUID format string', () => {
      const installationId = getInstallationId();

      expect(installationId).toBeDefined();
      expect(typeof installationId).toBe('string');
      expect(installationId.length).toBeGreaterThan(0);

      // Should return the same ID on subsequent calls (consistent)
      const secondCall = getInstallationId();
      expect(secondCall).toBe(installationId);
    });
  });
});
@@ -1,58 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import * as os from 'os';
import * as fs from 'fs';
import * as path from 'path';
import { randomUUID } from 'crypto';
import { GEMINI_DIR } from './paths.js';

const homeDir = os.homedir() ?? '';
const geminiDir = path.join(homeDir, GEMINI_DIR);
const installationIdFile = path.join(geminiDir, 'installation_id');

function ensureGeminiDirExists() {
  if (!fs.existsSync(geminiDir)) {
    fs.mkdirSync(geminiDir, { recursive: true });
  }
}

function readInstallationIdFromFile(): string | null {
  if (fs.existsSync(installationIdFile)) {
    const installationid = fs.readFileSync(installationIdFile, 'utf-8').trim();
    return installationid || null;
  }
  return null;
}

function writeInstallationIdToFile(installationId: string) {
  fs.writeFileSync(installationIdFile, installationId, 'utf-8');
}

/**
 * Retrieves the installation ID from a file, creating it if it doesn't exist.
 * This ID is used for unique user installation tracking.
 * @returns A UUID string for the user.
 */
export function getInstallationId(): string {
  try {
    ensureGeminiDirExists();
    let installationId = readInstallationIdFromFile();

    if (!installationId) {
      installationId = randomUUID();
      writeInstallationIdToFile(installationId);
    }

    return installationId;
  } catch (error) {
    console.error(
      'Error accessing installation ID file, generating ephemeral ID:',
      error,
    );
    return '123456789';
  }
}