Sync upstream Gemini-CLI v0.8.2 (#838)

Author: tanzhenxin
Date: 2025-10-23 09:27:04 +08:00
Committed by: GitHub
Parent: 096fabb5d6
Commit: eb95c131be
644 changed files with 70389 additions and 23709 deletions

View File

@@ -0,0 +1,424 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as path from 'node:path';
import fs from 'node:fs';
import os from 'node:os';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { ExtensionEnablementManager, Override } from './extensionEnablement.js';
import type { Extension } from '../extension.js';
// Helper to create a temporary directory for testing
function createTestDir() {
const dirPath = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-test-'));
return {
path: dirPath,
cleanup: () => fs.rmSync(dirPath, { recursive: true, force: true }),
};
}
let testDir: { path: string; cleanup: () => void };
let configDir: string;
let manager: ExtensionEnablementManager;
describe('ExtensionEnablementManager', () => {
beforeEach(() => {
testDir = createTestDir();
configDir = path.join(testDir.path, '.gemini');
manager = new ExtensionEnablementManager(configDir);
});
afterEach(() => {
testDir.cleanup();
// Reset the singleton instance for test isolation
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(ExtensionEnablementManager as any).instance = undefined;
});
describe('isEnabled', () => {
it('should return true if extension is not configured', () => {
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
});
it('should return true if no overrides match', () => {
manager.disable('ext-test', false, '/another/path');
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
});
it('should enable a path based on an override rule', () => {
manager.disable('ext-test', true, '/');
manager.enable('ext-test', true, '/home/user/projects/');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
true,
);
});
it('should disable a path based on a disable override rule', () => {
manager.enable('ext-test', true, '/');
manager.disable('ext-test', true, '/home/user/projects/');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
});
it('should respect the last matching rule (enable wins)', () => {
manager.disable('ext-test', true, '/home/user/projects/');
manager.enable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
true,
);
});
it('should respect the last matching rule (disable wins)', () => {
manager.enable('ext-test', true, '/home/user/projects/');
manager.disable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
});
it('should apply a non-subdir disable within an enabled subtree to that exact path only', () => {
manager.enable('ext-test', true, '/home/user/projects');
manager.disable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
expect(
manager.isEnabled('ext-test', '/home/user/projects/something-else'),
).toBe(true);
});
});
describe('includeSubdirs', () => {
it('should add a glob when enabling with includeSubdirs', () => {
manager.enable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
});
it('should not add a glob when enabling without includeSubdirs', () => {
manager.enable('ext-test', false, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should add a glob when disabling with includeSubdirs', () => {
manager.disable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('!/path/to/dir/*');
});
it('should remove conflicting glob rule when enabling without subdirs', () => {
manager.enable('ext-test', true, '/path/to/dir'); // Adds /path/to/dir/*
manager.enable('ext-test', false, '/path/to/dir'); // Should remove the glob
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should remove conflicting non-glob rule when enabling with subdirs', () => {
manager.enable('ext-test', false, '/path/to/dir'); // Adds /path/to/dir/
manager.enable('ext-test', true, '/path/to/dir'); // Should remove the non-glob
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/');
});
it('should remove conflicting rules when disabling', () => {
manager.enable('ext-test', true, '/path/to/dir'); // enabled with glob
manager.disable('ext-test', false, '/path/to/dir'); // disabled without
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('!/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should correctly evaluate isEnabled with subdirs', () => {
manager.disable('ext-test', true, '/');
manager.enable('ext-test', true, '/path/to/dir');
expect(manager.isEnabled('ext-test', '/path/to/dir/')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/dir/sub/')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/another/')).toBe(false);
});
it('should correctly evaluate isEnabled without subdirs', () => {
manager.disable('ext-test', true, '/*');
manager.enable('ext-test', false, '/path/to/dir');
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/dir/sub')).toBe(false);
});
});
describe('pruning child rules', () => {
it('should remove child rules when enabling a parent with subdirs', () => {
// Pre-existing rules for children
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.disable('ext-test', true, '/path/to/dir/subdir2');
manager.enable('ext-test', false, '/path/to/another/dir');
// Enable the parent directory
manager.enable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
// The new parent rule should be present
expect(overrides).toContain(`/path/to/dir/*`);
// Child rules should be removed
expect(overrides).not.toContain('/path/to/dir/subdir1/');
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
// Unrelated rules should remain
expect(overrides).toContain('/path/to/another/dir/');
});
it('should remove child rules when disabling a parent with subdirs', () => {
// Pre-existing rules for children
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.disable('ext-test', true, '/path/to/dir/subdir2');
manager.enable('ext-test', false, '/path/to/another/dir');
// Disable the parent directory
manager.disable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
// The new parent rule should be present
expect(overrides).toContain(`!/path/to/dir/*`);
// Child rules should be removed
expect(overrides).not.toContain('/path/to/dir/subdir1/');
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
// Unrelated rules should remain
expect(overrides).toContain('/path/to/another/dir/');
});
it('should not remove child rules if includeSubdirs is false', () => {
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.enable('ext-test', false, '/path/to/dir'); // Not including subdirs
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
expect(overrides).toContain('/path/to/dir/subdir1/');
expect(overrides).toContain('/path/to/dir/');
});
});
it('should enable a path based on an enable override', () => {
manager.disable('ext-test', true, '/Users/chrstn');
manager.enable('ext-test', true, '/Users/chrstn/gemini-cli');
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
true,
);
});
it('should not disable subdirs when includeSubdirs is false', () => {
manager.disable('ext-test', false, '/Users/chrstn');
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
true,
);
});
describe('extension overrides (-e <name>)', () => {
beforeEach(() => {
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
});
it('can enable extensions, case-insensitive', () => {
manager.disable('ext-test', true, '/');
expect(manager.isEnabled('ext-test', '/')).toBe(true);
expect(manager.isEnabled('Ext-Test', '/')).toBe(true);
// Double check that it would have been disabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
).toBe(false);
});
it('disable all other extensions', () => {
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
manager.enable('ext-test-2', true, '/');
expect(manager.isEnabled('ext-test-2', '/')).toBe(false);
// Double check that it would have been enabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test-2', '/'),
).toBe(true);
});
it('none disables all extensions', () => {
manager = new ExtensionEnablementManager(configDir, ['none']);
manager.enable('ext-test', true, '/');
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(false);
// Double check that it would have been enabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
).toBe(true);
});
});
describe('validateExtensionOverrides', () => {
let consoleErrorSpy: ReturnType<typeof vi.spyOn>;
beforeEach(() => {
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
});
afterEach(() => {
consoleErrorSpy.mockRestore();
});
it('should not log an error if enabledExtensionNamesOverride is empty', () => {
const manager = new ExtensionEnablementManager(configDir, []);
manager.validateExtensionOverrides([]);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
it('should not log an error if all enabledExtensionNamesOverride are valid', () => {
const manager = new ExtensionEnablementManager(configDir, [
'ext-one',
'ext-two',
]);
const extensions = [
{ config: { name: 'ext-one' } },
{ config: { name: 'ext-two' } },
] as Extension[];
manager.validateExtensionOverrides(extensions);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
it('should log an error for each invalid extension name in enabledExtensionNamesOverride', () => {
const manager = new ExtensionEnablementManager(configDir, [
'ext-one',
'ext-invalid',
'ext-another-invalid',
]);
const extensions = [
{ config: { name: 'ext-one' } },
{ config: { name: 'ext-two' } },
] as Extension[];
manager.validateExtensionOverrides(extensions);
expect(consoleErrorSpy).toHaveBeenCalledTimes(2);
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Extension not found: ext-invalid',
);
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Extension not found: ext-another-invalid',
);
});
it('should not log an error if "none" is in enabledExtensionNamesOverride', () => {
const manager = new ExtensionEnablementManager(configDir, ['none']);
manager.validateExtensionOverrides([]);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
});
});
describe('Override', () => {
it('should create an override from input', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should create a disable override from input', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(true);
expect(override.includeSubdirs).toBe(false);
});
it('should create an override from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir');
expect(override.baseRule).toBe('/path/to/dir');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(false);
});
it('should create a disable override from a file rule', () => {
const override = Override.fromFileRule('!/path/to/dir/');
expect(override.isDisable).toBe(true);
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.includeSubdirs).toBe(false);
});
it('should create an override with subdirs from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir/*');
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should correctly identify conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', false);
expect(override1.conflictsWith(override2)).toBe(true);
});
it('should correctly identify non-conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/another/dir', true);
expect(override1.conflictsWith(override2)).toBe(false);
});
it('should correctly identify equal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(true);
});
it('should correctly identify unequal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('!/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(false);
});
it('should generate the correct regex', () => {
const override = Override.fromInput('/path/to/dir', true);
const regex = override.asRegex();
expect(regex.test('/path/to/dir/')).toBe(true);
expect(regex.test('/path/to/dir/subdir')).toBe(true);
expect(regex.test('/path/to/another/dir')).toBe(false);
});
it('should correctly identify child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify child overrides with glob', () => {
const parent = Override.fromInput('/path/to/dir/*', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify non-child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const other = Override.fromInput('/path/to/another/dir', false);
expect(other.isChildOf(parent)).toBe(false);
});
it('should generate the correct output string', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.output()).toBe(`/path/to/dir/*`);
});
it('should generate the correct output string for a disable override', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
});

View File

@@ -0,0 +1,239 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import fs from 'node:fs';
import path from 'node:path';
import { type Extension } from '../extension.js';
export interface ExtensionEnablementConfig {
overrides: string[];
}
export interface AllExtensionsEnablementConfig {
[extensionName: string]: ExtensionEnablementConfig;
}
export class Override {
constructor(
public baseRule: string,
public isDisable: boolean,
public includeSubdirs: boolean,
) {}
static fromInput(inputRule: string, includeSubdirs: boolean): Override {
const isDisable = inputRule.startsWith('!');
let baseRule = isDisable ? inputRule.substring(1) : inputRule;
baseRule = ensureLeadingAndTrailingSlash(baseRule);
return new Override(baseRule, isDisable, includeSubdirs);
}
static fromFileRule(fileRule: string): Override {
const isDisable = fileRule.startsWith('!');
let baseRule = isDisable ? fileRule.substring(1) : fileRule;
const includeSubdirs = baseRule.endsWith('*');
baseRule = includeSubdirs
? baseRule.substring(0, baseRule.length - 1)
: baseRule;
return new Override(baseRule, isDisable, includeSubdirs);
}
conflictsWith(other: Override): boolean {
if (this.baseRule === other.baseRule) {
return (
this.includeSubdirs !== other.includeSubdirs ||
this.isDisable !== other.isDisable
);
}
return false;
}
isEqualTo(other: Override): boolean {
return (
this.baseRule === other.baseRule &&
this.includeSubdirs === other.includeSubdirs &&
this.isDisable === other.isDisable
);
}
asRegex(): RegExp {
return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
}
isChildOf(parent: Override) {
if (!parent.includeSubdirs) {
return false;
}
return parent.asRegex().test(this.baseRule);
}
output(): string {
return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
}
matchesPath(path: string) {
return this.asRegex().test(path);
}
}
const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
// Normalize separators to forward slashes for consistent matching across platforms.
let result = dirPath.replace(/\\/g, '/');
if (result.charAt(0) !== '/') {
result = '/' + result;
}
if (result.charAt(result.length - 1) !== '/') {
result = result + '/';
}
return result;
};
/**
* Converts a glob pattern to a RegExp object.
* This is a simplified implementation that supports `*`.
*
* @param glob The glob pattern to convert.
* @returns A RegExp object.
*/
function globToRegex(glob: string): RegExp {
const regexString = glob
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
.replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group
return new RegExp(`^${regexString}$`);
}
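// Illustrative examples (not part of the original change) of the patterns the
// overrides above produce, assuming forward-slash paths:
//   Override.fromInput('/path/to/dir', false).asRegex().source === '^/path/to/dir/$'      (that directory only)
//   Override.fromInput('/path/to/dir', true).asRegex().source === '^/path/to/dir(/.*)?$'  (directory and subdirectories)
//   Override.fromInput('!/path/to/dir', true) produces the same pattern, but with isDisable set to true.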
export class ExtensionEnablementManager {
private configFilePath: string;
private configDir: string;
// If non-empty, this overrides all other extension configuration and enables
// only the ones in this list.
private enabledExtensionNamesOverride: string[];
constructor(configDir: string, enabledExtensionNames?: string[]) {
this.configDir = configDir;
this.configFilePath = path.join(configDir, 'extension-enablement.json');
this.enabledExtensionNamesOverride =
enabledExtensionNames?.map((name) => name.toLowerCase()) ?? [];
}
validateExtensionOverrides(extensions: Extension[]) {
for (const name of this.enabledExtensionNamesOverride) {
if (name === 'none') continue;
if (
!extensions.some(
(ext) => ext.config.name.toLowerCase() === name.toLowerCase(),
)
) {
console.error(`Extension not found: ${name}`);
}
}
}
/**
* Determines if an extension is enabled based on its name and the current
* path. The last matching rule in the overrides list wins.
*
* @param extensionName The name of the extension.
* @param currentPath The absolute path of the current working directory.
* @returns True if the extension is enabled, false otherwise.
*/
isEnabled(extensionName: string, currentPath: string): boolean {
// If we have a single override called 'none', this disables all extensions.
// Typically, this comes from the user passing `-e none`.
if (
this.enabledExtensionNamesOverride.length === 1 &&
this.enabledExtensionNamesOverride[0] === 'none'
) {
return false;
}
// If we have explicit overrides, only enable those extensions.
if (this.enabledExtensionNamesOverride.length > 0) {
// When checking against overrides ONLY, we use a case insensitive match.
// The override names are already lowercased in the constructor.
return this.enabledExtensionNamesOverride.includes(
extensionName.toLocaleLowerCase(),
);
}
// Otherwise, we use the configuration settings
const config = this.readConfig();
const extensionConfig = config[extensionName];
// Extensions are enabled by default.
let enabled = true;
const allOverrides = extensionConfig?.overrides ?? [];
for (const rule of allOverrides) {
const override = Override.fromFileRule(rule);
if (override.matchesPath(ensureLeadingAndTrailingSlash(currentPath))) {
enabled = !override.isDisable;
}
}
return enabled;
}
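// Illustrative example (not part of the original change), assuming no -e name
// overrides and a config whose overrides for "my-ext" are
// ['!/home/user/*', '/home/user/projects/*']:
//   isEnabled('my-ext', '/home/user/projects/app') -> true  (the later enable rule also matches; last match wins)
//   isEnabled('my-ext', '/home/user/docs')         -> false (only the disable rule matches)
//   isEnabled('other-ext', '/home/user/docs')      -> true  (unconfigured extensions default to enabled)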
readConfig(): AllExtensionsEnablementConfig {
try {
const content = fs.readFileSync(this.configFilePath, 'utf-8');
return JSON.parse(content);
} catch (error) {
if (
error instanceof Error &&
'code' in error &&
error.code === 'ENOENT'
) {
return {};
}
console.error('Error reading extension enablement config:', error);
return {};
}
}
writeConfig(config: AllExtensionsEnablementConfig): void {
fs.mkdirSync(this.configDir, { recursive: true });
fs.writeFileSync(this.configFilePath, JSON.stringify(config, null, 2));
}
enable(
extensionName: string,
includeSubdirs: boolean,
scopePath: string,
): void {
const config = this.readConfig();
if (!config[extensionName]) {
config[extensionName] = { overrides: [] };
}
const override = Override.fromInput(scopePath, includeSubdirs);
const overrides = config[extensionName].overrides.filter((rule) => {
const fileOverride = Override.fromFileRule(rule);
if (
fileOverride.conflictsWith(override) ||
fileOverride.isEqualTo(override)
) {
return false; // Remove conflicts and equivalent values.
}
return !fileOverride.isChildOf(override);
});
overrides.push(override.output());
config[extensionName].overrides = overrides;
this.writeConfig(config);
}
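// Illustrative example (not part of the original change): enabling '/a/b' with
// includeSubdirs removes an earlier conflicting '!/a/b/*' rule and child rules
// such as '/a/b/c/', leaving a single '/a/b/*' entry in the overrides list.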
disable(
extensionName: string,
includeSubdirs: boolean,
scopePath: string,
): void {
this.enable(extensionName, includeSubdirs, `!${scopePath}`);
}
remove(extensionName: string): void {
const config = this.readConfig();
if (config[extensionName]) {
delete config[extensionName];
this.writeConfig(config);
}
}
}

View File

@@ -0,0 +1,429 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import {
checkForExtensionUpdate,
cloneFromGit,
extractFile,
findReleaseAsset,
parseGitHubRepoForReleases,
} from './github.js';
import { simpleGit, type SimpleGit } from 'simple-git';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import * as os from 'node:os';
import * as fs from 'node:fs/promises';
import * as fsSync from 'node:fs';
import * as path from 'node:path';
import * as tar from 'tar';
import * as archiver from 'archiver';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
const mockPlatform = vi.hoisted(() => vi.fn());
const mockArch = vi.hoisted(() => vi.fn());
vi.mock('node:os', async (importOriginal) => {
const actual = await importOriginal<typeof os>();
return {
...actual,
platform: mockPlatform,
arch: mockArch,
};
});
vi.mock('simple-git');
describe('git extension helpers', () => {
afterEach(() => {
vi.restoreAllMocks();
});
describe('cloneFromGit', () => {
const mockGit = {
clone: vi.fn(),
getRemotes: vi.fn(),
fetch: vi.fn(),
checkout: vi.fn(),
};
beforeEach(() => {
vi.mocked(simpleGit).mockReturnValue(mockGit as unknown as SimpleGit);
});
it('should clone, fetch and checkout a repo', async () => {
const installMetadata = {
source: 'http://my-repo.com',
ref: 'my-ref',
type: 'git' as const,
};
const destination = '/dest';
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
await cloneFromGit(installMetadata, destination);
expect(mockGit.clone).toHaveBeenCalledWith('http://my-repo.com', './', [
'--depth',
'1',
]);
expect(mockGit.getRemotes).toHaveBeenCalledWith(true);
expect(mockGit.fetch).toHaveBeenCalledWith('origin', 'my-ref');
expect(mockGit.checkout).toHaveBeenCalledWith('FETCH_HEAD');
});
it('should use HEAD if ref is not provided', async () => {
const installMetadata = {
source: 'http://my-repo.com',
type: 'git' as const,
};
const destination = '/dest';
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
await cloneFromGit(installMetadata, destination);
expect(mockGit.fetch).toHaveBeenCalledWith('origin', 'HEAD');
});
it('should throw if no remotes are found', async () => {
const installMetadata = {
source: 'http://my-repo.com',
type: 'git' as const,
};
const destination = '/dest';
mockGit.getRemotes.mockResolvedValue([]);
await expect(cloneFromGit(installMetadata, destination)).rejects.toThrow(
'Failed to clone Git repository from http://my-repo.com',
);
});
it('should throw on clone error', async () => {
const installMetadata = {
source: 'http://my-repo.com',
type: 'git' as const,
};
const destination = '/dest';
mockGit.clone.mockRejectedValue(new Error('clone failed'));
await expect(cloneFromGit(installMetadata, destination)).rejects.toThrow(
'Failed to clone Git repository from http://my-repo.com',
);
});
});
describe('checkForExtensionUpdate', () => {
const mockGit = {
getRemotes: vi.fn(),
listRemote: vi.fn(),
revparse: vi.fn(),
};
beforeEach(() => {
vi.mocked(simpleGit).mockReturnValue(mockGit as unknown as SimpleGit);
});
it('should return NOT_UPDATABLE for non-git extensions', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
installMetadata: {
type: 'link',
source: '',
},
};
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
);
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
});
it('should return ERROR if no remotes found', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
installMetadata: {
type: 'git',
source: '',
},
};
mockGit.getRemotes.mockResolvedValue([]);
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
);
expect(result).toBe(ExtensionUpdateState.ERROR);
});
it('should return UPDATE_AVAILABLE when remote hash is different', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
mockGit.listRemote.mockResolvedValue('remote-hash\tHEAD');
mockGit.revparse.mockResolvedValue('local-hash');
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
);
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
});
it('should return UP_TO_DATE when remote and local hashes are the same', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
mockGit.listRemote.mockResolvedValue('same-hash\tHEAD');
mockGit.revparse.mockResolvedValue('same-hash');
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
);
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
});
it('should return ERROR on git error', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
mockGit.getRemotes.mockRejectedValue(new Error('git error'));
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
);
expect(result).toBe(ExtensionUpdateState.ERROR);
});
});
describe('findReleaseAsset', () => {
const assets = [
{ name: 'darwin.arm64.extension.tar.gz', browser_download_url: 'url1' },
{ name: 'darwin.x64.extension.tar.gz', browser_download_url: 'url2' },
{ name: 'linux.x64.extension.tar.gz', browser_download_url: 'url3' },
{ name: 'win32.x64.extension.tar.gz', browser_download_url: 'url4' },
{ name: 'extension-generic.tar.gz', browser_download_url: 'url5' },
];
it('should find asset matching platform and architecture', () => {
mockPlatform.mockReturnValue('darwin');
mockArch.mockReturnValue('arm64');
const result = findReleaseAsset(assets);
expect(result).toEqual(assets[0]);
});
it('should find asset matching platform if arch does not match', () => {
mockPlatform.mockReturnValue('linux');
mockArch.mockReturnValue('arm64');
const result = findReleaseAsset(assets);
expect(result).toEqual(assets[2]);
});
it('should return undefined if no matching asset is found', () => {
mockPlatform.mockReturnValue('sunos');
mockArch.mockReturnValue('x64');
const result = findReleaseAsset(assets);
expect(result).toBeUndefined();
});
it('should find generic asset if it is the only one', () => {
const singleAsset = [
{ name: 'extension.tar.gz', browser_download_url: 'url' },
];
mockPlatform.mockReturnValue('darwin');
mockArch.mockReturnValue('arm64');
const result = findReleaseAsset(singleAsset);
expect(result).toEqual(singleAsset[0]);
});
it('should return undefined if multiple generic assets exist', () => {
const multipleGenericAssets = [
{ name: 'extension-1.tar.gz', browser_download_url: 'url1' },
{ name: 'extension-2.tar.gz', browser_download_url: 'url2' },
];
mockPlatform.mockReturnValue('darwin');
mockArch.mockReturnValue('arm64');
const result = findReleaseAsset(multipleGenericAssets);
expect(result).toBeUndefined();
});
});
describe('parseGitHubRepoForReleases', () => {
it('should parse owner and repo from a full GitHub URL', () => {
const source = 'https://github.com/owner/repo.git';
const { owner, repo } = parseGitHubRepoForReleases(source);
expect(owner).toBe('owner');
expect(repo).toBe('repo');
});
it('should parse owner and repo from a full GitHub URL without .git', () => {
const source = 'https://github.com/owner/repo';
const { owner, repo } = parseGitHubRepoForReleases(source);
expect(owner).toBe('owner');
expect(repo).toBe('repo');
});
it('should fail on a GitHub SSH URL', () => {
const source = 'git@github.com:owner/repo.git';
expect(() => parseGitHubRepoForReleases(source)).toThrow(
'GitHub release-based extensions are not supported for SSH. You must use an HTTPS URI with a personal access token to download releases from private repositories. You can set your personal access token in the GITHUB_TOKEN environment variable and install the extension via SSH.',
);
});
it('should fail on a non-GitHub URL', () => {
const source = 'https://example.com/owner/repo.git';
expect(() => parseGitHubRepoForReleases(source)).toThrow(
'Invalid GitHub repository source: https://example.com/owner/repo.git. Expected "owner/repo" or a github repo uri.',
);
});
it('should parse owner and repo from a shorthand string', () => {
const source = 'owner/repo';
const { owner, repo } = parseGitHubRepoForReleases(source);
expect(owner).toBe('owner');
expect(repo).toBe('repo');
});
it('should handle .git suffix in repo name', () => {
const source = 'owner/repo.git';
const { owner, repo } = parseGitHubRepoForReleases(source);
expect(owner).toBe('owner');
expect(repo).toBe('repo');
});
it('should throw error for invalid source format', () => {
const source = 'invalid-format';
expect(() => parseGitHubRepoForReleases(source)).toThrow(
'Invalid GitHub repository source: invalid-format. Expected "owner/repo" or a github repo uri.',
);
});
it('should throw error for source with too many parts', () => {
const source = 'https://github.com/owner/repo/extra';
expect(() => parseGitHubRepoForReleases(source)).toThrow(
'Invalid GitHub repository source: https://github.com/owner/repo/extra. Expected "owner/repo" or a github repo uri.',
);
});
});
describe('extractFile', () => {
let tempDir: string;
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'gemini-test-'));
});
afterEach(async () => {
await fs.rm(tempDir, { recursive: true, force: true });
});
it('should extract a .tar.gz file', async () => {
const archivePath = path.join(tempDir, 'test.tar.gz');
const extractionDest = path.join(tempDir, 'extracted');
await fs.mkdir(extractionDest);
// Create a dummy file to be archived
const dummyFilePath = path.join(tempDir, 'test.txt');
await fs.writeFile(dummyFilePath, 'hello tar');
// Create the tar.gz file
await tar.c(
{
gzip: true,
file: archivePath,
cwd: tempDir,
},
['test.txt'],
);
await extractFile(archivePath, extractionDest);
const extractedFilePath = path.join(extractionDest, 'test.txt');
const content = await fs.readFile(extractedFilePath, 'utf-8');
expect(content).toBe('hello tar');
});
it('should extract a .zip file', async () => {
const archivePath = path.join(tempDir, 'test.zip');
const extractionDest = path.join(tempDir, 'extracted');
await fs.mkdir(extractionDest);
// Create a dummy file to be archived
const dummyFilePath = path.join(tempDir, 'test.txt');
await fs.writeFile(dummyFilePath, 'hello zip');
// Create the zip file
const output = fsSync.createWriteStream(archivePath);
const archive = archiver.create('zip');
const streamFinished = new Promise((resolve, reject) => {
output.on('close', () => resolve(null));
archive.on('error', reject);
});
archive.pipe(output);
archive.file(dummyFilePath, { name: 'test.txt' });
await archive.finalize();
await streamFinished;
await extractFile(archivePath, extractionDest);
const extractedFilePath = path.join(extractionDest, 'test.txt');
const content = await fs.readFile(extractedFilePath, 'utf-8');
expect(content).toBe('hello zip');
});
it('should throw an error for unsupported file types', async () => {
const unsupportedFilePath = path.join(tempDir, 'test.txt');
await fs.writeFile(unsupportedFilePath, 'some content');
const extractionDest = path.join(tempDir, 'extracted');
await fs.mkdir(extractionDest);
await expect(
extractFile(unsupportedFilePath, extractionDest),
).rejects.toThrow('Unsupported file extension for extraction:');
});
});
});

View File

@@ -0,0 +1,431 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { simpleGit } from 'simple-git';
import { getErrorMessage } from '../../utils/errors.js';
import type {
ExtensionInstallMetadata,
GeminiCLIExtension,
} from '@qwen-code/qwen-code-core';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import * as os from 'node:os';
import * as https from 'node:https';
import * as fs from 'node:fs';
import * as path from 'node:path';
import { EXTENSIONS_CONFIG_FILENAME, loadExtension } from '../extension.js';
import * as tar from 'tar';
import extract from 'extract-zip';
function getGitHubToken(): string | undefined {
return process.env['GITHUB_TOKEN'];
}
/**
* Clones a Git repository to a specified local path.
* @param installMetadata The metadata for the extension to install.
* @param destination The destination path to clone the repository to.
*/
export async function cloneFromGit(
installMetadata: ExtensionInstallMetadata,
destination: string,
): Promise<void> {
try {
const git = simpleGit(destination);
let sourceUrl = installMetadata.source;
const token = getGitHubToken();
if (token) {
try {
const parsedUrl = new URL(sourceUrl);
if (
parsedUrl.protocol === 'https:' &&
parsedUrl.hostname === 'github.com'
) {
if (!parsedUrl.username) {
parsedUrl.username = token;
}
sourceUrl = parsedUrl.toString();
}
} catch {
// If source is not a valid URL, we don't inject the token.
// We let git handle the source as is.
}
}
await git.clone(sourceUrl, './', ['--depth', '1']);
const remotes = await git.getRemotes(true);
if (remotes.length === 0) {
throw new Error(
`Unable to find any remotes for repo ${installMetadata.source}`,
);
}
const refToFetch = installMetadata.ref || 'HEAD';
await git.fetch(remotes[0].name, refToFetch);
// After fetching, checkout FETCH_HEAD to get the content of the fetched ref.
// This results in a detached HEAD state, which is fine for this purpose.
await git.checkout('FETCH_HEAD');
} catch (error) {
throw new Error(
`Failed to clone Git repository from ${installMetadata.source}: ${getErrorMessage(error)}`,
{
cause: error,
},
);
}
}
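// Illustrative example (not part of the original change): with GITHUB_TOKEN set
// to a placeholder value "ghp_xxx", a source of "https://github.com/owner/repo.git"
// is cloned as "https://ghp_xxx@github.com/owner/repo.git"; sources that are not
// valid URLs (e.g. SSH-style strings) are passed to git unchanged.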
export function parseGitHubRepoForReleases(source: string): {
owner: string;
repo: string;
} {
// Default to a github repo path, so `source` can be just an org/repo
const parsedUrl = URL.parse(source, 'https://github.com');
// The pathname should be "/owner/repo".
const parts = parsedUrl?.pathname.substring(1).split('/');
if (parts?.length !== 2 || parsedUrl?.host !== 'github.com') {
throw new Error(
`Invalid GitHub repository source: ${source}. Expected "owner/repo" or a github repo uri.`,
);
}
const owner = parts[0];
const repo = parts[1].replace('.git', '');
if (owner.startsWith('git@github.com')) {
throw new Error(
`GitHub release-based extensions are not supported for SSH. You must use an HTTPS URI with a personal access token to download releases from private repositories. You can set your personal access token in the GITHUB_TOKEN environment variable and install the extension via SSH.`,
);
}
return { owner, repo };
}
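// Illustrative examples (not part of the original change) of accepted sources:
//   parseGitHubRepoForReleases('owner/repo')                        -> { owner: 'owner', repo: 'repo' }
//   parseGitHubRepoForReleases('https://github.com/owner/repo.git') -> { owner: 'owner', repo: 'repo' }
//   SSH sources ('git@github.com:owner/repo.git') and non-GitHub hosts throw.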
async function fetchReleaseFromGithub(
owner: string,
repo: string,
ref?: string,
): Promise<GithubReleaseData> {
const endpoint = ref ? `releases/tags/${ref}` : 'releases/latest';
const url = `https://api.github.com/repos/${owner}/${repo}/${endpoint}`;
return await fetchJson(url);
}
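// For example (illustrative, not part of the original change), a ref of "v1.2.0"
// queries https://api.github.com/repos/<owner>/<repo>/releases/tags/v1.2.0, and
// omitting the ref falls back to .../releases/latest.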
export async function checkForExtensionUpdate(
extension: GeminiCLIExtension,
setExtensionUpdateState: (updateState: ExtensionUpdateState) => void,
cwd: string = process.cwd(),
): Promise<void> {
setExtensionUpdateState(ExtensionUpdateState.CHECKING_FOR_UPDATES);
const installMetadata = extension.installMetadata;
if (installMetadata?.type === 'local') {
const newExtension = loadExtension({
extensionDir: installMetadata.source,
workspaceDir: cwd,
});
if (!newExtension) {
console.error(
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
}
if (newExtension.config.version !== extension.version) {
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
}
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
}
if (
!installMetadata ||
(installMetadata.type !== 'git' &&
installMetadata.type !== 'github-release')
) {
setExtensionUpdateState(ExtensionUpdateState.NOT_UPDATABLE);
return;
}
try {
if (installMetadata.type === 'git') {
const git = simpleGit(extension.path);
const remotes = await git.getRemotes(true);
if (remotes.length === 0) {
console.error('No git remotes found.');
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
}
const remoteUrl = remotes[0].refs.fetch;
if (!remoteUrl) {
console.error(`No fetch URL found for git remote ${remotes[0].name}.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
}
// Determine the ref to check on the remote.
const refToCheck = installMetadata.ref || 'HEAD';
const lsRemoteOutput = await git.listRemote([remoteUrl, refToCheck]);
if (typeof lsRemoteOutput !== 'string' || lsRemoteOutput.trim() === '') {
console.error(`Git ref ${refToCheck} not found.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
}
const remoteHash = lsRemoteOutput.split('\t')[0];
const localHash = await git.revparse(['HEAD']);
if (!remoteHash) {
console.error(
`Unable to parse hash from git ls-remote output "${lsRemoteOutput}"`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
}
if (remoteHash === localHash) {
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
}
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
} else {
const { source, releaseTag } = installMetadata;
if (!source) {
console.error(`No "source" provided for extension.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
}
const { owner, repo } = parseGitHubRepoForReleases(source);
const releaseData = await fetchReleaseFromGithub(
owner,
repo,
installMetadata.ref,
);
if (releaseData.tag_name !== releaseTag) {
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
}
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
}
} catch (error) {
console.error(
`Failed to check for updates for extension "${installMetadata.source}": ${getErrorMessage(error)}`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
}
}
export interface GitHubDownloadResult {
tagName: string;
type: 'git' | 'github-release';
}
export async function downloadFromGitHubRelease(
installMetadata: ExtensionInstallMetadata,
destination: string,
): Promise<GitHubDownloadResult> {
const { source, ref } = installMetadata;
const { owner, repo } = parseGitHubRepoForReleases(source);
try {
const releaseData = await fetchReleaseFromGithub(owner, repo, ref);
if (!releaseData) {
throw new Error(
`No release data found for ${owner}/${repo} at tag ${ref}`,
);
}
const asset = findReleaseAsset(releaseData.assets);
let archiveUrl: string | undefined;
let isTar = false;
let isZip = false;
if (asset) {
archiveUrl = asset.browser_download_url;
} else {
if (releaseData.tarball_url) {
archiveUrl = releaseData.tarball_url;
isTar = true;
} else if (releaseData.zipball_url) {
archiveUrl = releaseData.zipball_url;
isZip = true;
}
}
if (!archiveUrl) {
throw new Error(
`No assets found for release with tag ${releaseData.tag_name}`,
);
}
let downloadedAssetPath = path.join(
destination,
path.basename(new URL(archiveUrl).pathname),
);
if (isTar && !downloadedAssetPath.endsWith('.tar.gz')) {
downloadedAssetPath += '.tar.gz';
} else if (isZip && !downloadedAssetPath.endsWith('.zip')) {
downloadedAssetPath += '.zip';
}
await downloadFile(archiveUrl, downloadedAssetPath);
await extractFile(downloadedAssetPath, destination);
// For regular GitHub releases, the repository is put inside of a top-level
// directory. In that case we should see exactly two files in the destination
// dir: the archive and the directory. If we see that, validate that the
// directory has a qwen extension configuration file and then move all files
// from the directory up one level into the destination directory.
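// Illustrative layout (assumption, not from the original change): extracting a
// source tarball for tag v1.2.0 might leave
//   <destination>/repo-v1.2.0.tar.gz
//   <destination>/owner-repo-<sha>/<EXTENSIONS_CONFIG_FILENAME>, ...
// and the block below flattens the lone directory into <destination>.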
const entries = await fs.promises.readdir(destination, {
withFileTypes: true,
});
if (entries.length === 2) {
const lonelyDir = entries.find((entry) => entry.isDirectory());
if (
lonelyDir &&
fs.existsSync(
path.join(destination, lonelyDir.name, EXTENSIONS_CONFIG_FILENAME),
)
) {
const dirPathToExtract = path.join(destination, lonelyDir.name);
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
for (const file of extractedDirFiles) {
await fs.promises.rename(
path.join(dirPathToExtract, file),
path.join(destination, file),
);
}
await fs.promises.rmdir(dirPathToExtract);
}
}
await fs.promises.unlink(downloadedAssetPath);
return {
tagName: releaseData.tag_name,
type: 'github-release',
};
} catch (error) {
throw new Error(
`Failed to download release from ${installMetadata.source}: ${getErrorMessage(error)}`,
);
}
}
interface GithubReleaseData {
assets: Asset[];
tag_name: string;
tarball_url?: string;
zipball_url?: string;
}
interface Asset {
name: string;
browser_download_url: string;
}
export function findReleaseAsset(assets: Asset[]): Asset | undefined {
const platform = os.platform();
const arch = os.arch();
const platformArchPrefix = `${platform}.${arch}.`;
const platformPrefix = `${platform}.`;
// Check for platform + architecture specific asset
const platformArchAsset = assets.find((asset) =>
asset.name.toLowerCase().startsWith(platformArchPrefix),
);
if (platformArchAsset) {
return platformArchAsset;
}
// Check for platform specific asset
const platformAsset = assets.find((asset) =>
asset.name.toLowerCase().startsWith(platformPrefix),
);
if (platformAsset) {
return platformAsset;
}
// Check for generic asset if only one is available
const genericAsset = assets.find(
(asset) =>
!asset.name.toLowerCase().includes('darwin') &&
!asset.name.toLowerCase().includes('linux') &&
!asset.name.toLowerCase().includes('win32'),
);
if (assets.length === 1) {
return genericAsset;
}
return undefined;
}
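// Illustrative selection (not part of the original change): on darwin/arm64,
// assets named 'darwin.arm64.extension.tar.gz', 'darwin.x64.extension.tar.gz'
// and 'linux.x64.extension.tar.gz' resolve to the first one; a lone
// 'extension.tar.gz' is used as a generic fallback only when it is the sole asset.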
async function fetchJson<T>(url: string): Promise<T> {
const headers: { 'User-Agent': string; Authorization?: string } = {
'User-Agent': 'gemini-cli',
};
const token = getGitHubToken();
if (token) {
headers.Authorization = `token ${token}`;
}
return new Promise((resolve, reject) => {
https
.get(url, { headers }, (res) => {
if (res.statusCode !== 200) {
return reject(
new Error(`Request failed with status code ${res.statusCode}`),
);
}
const chunks: Buffer[] = [];
res.on('data', (chunk) => chunks.push(chunk));
res.on('end', () => {
const data = Buffer.concat(chunks).toString();
resolve(JSON.parse(data) as T);
});
})
.on('error', reject);
});
}
async function downloadFile(url: string, dest: string): Promise<void> {
const headers: { 'User-agent': string; Authorization?: string } = {
'User-agent': 'gemini-cli',
};
const token = getGitHubToken();
if (token) {
headers.Authorization = `token ${token}`;
}
return new Promise((resolve, reject) => {
https
.get(url, { headers }, (res) => {
if (res.statusCode === 302 || res.statusCode === 301) {
downloadFile(res.headers.location!, dest).then(resolve).catch(reject);
return;
}
if (res.statusCode !== 200) {
return reject(
new Error(`Request failed with status code ${res.statusCode}`),
);
}
const file = fs.createWriteStream(dest);
res.pipe(file);
file.on('finish', () => file.close(resolve as () => void));
})
.on('error', reject);
});
}
export async function extractFile(file: string, dest: string): Promise<void> {
if (file.endsWith('.tar.gz')) {
await tar.x({
file,
cwd: dest,
});
} else if (file.endsWith('.zip')) {
await extract(file, { dir: dest });
} else {
throw new Error(`Unsupported file extension for extraction: ${file}`);
}
}

View File

@@ -0,0 +1,457 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
EXTENSIONS_CONFIG_FILENAME,
ExtensionStorage,
INSTALL_METADATA_FILENAME,
annotateActiveExtensions,
loadExtension,
} from '../extension.js';
import { checkForAllExtensionUpdates, updateExtension } from './update.js';
import { QWEN_DIR } from '@qwen-code/qwen-code-core';
import { isWorkspaceTrusted } from '../trustedFolders.js';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { createExtension } from '../../test-utils/createExtension.js';
import { ExtensionEnablementManager } from './extensionEnablement.js';
const mockGit = {
clone: vi.fn(),
getRemotes: vi.fn(),
fetch: vi.fn(),
checkout: vi.fn(),
listRemote: vi.fn(),
revparse: vi.fn(),
// Not a part of the actual API, but we need to use this to do the correct
// file system interactions.
path: vi.fn(),
};
vi.mock('simple-git', () => ({
simpleGit: vi.fn((path: string) => {
mockGit.path.mockReturnValue(path);
return mockGit;
}),
}));
vi.mock('node:os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: vi.fn(),
};
});
vi.mock('../trustedFolders.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../trustedFolders.js')>();
return {
...actual,
isWorkspaceTrusted: vi.fn(),
};
});
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
const actual =
await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
return {
...actual,
logExtensionInstallEvent: mockLogExtensionInstallEvent,
logExtensionUninstall: mockLogExtensionUninstall,
ExtensionInstallEvent: vi.fn(),
ExtensionUninstallEvent: vi.fn(),
};
});
describe('update tests', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'qwen-code-test-home-'),
);
tempWorkspaceDir = fs.mkdtempSync(
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
);
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
// Clean up before each test
fs.rmSync(userExtensionsDir, { recursive: true, force: true });
fs.mkdirSync(userExtensionsDir, { recursive: true });
vi.mocked(isWorkspaceTrusted).mockReturnValue({
isTrusted: true,
source: 'file',
});
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
Object.values(mockGit).forEach((fn) => fn.mockReset());
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
});
describe('updateExtension', () => {
it('should update a git-installed extension', async () => {
const gitUrl = 'https://github.com/google/gemini-extensions.git';
const extensionName = 'qwen-extensions';
const targetExtDir = path.join(userExtensionsDir, extensionName);
const metadataPath = path.join(targetExtDir, INSTALL_METADATA_FILENAME);
fs.mkdirSync(targetExtDir, { recursive: true });
fs.writeFileSync(
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.0.0' }),
);
fs.writeFileSync(
metadataPath,
JSON.stringify({ source: gitUrl, type: 'git' }),
);
mockGit.clone.mockImplementation(async (_, destination) => {
fs.mkdirSync(path.join(mockGit.path(), destination), {
recursive: true,
});
fs.writeFileSync(
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.1.0' }),
);
});
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: targetExtDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const updateInfo = await updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
() => {},
);
expect(updateInfo).toEqual({
name: 'qwen-extensions',
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
});
const updatedConfig = JSON.parse(
fs.readFileSync(
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
'utf-8',
),
);
expect(updatedConfig.version).toBe('1.1.0');
});
it('should call setExtensionUpdateState with UPDATING and then UPDATED_NEEDS_RESTART on success', async () => {
const extensionName = 'test-extension';
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: extensionName,
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
mockGit.clone.mockImplementation(async (_, destination) => {
fs.mkdirSync(path.join(mockGit.path(), destination), {
recursive: true,
});
fs.writeFileSync(
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.1.0' }),
);
});
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const dispatch = vi.fn();
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
await updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
dispatch,
);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATING,
},
});
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
});
it('should call setExtensionUpdateState with ERROR on failure', async () => {
const extensionName = 'test-extension';
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: extensionName,
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
mockGit.clone.mockRejectedValue(new Error('Git clone failed'));
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const dispatch = vi.fn();
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
await expect(
updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
dispatch,
),
).rejects.toThrow();
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATING,
},
});
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.ERROR,
},
});
});
});
describe('checkForAllExtensionUpdates', () => {
it('should return UpdateAvailable for a git extension with updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('remoteHash HEAD');
mockGit.revparse.mockResolvedValue('localHash');
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});
it('should return UpToDate for a git extension with no updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('sameHash HEAD');
mockGit.revparse.mockResolvedValue('sameHash');
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});
it('should return UpToDate for a local extension with no updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.0.0',
});
const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});
it('should return UpdateAvailable for a local extension with updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.1.0',
});
const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});
it('should return Error when git check fails', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'error-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockRejectedValue(new Error('Git error'));
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'error-extension',
state: ExtensionUpdateState.ERROR,
},
});
});
});
});

View File

@@ -0,0 +1,182 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import {
type ExtensionUpdateAction,
ExtensionUpdateState,
type ExtensionUpdateStatus,
} from '../../ui/state/extensions.js';
import {
copyExtension,
installExtension,
uninstallExtension,
loadExtension,
loadInstallMetadata,
ExtensionStorage,
loadExtensionConfig,
} from '../extension.js';
import { checkForExtensionUpdate } from './github.js';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import { getErrorMessage } from '../../utils/errors.js';
export interface ExtensionUpdateInfo {
name: string;
originalVersion: string;
updatedVersion: string;
}
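/**
 * Updates a single extension in place (summary added for clarity; it restates
 * the implementation below). The current install is first copied to a temp
 * directory, then uninstalled and reinstalled from its install metadata; on
 * failure the backup is copied back and the state is set to ERROR, otherwise
 * the state becomes UPDATED_NEEDS_RESTART.
 */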
export async function updateExtension(
extension: GeminiCLIExtension,
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
currentState: ExtensionUpdateState,
dispatchExtensionStateUpdate: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo | undefined> {
if (currentState === ExtensionUpdateState.UPDATING) {
return undefined;
}
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UPDATING },
});
const installMetadata = loadInstallMetadata(extension.path);
if (!installMetadata?.type) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error(
`Extension ${extension.name} cannot be updated, type is unknown.`,
);
}
if (installMetadata?.type === 'link') {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UP_TO_DATE },
});
throw new Error(`Extension is linked, so it does not need to be updated`);
}
const originalVersion = extension.version;
const tempDir = await ExtensionStorage.createTmpDir();
try {
await copyExtension(extension.path, tempDir);
const previousExtensionConfig = await loadExtensionConfig({
extensionDir: extension.path,
workspaceDir: cwd,
});
await uninstallExtension(extension.name, cwd);
await installExtension(
installMetadata,
requestConsent,
cwd,
previousExtensionConfig,
);
const updatedExtensionStorage = new ExtensionStorage(extension.name);
const updatedExtension = loadExtension({
extensionDir: updatedExtensionStorage.getExtensionDir(),
workspaceDir: cwd,
});
if (!updatedExtension) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error('Updated extension not found after installation.');
}
const updatedVersion = updatedExtension.config.version;
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
return {
name: extension.name,
originalVersion,
updatedVersion,
};
} catch (e) {
console.error(
`Error updating extension, rolling back. ${getErrorMessage(e)}`,
);
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
await copyExtension(tempDir, extension.path);
throw e;
} finally {
await fs.promises.rm(tempDir, { recursive: true, force: true });
}
}
export async function updateAllUpdatableExtensions(
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
extensions: GeminiCLIExtension[],
extensionsState: Map<string, ExtensionUpdateStatus>,
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo[]> {
return (
await Promise.all(
extensions
.filter(
(extension) =>
extensionsState.get(extension.name)?.status ===
ExtensionUpdateState.UPDATE_AVAILABLE,
)
.map((extension) =>
updateExtension(
extension,
cwd,
requestConsent,
extensionsState.get(extension.name)!.status,
dispatch,
),
),
)
).filter((updateInfo) => !!updateInfo);
}
export interface ExtensionUpdateCheckResult {
state: ExtensionUpdateState;
error?: string;
}
export async function checkForAllExtensionUpdates(
extensions: GeminiCLIExtension[],
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<void> {
dispatch({ type: 'BATCH_CHECK_START' });
const promises: Array<Promise<void>> = [];
for (const extension of extensions) {
if (!extension.installMetadata) {
dispatch({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.NOT_UPDATABLE,
},
});
continue;
}
promises.push(
checkForExtensionUpdate(extension, (updatedState) => {
dispatch({
type: 'SET_STATE',
payload: { name: extension.name, state: updatedState },
});
}),
);
}
await Promise.all(promises);
dispatch({ type: 'BATCH_CHECK_END' });
}

View File

@@ -15,6 +15,11 @@ export interface VariableSchema {
[key: string]: VariableDefinition;
}
export interface LoadExtensionContext {
extensionDir: string;
workspaceDir: string;
}
const PATH_SEPARATOR_DEFINITION = {
type: 'string',
description: 'The path separator.',
@@ -25,6 +30,10 @@ export const VARIABLE_SCHEMA = {
type: 'string',
description: 'The path of the extension in the filesystem.',
},
workspacePath: {
type: 'string',
description: 'The absolute path of the current workspace.',
},
'/': PATH_SEPARATOR_DEFINITION,
pathSeparator: PATH_SEPARATOR_DEFINITION,
} as const;
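// Illustrative usage (assumption, not part of this change): with the usual
// ${variable} substitution applied to these schema entries, an extension
// config value such as "${workspacePath}${/}docs" would expand to the current
// workspace directory joined with "docs" using the platform path separator.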