Compare commits

..

29 Commits

Author SHA1 Message Date
LaZzyMan
b37ede07e8 fix/gemini extension install error 2026-01-14 17:48:25 +08:00
LaZzyMan
0a88dd7861 fix: fix tests 2026-01-14 16:50:59 +08:00
LaZzyMan
70991e474f fix/lint error 2026-01-14 15:42:50 +08:00
LaZzyMan
551e546974 feat: move extension to core package 2026-01-14 15:30:27 +08:00
LaZzyMan
74013bd8b2 feat: settings extension 2026-01-08 13:49:59 +08:00
LaZzyMan
18713ef2b0 feat: install from gemini 2026-01-07 19:17:34 +08:00
LaZzyMan
50dac93c80 feat: migrate command format 2026-01-07 13:43:00 +08:00
LaZzyMan
22504b0a5b feat: add extension for gemini and claude 2026-01-07 11:06:17 +08:00
Mingholy
105ad743fa Merge pull request #1284 from tt-a1i/fix/boolean-string-coercion
fix(core): coerce string boolean values in schema validation
2025-12-29 18:27:36 +08:00
mingholy.lmh
ac3f7cb8c8 fix: ts erros in test file 2025-12-29 17:20:25 +08:00
顾盼
e27e9a5f18 Merge pull request #1288 from Weaxs/main
support merge ChatCompletionContentPart && add filterEmptyMessages
2025-12-29 10:50:30 +08:00
pomelo
2578d8c151 Merge pull request #1360 from IceyLiu/icey-feat
docs: add AionUi to ecosystem section
2025-12-29 10:12:53 +08:00
VeryLiu-lab
a877fedc52 docs: add AionUi to ecosystem section
Add AionUi as a graphical interface option for Qwen Code users.
AionUi provides a modern GUI for command-line AI tools including
Qwen Code, offering an alternative to the terminal interface.
2025-12-28 21:56:59 +08:00
pomelo
2bc8079519 Merge pull request #1332 from QwenLM/fix-language
Fix multi-language and documentation related issues.
2025-12-26 23:02:37 +08:00
pomelo-nwu
25dbe98e6e fix(cli): prevent HTML comment escape by sanitizing --!> and --> 2025-12-26 22:45:35 +08:00
pomelo-nwu
e5dbd69899 feat: fix ci 2025-12-26 22:38:44 +08:00
pomelo-nwu
101bd5f9b3 i18n: fix missing translations for /clear command 2025-12-24 17:28:49 +08:00
pomelo-nwu
61c626b618 fix: check-i18n script 2025-12-24 17:22:21 +08:00
pomelo-nwu
a28278e950 feat: update code 2025-12-24 17:12:27 +08:00
pomelo
a8f7bab544 Merge pull request #1293 from fazilus/feat/russian
feat(i18n): update Russian translation with new strings
2025-12-24 11:38:29 +08:00
pomelo-nwu
4ca62ba836 feat: adjust code 2025-12-24 10:26:30 +08:00
pomelo
398a1044ce Merge pull request #1247 from afarber/1244-language-output-default
feat(i18n): auto-detect LLM output language from system locale
2025-12-23 15:49:06 +08:00
Alexander Farber
f07259a7c9 Add German UI language support and normalize locale codes for LLM output 2025-12-20 10:21:16 +01:00
Alexander Farber
4d9f25e9fe Auto-detect LLM output language from system locale on first startup 2025-12-20 10:21:16 +01:00
Fazil
15efeb0107 feat(i18n): update Russian translation with new strings 2025-12-18 15:14:08 +03:00
Weaxs
d2bc46cbb4 remove filterEmptyMessages 2025-12-18 00:55:47 +08:00
Weaxs
84eb5c562f support merge ChatCompletionContentPart && add filterEmptyMessages 2025-12-18 00:46:48 +08:00
Tu Shaokun
7b01b26ff5 perf(core): avoid recompiling schema on retry 2025-12-17 16:27:42 +08:00
Tu Shaokun
0f3e97ea1c fix(core): coerce string boolean values in schema validation
Self-hosted LLMs sometimes return "true"/"false" strings instead of
actual boolean values for tool parameters like `is_background`. This
causes schema validation to fail with type errors.

Fixes #1267
2025-12-17 16:14:02 +08:00
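A hedged sketch of the kind of coercion that commit describes (not the project's actual implementation; the schema shape and function name here are assumptions, and the real fix in the core package may differ in detail):

```typescript
// Sketch only: coerce "true"/"false" strings to booleans for parameters whose
// schema declares `type: "boolean"`, e.g. `is_background`, before validation.
type JsonSchema = { type?: string; properties?: Record<string, JsonSchema> };

function coerceStringBooleans(
  params: Record<string, unknown>,
  schema: JsonSchema,
): Record<string, unknown> {
  const coerced: Record<string, unknown> = { ...params };
  for (const [key, propSchema] of Object.entries(schema.properties ?? {})) {
    const value = coerced[key];
    if (
      propSchema.type === 'boolean' &&
      (value === 'true' || value === 'false')
    ) {
      coerced[key] = value === 'true';
    }
  }
  return coerced;
}
```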
120 changed files with 10018 additions and 5844 deletions

1
.gitignore vendored
View File

@@ -23,6 +23,7 @@ package-lock.json
.idea
*.iml
.cursor
.qoder
# OS metadata
.DS_Store

View File

@@ -191,6 +191,7 @@ See [settings](https://qwenlm.github.io/qwen-code-docs/en/users/configuration/se
Looking for a graphical interface?
- [**AionUi**](https://github.com/iOfficeAI/AionUi) A modern GUI for command-line AI tools including Qwen Code
- [**Gemini CLI Desktop**](https://github.com/Piebald-AI/gemini-cli-desktop) A cross-platform desktop/web/mobile UI for Qwen Code
## Troubleshooting

View File

@@ -10,4 +10,5 @@ export default {
mcp: 'MCP',
'token-caching': 'Token Caching',
sandbox: 'Sandboxing',
language: 'i18n',
};

View File

@@ -48,7 +48,7 @@ Commands specifically for controlling interface and output language.
| → `ui [language]` | Set UI interface language | `/language ui zh-CN` |
| → `output [language]` | Set LLM output language | `/language output Chinese` |
- Available UI languages: `zh-CN` (Simplified Chinese), `en-US` (English)
- Available built-in UI languages: `zh-CN` (Simplified Chinese), `en-US` (English), `ru-RU` (Russian), `de-DE` (German)
- Output language examples: `Chinese`, `English`, `Japanese`, etc.
### 1.4 Tool and Model Management
@@ -72,17 +72,16 @@ Commands for managing AI tools and models.
Commands for obtaining information and performing system settings.
| Command | Description | Usage Examples |
| --------------- | ----------------------------------------------- | ------------------------------------------------ |
| `/help` | Display help information for available commands | `/help` or `/?` |
| `/about` | Display version information | `/about` |
| `/stats` | Display detailed statistics for current session | `/stats` |
| `/settings` | Open settings editor | `/settings` |
| `/auth` | Change authentication method | `/auth` |
| `/bug` | Submit issue about Qwen Code | `/bug Button click unresponsive` |
| `/copy` | Copy last output content to clipboard | `/copy` |
| `/quit-confirm` | Show confirmation dialog before quitting | `/quit-confirm` (shortcut: press `Ctrl+C` twice) |
| `/quit` | Exit Qwen Code immediately | `/quit` or `/exit` |
| Command | Description | Usage Examples |
| ----------- | ----------------------------------------------- | -------------------------------- |
| `/help` | Display help information for available commands | `/help` or `/?` |
| `/about` | Display version information | `/about` |
| `/stats` | Display detailed statistics for current session | `/stats` |
| `/settings` | Open settings editor | `/settings` |
| `/auth` | Change authentication method | `/auth` |
| `/bug` | Submit issue about Qwen Code | `/bug Button click unresponsive` |
| `/copy` | Copy last output content to clipboard | `/copy` |
| `/quit` | Exit Qwen Code immediately | `/quit` or `/exit` |
### 1.6 Common Shortcuts

View File

@@ -0,0 +1,136 @@
# Internationalization (i18n) & Language
Qwen Code is built for multilingual workflows: it supports UI localization (i18n/l10n) in the CLI, lets you choose the assistant output language, and allows custom UI language packs.
## Overview
From a user's point of view, Qwen Code's “internationalization” spans multiple layers:
| Capability / Setting | What it controls | Where stored |
| ------------------------ | ---------------------------------------------------------------------- | ---------------------------- |
| `/language ui` | Terminal UI text (menus, system messages, prompts) | `~/.qwen/settings.json` |
| `/language output` | Language the AI responds in (an output preference, not UI translation) | `~/.qwen/output-language.md` |
| Custom UI language packs | Overrides/extends built-in UI translations | `~/.qwen/locales/*.js` |
## UI Language
This is the CLI's UI localization layer (i18n/l10n): it controls the language of menus, prompts, and system messages.
### Setting the UI Language
Use the `/language ui` command:
```bash
/language ui zh-CN # Chinese
/language ui en-US # English
/language ui ru-RU # Russian
/language ui de-DE # German
```
Aliases are also supported:
```bash
/language ui zh # Chinese
/language ui en # English
/language ui ru # Russian
/language ui de # German
```
### Auto-detection
On first startup, Qwen Code detects your system locale and sets the UI language automatically.
Detection priority (a minimal sketch follows this list):
1. `QWEN_CODE_LANG` environment variable
2. `LANG` environment variable
3. System locale via JavaScript Intl API
4. Default: English
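A minimal sketch of that priority order, assuming a hypothetical `resolveUiLanguage` helper (the shipped CLI may normalize locale codes differently):

```typescript
// Sketch of the detection order described above; not the actual implementation.
function resolveUiLanguage(): string {
  // 1. Explicit override via QWEN_CODE_LANG
  const explicit = process.env['QWEN_CODE_LANG'];
  if (explicit) return explicit;

  // 2. POSIX LANG variable, e.g. "de_DE.UTF-8" -> "de"
  const lang = process.env['LANG'];
  if (lang) return lang.split(/[._]/)[0];

  // 3. System locale via the Intl API, e.g. "zh-CN" -> "zh"
  const intlLocale = Intl.DateTimeFormat().resolvedOptions().locale;
  if (intlLocale) return intlLocale.split('-')[0];

  // 4. Fall back to English
  return 'en';
}
```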
## LLM Output Language
The LLM output language controls what language the AI assistant responds in, regardless of what language you type your questions in.
### How It Works
The LLM output language is controlled by a rule file at `~/.qwen/output-language.md`. This file is automatically included in the LLM's context during startup, instructing it to respond in the specified language.
### Auto-detection
On first startup, if no `output-language.md` file exists, Qwen Code automatically creates one based on your system locale (a rough sketch follows this list). For example:
- System locale `zh` creates a rule for Chinese responses
- System locale `en` creates a rule for English responses
- System locale `ru` creates a rule for Russian responses
- System locale `de` creates a rule for German responses
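A rough sketch of that first-startup step; the file name and location come from this page, but the exact rule text written into the file is an assumption:

```typescript
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';

// Create ~/.qwen/output-language.md from the system locale if it is missing.
function ensureOutputLanguageRule(systemLocale: string): void {
  const languageNames: Record<string, string> = {
    zh: 'Chinese',
    en: 'English',
    ru: 'Russian',
    de: 'German',
  };
  const rulePath = path.join(os.homedir(), '.qwen', 'output-language.md');
  if (fs.existsSync(rulePath)) return; // never overwrite an existing rule
  const language = languageNames[systemLocale] ?? 'English';
  fs.mkdirSync(path.dirname(rulePath), { recursive: true });
  // Assumed wording; the CLI's generated rule text may differ.
  fs.writeFileSync(rulePath, `Always respond in ${language}.\n`);
}
```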
### Manual Setting
Use `/language output <language>` to change:
```bash
/language output Chinese
/language output English
/language output Japanese
/language output German
```
Any language name works. The LLM will be instructed to respond in that language.
> [!note]
>
> After changing the output language, restart Qwen Code for the change to take effect.
### File Location
```
~/.qwen/output-language.md
```
## Configuration
### Via Settings Dialog
1. Run `/settings`
2. Find "Language" under General
3. Select your preferred UI language
### Via Environment Variable
```bash
export QWEN_CODE_LANG=zh
```
This influences auto-detection on first startup (if you haven't set a UI language and no `output-language.md` file exists yet).
## Custom Language Packs
For UI translations, you can create custom language packs in `~/.qwen/locales/`:
- Example: `~/.qwen/locales/es.js` for Spanish
- Example: `~/.qwen/locales/fr.js` for French
User directory takes precedence over built-in translations.
> [!tip]
>
> Contributions are welcome if you'd like to improve the built-in translations or add new languages.
> For a concrete example, see [PR #1238: feat(i18n): add Russian language support](https://github.com/QwenLM/qwen-code/pull/1238).
### Language Pack Format
```javascript
// ~/.qwen/locales/es.js
export default {
Hello: 'Hola',
Settings: 'Configuración',
// ... more translations
};
```
## Related Commands
- `/language` - Show current language settings
- `/language ui [lang]` - Set UI language
- `/language output <language>` - Set LLM output language
- `/settings` - Open settings dialog

View File

@@ -24,6 +24,8 @@ export default tseslint.config(
'.integration-tests/**',
'packages/**/.integration-test/**',
'dist/**',
'docs-site/.next/**',
'docs-site/out/**',
],
},
eslint.configs.recommended,

41
package-lock.json generated
View File

@@ -3875,6 +3875,17 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/prompts": {
"version": "2.4.9",
"resolved": "https://registry.npmjs.org/@types/prompts/-/prompts-2.4.9.tgz",
"integrity": "sha512-qTxFi6Buiu8+50/+3DGIWLHM6QuWsEKugJnnP6iv2Mc4ncxE4A/OJkjuVOA+5X0X1S/nq5VJRa8Lu+nwcvbrKA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"kleur": "^3.0.3"
}
},
"node_modules/@types/prop-types": {
"version": "15.7.15",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
@@ -10984,6 +10995,15 @@
"json-buffer": "3.0.1"
}
},
"node_modules/kleur": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz",
"integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==",
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/ky": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/ky/-/ky-1.8.1.tgz",
@@ -13393,6 +13413,19 @@
"dev": true,
"license": "MIT"
},
"node_modules/prompts": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz",
"integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==",
"license": "MIT",
"dependencies": {
"kleur": "^3.0.3",
"sisteransi": "^1.0.5"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/prop-types": {
"version": "15.8.1",
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz",
@@ -14753,6 +14786,12 @@
"url": "https://github.com/steveukx/git-js?sponsor=1"
}
},
"node_modules/sisteransi": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
"integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==",
"license": "MIT"
},
"node_modules/slash": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
@@ -17338,6 +17377,7 @@
"ink-spinner": "^5.0.0",
"lowlight": "^3.3.0",
"open": "^10.1.2",
"prompts": "^2.4.2",
"qrcode-terminal": "^0.12.0",
"react": "^19.1.0",
"read-package-up": "^11.0.0",
@@ -17366,6 +17406,7 @@
"@types/diff": "^7.0.2",
"@types/dotenv": "^6.1.1",
"@types/node": "^20.11.24",
"@types/prompts": "^2.4.9",
"@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6",
"@types/semver": "^7.7.0",

View File

@@ -46,6 +46,7 @@
"comment-json": "^4.2.5",
"diff": "^7.0.0",
"dotenv": "^17.1.0",
"prompts": "^2.4.2",
"fzf": "^0.5.2",
"glob": "^10.5.0",
"highlight.js": "^11.11.1",
@@ -79,6 +80,7 @@
"@types/command-exists": "^1.2.3",
"@types/diff": "^7.0.2",
"@types/dotenv": "^6.1.1",
"@types/prompts": "^2.4.9",
"@types/node": "^20.11.24",
"@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6",

View File

@@ -27,10 +27,8 @@ import { Readable, Writable } from 'node:stream';
import type { LoadedSettings } from '../config/settings.js';
import { SettingScope } from '../config/settings.js';
import { z } from 'zod';
import { ExtensionStorage, type Extension } from '../config/extension.js';
import type { CliArgs } from '../config/config.js';
import { loadCliConfig } from '../config/config.js';
import { ExtensionEnablementManager } from '../config/extensions/extensionEnablement.js';
// Import the modular Session class
import { Session } from './session/Session.js';
@@ -38,7 +36,6 @@ import { Session } from './session/Session.js';
export async function runAcpAgent(
config: Config,
settings: LoadedSettings,
extensions: Extension[],
argv: CliArgs,
) {
const stdout = Writable.toWeb(process.stdout) as WritableStream;
@@ -51,8 +48,7 @@ export async function runAcpAgent(
console.debug = console.error;
new acp.AgentSideConnection(
(client: acp.Client) =>
new GeminiAgent(config, settings, extensions, argv, client),
(client: acp.Client) => new GeminiAgent(config, settings, argv, client),
stdout,
stdin,
);
@@ -65,7 +61,6 @@ class GeminiAgent {
constructor(
private config: Config,
private settings: LoadedSettings,
private extensions: Extension[],
private argv: CliArgs,
private client: acp.Client,
) {}
@@ -215,16 +210,7 @@ class GeminiAgent {
continue: false,
};
const config = await loadCliConfig(
settings,
this.extensions,
new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
this.argv.extensions,
),
argvForSession,
cwd,
);
const config = await loadCliConfig(settings, argvForSession, cwd);
await config.initialize();
return config;

View File

@@ -0,0 +1,87 @@
import type { ConfirmationRequest } from '../../ui/types.js';
/**
* Requests consent from the user to perform an action, by reading a Y/n
* character from stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param consentDescription The description of the thing they will be consenting to.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentNonInteractive(
consentDescription: string,
): Promise<boolean> {
console.info(consentDescription);
const result = await promptForConsentNonInteractive(
'Do you want to continue? [Y/n]: ',
);
return result;
}
/**
* Requests consent from the user to perform an action, in interactive mode.
*
* This should not be called from non-interactive mode as it will not work.
*
* @param consentDescription The description of the thing they will be consenting to.
* @param addExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentInteractive(
consentDescription: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return promptForConsentInteractive(
consentDescription + '\n\nDo you want to continue?',
addExtensionUpdateConfirmationRequest,
);
}
/**
* Asks users a prompt and awaits for a y/n response on stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param prompt A yes/no prompt to ask the user
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
*/
async function promptForConsentNonInteractive(
prompt: string,
): Promise<boolean> {
const readline = await import('node:readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return new Promise((resolve) => {
rl.question(prompt, (answer) => {
rl.close();
resolve(['y', ''].includes(answer.trim().toLowerCase()));
});
});
}
/**
* Asks users an interactive yes/no prompt.
*
* This should not be called from non-interactive mode as it will break the CLI.
*
* @param prompt A markdown prompt to ask the user
* @param addExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
* @returns Whether or not the user answers yes.
*/
async function promptForConsentInteractive(
prompt: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return new Promise<boolean>((resolve) => {
addExtensionUpdateConfirmationRequest({
prompt,
onConfirm: (resolvedConfirmed) => {
resolve(resolvedConfirmed);
},
});
});
}

View File

@@ -5,21 +5,22 @@
*/
import { type CommandModule } from 'yargs';
import { disableExtension } from '../../config/extension.js';
import { SettingScope } from '../../config/settings.js';
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';
interface DisableArgs {
name: string;
scope?: string;
}
export function handleDisable(args: DisableArgs) {
export async function handleDisable(args: DisableArgs) {
const extensionManager = await getExtensionManager();
try {
if (args.scope?.toLowerCase() === 'workspace') {
disableExtension(args.name, SettingScope.Workspace);
extensionManager.disableExtension(args.name, SettingScope.Workspace);
} else {
disableExtension(args.name, SettingScope.User);
extensionManager.disableExtension(args.name, SettingScope.User);
}
console.log(
`Extension "${args.name}" successfully disabled for scope "${args.scope}".`,
@@ -61,8 +62,8 @@ export const disableCommand: CommandModule = {
}
return true;
}),
handler: (argv) => {
handleDisable({
handler: async (argv) => {
await handleDisable({
name: argv['name'] as string,
scope: argv['scope'] as string,
});

View File

@@ -6,20 +6,22 @@
import { type CommandModule } from 'yargs';
import { FatalConfigError, getErrorMessage } from '@qwen-code/qwen-code-core';
import { enableExtension } from '../../config/extension.js';
import { SettingScope } from '../../config/settings.js';
import { getExtensionManager } from './utils.js';
interface EnableArgs {
name: string;
scope?: string;
}
export function handleEnable(args: EnableArgs) {
export async function handleEnable(args: EnableArgs) {
const extensionManager = await getExtensionManager();
try {
if (args.scope?.toLowerCase() === 'workspace') {
enableExtension(args.name, SettingScope.Workspace);
extensionManager.enableExtension(args.name, SettingScope.Workspace);
} else {
enableExtension(args.name, SettingScope.User);
extensionManager.enableExtension(args.name, SettingScope.User);
}
if (args.scope) {
console.log(
@@ -66,8 +68,8 @@ export const enableCommand: CommandModule = {
}
return true;
}),
handler: (argv) => {
handleEnable({
handler: async (argv) => {
await handleEnable({
name: argv['name'] as string,
scope: argv['scope'] as string,
});

View File

@@ -5,58 +5,64 @@
*/
import type { CommandModule } from 'yargs';
import {
installExtension,
requestConsentNonInteractive,
} from '../../config/extension.js';
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
ExtensionManager,
parseInstallSource,
} from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import { stat } from 'node:fs/promises';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
import { loadSettings } from '../../config/settings.js';
import { requestConsentNonInteractive } from './consent.js';
interface InstallArgs {
source: string;
ref?: string;
autoUpdate?: boolean;
allowPreRelease?: boolean;
consent?: boolean;
}
export async function handleInstall(args: InstallArgs) {
try {
let installMetadata: ExtensionInstallMetadata;
const { source } = args;
const installMetadata = await parseInstallSource(args.source);
if (
source.startsWith('http://') ||
source.startsWith('https://') ||
source.startsWith('git@') ||
source.startsWith('sso://')
installMetadata.type !== 'git' &&
installMetadata.type !== 'github-release'
) {
installMetadata = {
source,
type: 'git',
ref: args.ref,
autoUpdate: args.autoUpdate,
};
} else {
if (args.ref || args.autoUpdate) {
throw new Error(
'--ref and --auto-update are not applicable for local extensions.',
'--ref and --auto-update are not applicable for marketplace extensions.',
);
}
try {
await stat(source);
installMetadata = {
source,
type: 'local',
};
} catch {
throw new Error('Install source not found.');
}
}
const name = await installExtension(
installMetadata,
requestConsentNonInteractive,
const requestConsent = args.consent
? () => Promise.resolve(true)
: requestConsentNonInteractive;
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
isWorkspaceTrusted: !!isWorkspaceTrusted(
loadSettings(workspaceDir).merged,
),
requestConsent,
});
await extensionManager.refreshCache();
const extension = await extensionManager.installExtension(
{
...installMetadata,
ref: args.ref,
autoUpdate: args.autoUpdate,
allowPreRelease: args.allowPreRelease,
},
requestConsent,
);
console.log(
`Extension "${extension.name}" installed successfully and enabled.`,
);
console.log(`Extension "${name}" installed successfully and enabled.`);
} catch (error) {
console.error(getErrorMessage(error));
process.exit(1);
@@ -65,11 +71,13 @@ export async function handleInstall(args: InstallArgs) {
export const installCommand: CommandModule = {
command: 'install <source>',
describe: 'Installs an extension from a git repository URL or a local path.',
describe:
'Installs an extension from a git repository URL, local path, or claude marketplace (marketplace-url:plugin-name).',
builder: (yargs) =>
yargs
.positional('source', {
describe: 'The github URL or local path of the extension to install.',
describe:
'The github URL, local path, or marketplace source (marketplace-url:plugin-name) of the extension to install.',
type: 'string',
demandOption: true,
})
@@ -81,6 +89,16 @@ export const installCommand: CommandModule = {
describe: 'Enable auto-update for this extension.',
type: 'boolean',
})
.option('pre-release', {
describe: 'Enable pre-release versions for this extension.',
type: 'boolean',
})
.option('consent', {
describe:
'Acknowledge the security risks of installing an extension and skip the confirmation prompt.',
type: 'boolean',
default: false,
})
.check((argv) => {
if (!argv.source) {
throw new Error('The source argument must be provided.');
@@ -92,6 +110,8 @@ export const installCommand: CommandModule = {
source: argv['source'] as string,
ref: argv['ref'] as string | undefined,
autoUpdate: argv['auto-update'] as boolean | undefined,
allowPreRelease: argv['pre-release'] as boolean | undefined,
consent: argv['consent'] as boolean | undefined,
});
},
};

View File

@@ -5,13 +5,10 @@
*/
import type { CommandModule } from 'yargs';
import {
installExtension,
requestConsentNonInteractive,
} from '../../config/extension.js';
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
import { type ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import { requestConsentNonInteractive } from './consent.js';
import { getExtensionManager } from './utils.js';
interface InstallArgs {
path: string;
@@ -23,12 +20,14 @@ export async function handleLink(args: InstallArgs) {
source: args.path,
type: 'link',
};
const extensionName = await installExtension(
const extensionManager = await getExtensionManager();
const extension = await extensionManager.installExtension(
installMetadata,
requestConsentNonInteractive,
);
console.log(
`Extension "${extensionName}" linked successfully and enabled.`,
`Extension "${extension.name}" linked successfully and enabled.`,
);
} catch (error) {
console.error(getErrorMessage(error));

View File

@@ -5,19 +5,23 @@
*/
import type { CommandModule } from 'yargs';
import { loadUserExtensions, toOutputString } from '../../config/extension.js';
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';
export async function handleList() {
try {
const extensions = loadUserExtensions();
const extensionManager = await getExtensionManager();
const extensions = extensionManager.getLoadedExtensions();
if (extensions.length === 0) {
console.log('No extensions installed.');
return;
}
console.log(
extensions
.map((extension, _): string => toOutputString(extension, process.cwd()))
.map((extension, _): string =>
extensionManager.toOutputString(extension, process.cwd()),
)
.join('\n\n'),
);
} catch (error) {

View File

@@ -5,8 +5,11 @@
*/
import type { CommandModule } from 'yargs';
import { uninstallExtension } from '../../config/extension.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { requestConsentNonInteractive } from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
import { loadSettings } from '../../config/settings.js';
interface UninstallArgs {
name: string; // can be extension name or source URL.
@@ -14,7 +17,16 @@ interface UninstallArgs {
export async function handleUninstall(args: UninstallArgs) {
try {
await uninstallExtension(args.name);
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
requestConsent: requestConsentNonInteractive,
isWorkspaceTrusted: !!isWorkspaceTrusted(
loadSettings(workspaceDir).merged,
),
});
await extensionManager.refreshCache();
await extensionManager.uninstallExtension(args.name, false);
console.log(`Extension "${args.name}" successfully uninstalled.`);
} catch (error) {
console.error(getErrorMessage(error));

View File

@@ -5,22 +5,13 @@
*/
import type { CommandModule } from 'yargs';
import {
loadExtensions,
annotateActiveExtensions,
ExtensionStorage,
requestConsentNonInteractive,
} from '../../config/extension.js';
import {
updateAllUpdatableExtensions,
type ExtensionUpdateInfo,
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { checkForExtensionUpdate } from '../../config/extensions/github.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import {
checkForExtensionUpdate,
type ExtensionUpdateInfo,
} from '@qwen-code/qwen-code-core';
import { getExtensionManager } from './utils.js';
interface UpdateArgs {
name?: string;
@@ -31,19 +22,9 @@ const updateOutput = (info: ExtensionUpdateInfo) =>
`Extension "${info.name}" successfully updated: ${info.originalVersion} → ${info.updatedVersion}.`;
export async function handleUpdate(args: UpdateArgs) {
const workingDir = process.cwd();
const extensionEnablementManager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
// Force enable named extensions, otherwise we will only update the enabled
// ones.
args.name ? [args.name] : [],
);
const allExtensions = loadExtensions(extensionEnablementManager);
const extensions = annotateActiveExtensions(
allExtensions,
workingDir,
extensionEnablementManager,
);
const extensionManager = await getExtensionManager();
const extensions = extensionManager.getLoadedExtensions();
if (args.name) {
try {
const extension = extensions.find(
@@ -53,25 +34,23 @@ export async function handleUpdate(args: UpdateArgs) {
console.log(`Extension "${args.name}" not found.`);
return;
}
let updateState: ExtensionUpdateState | undefined;
if (!extension.installMetadata) {
console.log(
`Unable to install extension "${args.name}" due to missing install metadata`,
);
return;
}
await checkForExtensionUpdate(extension, (newState) => {
updateState = newState;
});
const updateState = await checkForExtensionUpdate(
extension,
extensionManager,
);
if (updateState !== ExtensionUpdateState.UPDATE_AVAILABLE) {
console.log(`Extension "${args.name}" is already up to date.`);
return;
}
// TODO(chrstnb): we should list extensions if the requested extension is not installed.
const updatedExtensionInfo = (await updateExtension(
const updatedExtensionInfo = (await extensionManager.updateExtension(
extension,
workingDir,
requestConsentNonInteractive,
updateState,
() => {},
))!;
@@ -92,18 +71,15 @@ export async function handleUpdate(args: UpdateArgs) {
if (args.all) {
try {
const extensionState = new Map();
await checkForAllExtensionUpdates(extensions, (action) => {
if (action.type === 'SET_STATE') {
extensionState.set(action.payload.name, {
status: action.payload.state,
await extensionManager.checkForAllExtensionUpdates(
(extensionName, state) => {
extensionState.set(extensionName, {
status: state,
processed: true, // No need to process as we will force the update.
});
}
});
let updateInfos = await updateAllUpdatableExtensions(
workingDir,
requestConsentNonInteractive,
extensions,
},
);
let updateInfos = await extensionManager.updateAllUpdatableExtensions(
extensionState,
() => {},
);

View File

@@ -0,0 +1,21 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { loadSettings } from '../../config/settings.js';
import { requestConsentNonInteractive } from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
export async function getExtensionManager(): Promise<ExtensionManager> {
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
requestConsent: requestConsentNonInteractive,
isWorkspaceTrusted: !!isWorkspaceTrusted(loadSettings(workspaceDir).merged),
});
await extensionManager.refreshCache();
return extensionManager;
}

View File

@@ -7,7 +7,8 @@
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import { listMcpServers } from './list.js';
import { loadSettings } from '../../config/settings.js';
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
import { loadExtensions } from '../../config/extension.js';
import { ExtensionStorage } from '../../config/extensions/storage.js';
import { createTransport } from '@qwen-code/qwen-code-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';

View File

@@ -8,10 +8,13 @@
import type { CommandModule } from 'yargs';
import { loadSettings } from '../../config/settings.js';
import type { MCPServerConfig } from '@qwen-code/qwen-code-core';
import { MCPServerStatus, createTransport } from '@qwen-code/qwen-code-core';
import {
MCPServerStatus,
createTransport,
ExtensionManager,
} from '@qwen-code/qwen-code-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
const COLOR_GREEN = '\u001b[32m';
const COLOR_YELLOW = '\u001b[33m';
@@ -22,22 +25,27 @@ async function getMcpServersFromConfig(): Promise<
Record<string, MCPServerConfig>
> {
const settings = loadSettings();
const extensions = loadExtensions(
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
);
const extensionManager = new ExtensionManager({
isWorkspaceTrusted: !!isWorkspaceTrusted(settings.merged),
telemetrySettings: settings.merged.telemetry,
});
await extensionManager.refreshCache();
const extensions = extensionManager.getLoadedExtensions();
const mcpServers = { ...(settings.merged.mcpServers || {}) };
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
if (extension.isActive) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
}
return mcpServers;
}

View File

@@ -16,7 +16,8 @@ import {
} from '@qwen-code/qwen-code-core';
import { loadCliConfig, parseArguments, type CliArgs } from './config.js';
import type { Settings } from './settings.js';
import { ExtensionStorage, type Extension } from './extension.js';
import type { Extension } from './extension.js';
import { ExtensionStorage } from './extensions/storage.js';
import * as ServerConfig from '@qwen-code/qwen-code-core';
import { isWorkspaceTrusted } from './trustedFolders.js';
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';

View File

@@ -9,7 +9,6 @@ import {
AuthType,
Config,
DEFAULT_QWEN_EMBEDDING_MODEL,
DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
EditTool,
FileDiscoveryService,
getCurrentGeminiMdFilename,
@@ -25,7 +24,6 @@ import {
SessionService,
type ResumedSessionData,
type FileFilteringOptions,
type MCPServerConfig,
} from '@qwen-code/qwen-code-core';
import { extensionsCommand } from '../commands/extensions.js';
import type { Settings } from './settings.js';
@@ -37,14 +35,11 @@ import { homedir } from 'node:os';
import { resolvePath } from '../utils/resolvePath.js';
import { getCliVersion } from '../utils/version.js';
import type { Extension } from './extension.js';
import { annotateActiveExtensions } from './extension.js';
import { loadSandboxConfig } from './sandboxConfig.js';
import { appEvents } from '../utils/events.js';
import { mcpCommand } from '../commands/mcp.js';
import { isWorkspaceTrusted } from './trustedFolders.js';
import type { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
import { buildWebSearchConfig } from './webSearch.js';
// Simple console logger for now - replace with actual logger if available
@@ -162,7 +157,7 @@ function normalizeOutputFormat(
return OutputFormat.TEXT;
}
export async function parseArguments(settings: Settings): Promise<CliArgs> {
export async function parseArguments(): Promise<CliArgs> {
const rawArgv = hideBin(process.argv);
const yargsInstance = yargs(rawArgv)
.locale('en')
@@ -537,11 +532,9 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
}),
)
// Register MCP subcommands
.command(mcpCommand);
if (settings?.experimental?.extensionManagement ?? true) {
yargsInstance.command(extensionsCommand);
}
.command(mcpCommand)
// Register Extension subcommands
.command(extensionsCommand);
yargsInstance
.version(await getCliVersion()) // This will enable the --version flag based on package.json
@@ -605,11 +598,11 @@ export async function loadHierarchicalGeminiMemory(
includeDirectoriesToReadGemini: readonly string[] = [],
debugMode: boolean,
fileService: FileDiscoveryService,
settings: Settings,
extensionContextFilePaths: string[] = [],
folderTrust: boolean,
memoryImportFormat: 'flat' | 'tree' = 'tree',
fileFilteringOptions?: FileFilteringOptions,
maxDirs: number = 200,
): Promise<{ memoryContent: string; fileCount: number }> {
// FIX: Use real, canonical paths for a reliable comparison to handle symlinks.
const realCwd = fs.realpathSync(path.resolve(currentWorkingDirectory));
@@ -636,7 +629,7 @@ export async function loadHierarchicalGeminiMemory(
folderTrust,
memoryImportFormat,
fileFilteringOptions,
settings.context?.discoveryMaxDirs,
maxDirs,
);
}
@@ -651,30 +644,17 @@ export function isDebugMode(argv: CliArgs): boolean {
export async function loadCliConfig(
settings: Settings,
extensions: Extension[],
extensionEnablementManager: ExtensionEnablementManager,
argv: CliArgs,
cwd: string = process.cwd(),
overrideExtensions?: string[],
): Promise<Config> {
const debugMode = isDebugMode(argv);
const memoryImportFormat = settings.context?.importFormat || 'tree';
const ideMode = settings.ide?.enabled ?? false;
const folderTrust = settings.security?.folderTrust?.enabled ?? false;
const trustedFolder = isWorkspaceTrusted(settings)?.isTrusted ?? true;
const allExtensions = annotateActiveExtensions(
extensions,
cwd,
extensionEnablementManager,
);
const activeExtensions = extensions.filter(
(_, i) => allExtensions[i].isActive,
);
// Set the context filename in the server's memoryTool module BEFORE loading memory
// TODO(b/343434939): This is a bit of a hack. The contextFileName should ideally be passed
// directly to the Config constructor in core, and have core handle setGeminiMdFilename.
@@ -686,51 +666,27 @@ export async function loadCliConfig(
setServerGeminiMdFilename(getCurrentGeminiMdFilename());
}
const extensionContextFilePaths = activeExtensions.flatMap(
(e) => e.contextFiles,
);
// Automatically load output-language.md if it exists
const outputLanguageFilePath = path.join(
let outputLanguageFilePath: string | undefined = path.join(
Storage.getGlobalQwenDir(),
'output-language.md',
);
if (fs.existsSync(outputLanguageFilePath)) {
extensionContextFilePaths.push(outputLanguageFilePath);
if (debugMode) {
logger.debug(
`Found output-language.md, adding to context files: ${outputLanguageFilePath}`,
);
}
} else {
outputLanguageFilePath = undefined;
}
const fileService = new FileDiscoveryService(cwd);
const fileFiltering = {
...DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
...settings.context?.fileFiltering,
};
const includeDirectories = (settings.context?.includeDirectories || [])
.map(resolvePath)
.concat((argv.includeDirectories || []).map(resolvePath));
// Call the (now wrapper) loadHierarchicalGeminiMemory which calls the server's version
const { memoryContent, fileCount } = await loadHierarchicalGeminiMemory(
cwd,
settings.context?.loadMemoryFromIncludeDirectories
? includeDirectories
: [],
debugMode,
fileService,
settings,
extensionContextFilePaths,
trustedFolder,
memoryImportFormat,
fileFiltering,
);
let mcpServers = mergeMcpServers(settings, activeExtensions);
const question = argv.promptInteractive || argv.prompt || '';
const inputFormat: InputFormat =
(argv.inputFormat as InputFormat | undefined) ?? InputFormat.TEXT;
@@ -844,38 +800,18 @@ export async function loadCliConfig(
const excludeTools = mergeExcludeTools(
settings,
activeExtensions,
extraExcludes.length > 0 ? extraExcludes : undefined,
argv.excludeTools,
);
const blockedMcpServers: Array<{ name: string; extensionName: string }> = [];
if (!argv.allowedMcpServerNames) {
if (settings.mcp?.allowed) {
mcpServers = allowedMcpServers(
mcpServers,
settings.mcp.allowed,
blockedMcpServers,
);
}
if (settings.mcp?.excluded) {
const excludedNames = new Set(settings.mcp.excluded.filter(Boolean));
if (excludedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) => !excludedNames.has(key)),
);
}
}
}
if (argv.allowedMcpServerNames) {
mcpServers = allowedMcpServers(
mcpServers,
argv.allowedMcpServerNames,
blockedMcpServers,
);
}
const allowedMcpServers = argv.allowedMcpServerNames
? new Set(argv.allowedMcpServerNames.filter(Boolean))
: settings.mcp?.allowed
? new Set(settings.mcp.allowed.filter(Boolean))
: undefined;
const excludedMcpServers = settings.mcp?.excluded
? new Set(settings.mcp.excluded.filter(Boolean))
: undefined;
const selectedAuthType =
(argv.authType as AuthType | undefined) ||
@@ -943,6 +879,8 @@ export async function loadCliConfig(
includeDirectories,
loadMemoryFromIncludeDirectories:
settings.context?.loadMemoryFromIncludeDirectories || false,
importFormat: settings.context?.importFormat || 'tree',
discoveryMaxDirs: settings.context?.discoveryMaxDirs || 200,
debugMode,
question,
fullContext: argv.allFiles || false,
@@ -952,9 +890,13 @@ export async function loadCliConfig(
toolDiscoveryCommand: settings.tools?.discoveryCommand,
toolCallCommand: settings.tools?.callCommand,
mcpServerCommand: settings.mcp?.serverCommand,
mcpServers,
userMemory: memoryContent,
geminiMdFileCount: fileCount,
mcpServers: settings.mcpServers || {},
allowedMcpServers: allowedMcpServers
? Array.from(allowedMcpServers)
: undefined,
excludedMcpServers: excludedMcpServers
? Array.from(excludedMcpServers)
: undefined,
approvalMode,
showMemoryUsage:
argv.showMemoryUsage || settings.ui?.showMemoryUsage || false,
@@ -977,15 +919,14 @@ export async function loadCliConfig(
fileDiscoveryService: fileService,
bugCommand: settings.advanced?.bugCommand,
model: resolvedModel,
extensionContextFilePaths,
outputLanguageFilePath,
sessionTokenLimit: settings.model?.sessionTokenLimit ?? -1,
maxSessionTurns:
argv.maxSessionTurns ?? settings.model?.maxSessionTurns ?? -1,
experimentalZedIntegration: argv.experimentalAcp || false,
experimentalSkills: argv.experimentalSkills || false,
listExtensions: argv.listExtensions || false,
extensions: allExtensions,
blockedMcpServers,
overrideExtensions: overrideExtensions || argv.extensions,
noBrowser: !!process.env['NO_BROWSER'],
authType: selectedAuthType,
inputFormat,
@@ -1040,61 +981,8 @@ export async function loadCliConfig(
});
}
function allowedMcpServers(
mcpServers: { [x: string]: MCPServerConfig },
allowMCPServers: string[],
blockedMcpServers: Array<{ name: string; extensionName: string }>,
) {
const allowedNames = new Set(allowMCPServers.filter(Boolean));
if (allowedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key, server]) => {
const isAllowed = allowedNames.has(key);
if (!isAllowed) {
blockedMcpServers.push({
name: key,
extensionName: server.extensionName || '',
});
}
return isAllowed;
}),
);
} else {
blockedMcpServers.push(
...Object.entries(mcpServers).map(([key, server]) => ({
name: key,
extensionName: server.extensionName || '',
})),
);
mcpServers = {};
}
return mcpServers;
}
function mergeMcpServers(settings: Settings, extensions: Extension[]) {
const mcpServers = { ...(settings.mcpServers || {}) };
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
logger.warn(
`Skipping extension MCP config for server with key "${key}" as it already exists.`,
);
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
return mcpServers;
}
function mergeExcludeTools(
settings: Settings,
extensions: Extension[],
extraExcludes?: string[] | undefined,
cliExcludeTools?: string[] | undefined,
): string[] {
@@ -1103,10 +991,5 @@ function mergeExcludeTools(
...(settings.tools?.exclude || []),
...(extraExcludes || []),
]);
for (const extension of extensions) {
for (const tool of extension.config.excludeTools || []) {
allExcludeTools.add(tool);
}
}
return [...allExcludeTools];
}

File diff suppressed because it is too large

View File

@@ -1,786 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import type {
MCPServerConfig,
GeminiCLIExtension,
ExtensionInstallMetadata,
} from '@qwen-code/qwen-code-core';
import {
QWEN_DIR,
Storage,
Config,
ExtensionInstallEvent,
ExtensionUninstallEvent,
ExtensionDisableEvent,
ExtensionEnableEvent,
logExtensionEnable,
logExtensionInstallEvent,
logExtensionUninstall,
logExtensionDisable,
} from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { SettingScope, loadSettings } from '../config/settings.js';
import { getErrorMessage } from '../utils/errors.js';
import { recursivelyHydrateStrings } from './extensions/variables.js';
import { isWorkspaceTrusted } from './trustedFolders.js';
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
import {
cloneFromGit,
downloadFromGitHubRelease,
} from './extensions/github.js';
import type { LoadExtensionContext } from './extensions/variableSchema.js';
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
import chalk from 'chalk';
import type { ConfirmationRequest } from '../ui/types.js';
export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';
export interface Extension {
path: string;
config: ExtensionConfig;
contextFiles: string[];
installMetadata?: ExtensionInstallMetadata | undefined;
}
export interface ExtensionConfig {
name: string;
version: string;
mcpServers?: Record<string, MCPServerConfig>;
contextFileName?: string | string[];
excludeTools?: string[];
}
export interface ExtensionUpdateInfo {
name: string;
originalVersion: string;
updatedVersion: string;
}
export class ExtensionStorage {
private readonly extensionName: string;
constructor(extensionName: string) {
this.extensionName = extensionName;
}
getExtensionDir(): string {
return path.join(
ExtensionStorage.getUserExtensionsDir(),
this.extensionName,
);
}
getConfigPath(): string {
return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
}
static getUserExtensionsDir(): string {
const storage = new Storage(os.homedir());
return storage.getExtensionsDir();
}
static async createTmpDir(): Promise<string> {
return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
}
}
export function getWorkspaceExtensions(workspaceDir: string): Extension[] {
// If the workspace dir is the user extensions dir, there are no workspace extensions.
if (path.resolve(workspaceDir) === path.resolve(os.homedir())) {
return [];
}
return loadExtensionsFromDir(workspaceDir);
}
export async function copyExtension(
source: string,
destination: string,
): Promise<void> {
await fs.promises.cp(source, destination, { recursive: true });
}
export async function performWorkspaceExtensionMigration(
extensions: Extension[],
requestConsent: (consent: string) => Promise<boolean>,
): Promise<string[]> {
const failedInstallNames: string[] = [];
for (const extension of extensions) {
try {
const installMetadata: ExtensionInstallMetadata = {
source: extension.path,
type: 'local',
};
await installExtension(installMetadata, requestConsent);
} catch (_) {
failedInstallNames.push(extension.config.name);
}
}
return failedInstallNames;
}
function getTelemetryConfig(cwd: string) {
const settings = loadSettings(cwd);
const config = new Config({
telemetry: settings.merged.telemetry,
interactive: false,
targetDir: cwd,
cwd,
model: '',
debugMode: false,
});
return config;
}
export function loadExtensions(
extensionEnablementManager: ExtensionEnablementManager,
workspaceDir: string = process.cwd(),
): Extension[] {
const settings = loadSettings(workspaceDir).merged;
const allExtensions = [...loadUserExtensions()];
if (
(isWorkspaceTrusted(settings) ?? true) &&
// Default management setting to true
!(settings.experimental?.extensionManagement ?? true)
) {
allExtensions.push(...getWorkspaceExtensions(workspaceDir));
}
const uniqueExtensions = new Map<string, Extension>();
for (const extension of allExtensions) {
if (
!uniqueExtensions.has(extension.config.name) &&
extensionEnablementManager.isEnabled(extension.config.name, workspaceDir)
) {
uniqueExtensions.set(extension.config.name, extension);
}
}
return Array.from(uniqueExtensions.values());
}
export function loadUserExtensions(): Extension[] {
const userExtensions = loadExtensionsFromDir(os.homedir());
const uniqueExtensions = new Map<string, Extension>();
for (const extension of userExtensions) {
if (!uniqueExtensions.has(extension.config.name)) {
uniqueExtensions.set(extension.config.name, extension);
}
}
return Array.from(uniqueExtensions.values());
}
export function loadExtensionsFromDir(dir: string): Extension[] {
const storage = new Storage(dir);
const extensionsDir = storage.getExtensionsDir();
if (!fs.existsSync(extensionsDir)) {
return [];
}
const extensions: Extension[] = [];
for (const subdir of fs.readdirSync(extensionsDir)) {
const extensionDir = path.join(extensionsDir, subdir);
const extension = loadExtension({ extensionDir, workspaceDir: dir });
if (extension != null) {
extensions.push(extension);
}
}
return extensions;
}
export function loadExtension(context: LoadExtensionContext): Extension | null {
const { extensionDir, workspaceDir } = context;
if (!fs.statSync(extensionDir).isDirectory()) {
return null;
}
const installMetadata = loadInstallMetadata(extensionDir);
let effectiveExtensionPath = extensionDir;
if (installMetadata?.type === 'link') {
effectiveExtensionPath = installMetadata.source;
}
try {
let config = loadExtensionConfig({
extensionDir: effectiveExtensionPath,
workspaceDir,
});
config = resolveEnvVarsInObject(config);
if (config.mcpServers) {
config.mcpServers = Object.fromEntries(
Object.entries(config.mcpServers).map(([key, value]) => [
key,
filterMcpConfig(value),
]),
);
}
const contextFiles = getContextFileNames(config)
.map((contextFileName) =>
path.join(effectiveExtensionPath, contextFileName),
)
.filter((contextFilePath) => fs.existsSync(contextFilePath));
return {
path: effectiveExtensionPath,
config,
contextFiles,
installMetadata,
};
} catch (e) {
console.error(
`Warning: Skipping extension in ${effectiveExtensionPath}: ${getErrorMessage(
e,
)}`,
);
return null;
}
}
export function loadExtensionByName(
name: string,
workspaceDir: string = process.cwd(),
): Extension | null {
const userExtensionsDir = ExtensionStorage.getUserExtensionsDir();
if (!fs.existsSync(userExtensionsDir)) {
return null;
}
for (const subdir of fs.readdirSync(userExtensionsDir)) {
const extensionDir = path.join(userExtensionsDir, subdir);
if (!fs.statSync(extensionDir).isDirectory()) {
continue;
}
const extension = loadExtension({ extensionDir, workspaceDir });
if (
extension &&
extension.config.name.toLowerCase() === name.toLowerCase()
) {
return extension;
}
}
return null;
}
function filterMcpConfig(original: MCPServerConfig): MCPServerConfig {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { trust, ...rest } = original;
return Object.freeze(rest);
}
export function loadInstallMetadata(
extensionDir: string,
): ExtensionInstallMetadata | undefined {
const metadataFilePath = path.join(extensionDir, INSTALL_METADATA_FILENAME);
try {
const configContent = fs.readFileSync(metadataFilePath, 'utf-8');
const metadata = JSON.parse(configContent) as ExtensionInstallMetadata;
return metadata;
} catch (_e) {
return undefined;
}
}
function getContextFileNames(config: ExtensionConfig): string[] {
if (!config.contextFileName) {
return ['QWEN.md'];
} else if (!Array.isArray(config.contextFileName)) {
return [config.contextFileName];
}
return config.contextFileName;
}
/**
* Returns an annotated list of extensions. If an extension is listed in enabledExtensionNames, it will be active.
* If enabledExtensionNames is empty, an extension is active unless it is disabled.
* @param extensions The base list of extensions.
* @param enabledExtensionNames The names of explicitly enabled extensions.
* @param workspaceDir The current workspace directory.
*/
export function annotateActiveExtensions(
extensions: Extension[],
workspaceDir: string,
manager: ExtensionEnablementManager,
): GeminiCLIExtension[] {
manager.validateExtensionOverrides(extensions);
return extensions.map((extension) => ({
name: extension.config.name,
version: extension.config.version,
isActive: manager.isEnabled(extension.config.name, workspaceDir),
path: extension.path,
installMetadata: extension.installMetadata,
}));
}
/**
* Requests consent from the user to perform an action, by reading a Y/n
* character from stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param consentDescription The description of the thing they will be consenting to.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentNonInteractive(
consentDescription: string,
): Promise<boolean> {
console.info(consentDescription);
const result = await promptForConsentNonInteractive(
'Do you want to continue? [Y/n]: ',
);
return result;
}
/**
* Requests consent from the user to perform an action, in interactive mode.
*
* This should not be called from non-interactive mode as it will not work.
*
* @param consentDescription The description of the thing they will be consenting to.
* @param setExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentInteractive(
consentDescription: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return await promptForConsentInteractive(
consentDescription + '\n\nDo you want to continue?',
addExtensionUpdateConfirmationRequest,
);
}
/**
* Asks users a prompt and awaits for a y/n response on stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param prompt A yes/no prompt to ask the user
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
*/
async function promptForConsentNonInteractive(
prompt: string,
): Promise<boolean> {
const readline = await import('node:readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return new Promise((resolve) => {
rl.question(prompt, (answer) => {
rl.close();
resolve(['y', ''].includes(answer.trim().toLowerCase()));
});
});
}
/**
* Asks users an interactive yes/no prompt.
*
* This should not be called from non-interactive mode as it will break the CLI.
*
* @param prompt A markdown prompt to ask the user
* @param setExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
* @returns Whether or not the user answers yes.
*/
async function promptForConsentInteractive(
prompt: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return await new Promise<boolean>((resolve) => {
addExtensionUpdateConfirmationRequest({
prompt,
onConfirm: (resolvedConfirmed) => {
resolve(resolvedConfirmed);
},
});
});
}
export async function installExtension(
installMetadata: ExtensionInstallMetadata,
requestConsent: (consent: string) => Promise<boolean>,
cwd: string = process.cwd(),
previousExtensionConfig?: ExtensionConfig,
): Promise<string> {
const telemetryConfig = getTelemetryConfig(cwd);
let newExtensionConfig: ExtensionConfig | null = null;
let localSourcePath: string | undefined;
try {
const settings = loadSettings(cwd).merged;
if (!isWorkspaceTrusted(settings)) {
throw new Error(
`Could not install extension from untrusted folder at ${installMetadata.source}`,
);
}
const extensionsDir = ExtensionStorage.getUserExtensionsDir();
await fs.promises.mkdir(extensionsDir, { recursive: true });
if (
!path.isAbsolute(installMetadata.source) &&
(installMetadata.type === 'local' || installMetadata.type === 'link')
) {
installMetadata.source = path.resolve(cwd, installMetadata.source);
}
let tempDir: string | undefined;
if (
installMetadata.type === 'git' ||
installMetadata.type === 'github-release'
) {
tempDir = await ExtensionStorage.createTmpDir();
try {
const result = await downloadFromGitHubRelease(
installMetadata,
tempDir,
);
installMetadata.type = result.type;
installMetadata.releaseTag = result.tagName;
} catch (_error) {
await cloneFromGit(installMetadata, tempDir);
installMetadata.type = 'git';
}
localSourcePath = tempDir;
} else if (
installMetadata.type === 'local' ||
installMetadata.type === 'link'
) {
localSourcePath = installMetadata.source;
} else {
throw new Error(`Unsupported install type: ${installMetadata.type}`);
}
try {
newExtensionConfig = loadExtensionConfig({
extensionDir: localSourcePath,
workspaceDir: cwd,
});
const newExtensionName = newExtensionConfig.name;
const extensionStorage = new ExtensionStorage(newExtensionName);
const destinationPath = extensionStorage.getExtensionDir();
const installedExtensions = loadUserExtensions();
if (
installedExtensions.some(
(installed) => installed.config.name === newExtensionName,
)
) {
throw new Error(
`Extension "${newExtensionName}" is already installed. Please uninstall it first.`,
);
}
await maybeRequestConsentOrFail(
newExtensionConfig,
requestConsent,
previousExtensionConfig,
);
await fs.promises.mkdir(destinationPath, { recursive: true });
if (
installMetadata.type === 'local' ||
installMetadata.type === 'git' ||
installMetadata.type === 'github-release'
) {
await copyExtension(localSourcePath, destinationPath);
}
const metadataString = JSON.stringify(installMetadata, null, 2);
const metadataPath = path.join(
destinationPath,
INSTALL_METADATA_FILENAME,
);
await fs.promises.writeFile(metadataPath, metadataString);
} finally {
if (tempDir) {
await fs.promises.rm(tempDir, { recursive: true, force: true });
}
}
logExtensionInstallEvent(
telemetryConfig,
new ExtensionInstallEvent(
newExtensionConfig!.name,
newExtensionConfig!.version,
installMetadata.source,
'success',
),
);
enableExtension(newExtensionConfig!.name, SettingScope.User);
return newExtensionConfig!.name;
} catch (error) {
// Attempt to load config from the source path even if installation fails
// to get the name and version for logging.
if (!newExtensionConfig && localSourcePath) {
try {
newExtensionConfig = loadExtensionConfig({
extensionDir: localSourcePath,
workspaceDir: cwd,
});
} catch {
// Ignore error, this is just for logging.
}
}
logExtensionInstallEvent(
telemetryConfig,
new ExtensionInstallEvent(
newExtensionConfig?.name ?? '',
newExtensionConfig?.version ?? '',
installMetadata.source,
'error',
),
);
throw error;
}
}
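// Illustrative usage (hypothetical repository URL): install from git, letting
// the consent callback decide how the user is asked.
//
//   const name = await installExtension(
//     { source: 'https://github.com/example/my-extension.git', type: 'git' },
//     (consent) =>
//       promptForConsentNonInteractive(`${consent}\nContinue? [Y/n] `),
//   );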
/**
 * Builds a consent string for installing an extension based on its
* extensionConfig.
*/
function extensionConsentString(extensionConfig: ExtensionConfig): string {
const output: string[] = [];
const mcpServerEntries = Object.entries(extensionConfig.mcpServers || {});
output.push(`Installing extension "${extensionConfig.name}".`);
output.push(
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**',
);
if (mcpServerEntries.length) {
output.push('This extension will run the following MCP servers:');
for (const [key, mcpServer] of mcpServerEntries) {
const isLocal = !!mcpServer.command;
const source =
mcpServer.httpUrl ??
`${mcpServer.command || ''}${mcpServer.args ? ' ' + mcpServer.args.join(' ') : ''}`;
output.push(` * ${key} (${isLocal ? 'local' : 'remote'}): ${source}`);
}
}
if (extensionConfig.contextFileName) {
output.push(
`This extension will append info to your QWEN.md context using ${extensionConfig.contextFileName}`,
);
}
if (extensionConfig.excludeTools) {
output.push(
`This extension will exclude the following core tools: ${extensionConfig.excludeTools}`,
);
}
return output.join('\n');
}
/**
* Requests consent from the user to install an extension (extensionConfig), if
* there is any difference between the consent string for `extensionConfig` and
* `previousExtensionConfig`.
*
* Always requests consent if previousExtensionConfig is null.
*
* Throws if the user does not consent.
*/
async function maybeRequestConsentOrFail(
extensionConfig: ExtensionConfig,
requestConsent: (consent: string) => Promise<boolean>,
previousExtensionConfig?: ExtensionConfig,
) {
const extensionConsent = extensionConsentString(extensionConfig);
if (previousExtensionConfig) {
const previousExtensionConsent = extensionConsentString(
previousExtensionConfig,
);
if (previousExtensionConsent === extensionConsent) {
return;
}
}
if (!(await requestConsent(extensionConsent))) {
throw new Error(`Installation cancelled for "${extensionConfig.name}".`);
}
}
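// Illustrative flow: during an update the previous config is passed along, so
// the user is only re-prompted when the generated consent text has changed.
//
//   await maybeRequestConsentOrFail(newConfig, requestConsent, previousConfig);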
export function validateName(name: string) {
if (!/^[a-zA-Z0-9-]+$/.test(name)) {
throw new Error(
`Invalid extension name: "${name}". Only letters (a-z, A-Z), numbers (0-9), and dashes (-) are allowed.`,
);
}
}
export function loadExtensionConfig(
context: LoadExtensionContext,
): ExtensionConfig {
const { extensionDir, workspaceDir } = context;
const configFilePath = path.join(extensionDir, EXTENSIONS_CONFIG_FILENAME);
if (!fs.existsSync(configFilePath)) {
throw new Error(`Configuration file not found at ${configFilePath}`);
}
try {
const configContent = fs.readFileSync(configFilePath, 'utf-8');
const config = recursivelyHydrateStrings(JSON.parse(configContent), {
extensionPath: extensionDir,
workspacePath: workspaceDir,
'/': path.sep,
pathSeparator: path.sep,
}) as unknown as ExtensionConfig;
if (!config.name || !config.version) {
throw new Error(
`Invalid configuration in ${configFilePath}: missing ${!config.name ? '"name"' : '"version"'}`,
);
}
validateName(config.name);
return config;
} catch (e) {
throw new Error(
`Failed to load extension config from ${configFilePath}: ${getErrorMessage(
e,
)}`,
);
}
}
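// Illustrative config snippet: string values in the config file may reference
// the hydration variables above, e.g.
//
//   { "mcpServers": { "docs": { "command": "node",
//       "args": ["${extensionPath}${/}server.js"] } } }
//
// which is expanded to an absolute path inside the installed extension.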
export async function uninstallExtension(
extensionIdentifier: string,
cwd: string = process.cwd(),
): Promise<void> {
const telemetryConfig = getTelemetryConfig(cwd);
const installedExtensions = loadUserExtensions();
const extensionName = installedExtensions.find(
(installed) =>
installed.config.name.toLowerCase() ===
extensionIdentifier.toLowerCase() ||
installed.installMetadata?.source.toLowerCase() ===
extensionIdentifier.toLowerCase(),
)?.config.name;
if (!extensionName) {
throw new Error(`Extension not found.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
[extensionName],
);
manager.remove(extensionName);
const storage = new ExtensionStorage(extensionName);
await fs.promises.rm(storage.getExtensionDir(), {
recursive: true,
force: true,
});
logExtensionUninstall(
telemetryConfig,
new ExtensionUninstallEvent(extensionName, 'success'),
);
}
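// Illustrative usage: the identifier may be either the extension name or its
// original install source.
//
//   await uninstallExtension('my-extension');
//   await uninstallExtension('https://github.com/example/my-extension.git');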
export function toOutputString(
extension: Extension,
workspaceDir: string,
): string {
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
);
const userEnabled = manager.isEnabled(extension.config.name, os.homedir());
const workspaceEnabled = manager.isEnabled(
extension.config.name,
workspaceDir,
);
const status = workspaceEnabled ? chalk.green('✓') : chalk.red('✗');
let output = `${status} ${extension.config.name} (${extension.config.version})`;
output += `\n Path: ${extension.path}`;
if (extension.installMetadata) {
output += `\n Source: ${extension.installMetadata.source} (Type: ${extension.installMetadata.type})`;
if (extension.installMetadata.ref) {
output += `\n Ref: ${extension.installMetadata.ref}`;
}
if (extension.installMetadata.releaseTag) {
output += `\n Release tag: ${extension.installMetadata.releaseTag}`;
}
}
output += `\n Enabled (User): ${userEnabled}`;
output += `\n Enabled (Workspace): ${workspaceEnabled}`;
if (extension.contextFiles.length > 0) {
output += `\n Context files:`;
extension.contextFiles.forEach((contextFile) => {
output += `\n ${contextFile}`;
});
}
if (extension.config.mcpServers) {
output += `\n MCP servers:`;
Object.keys(extension.config.mcpServers).forEach((key) => {
output += `\n ${key}`;
});
}
if (extension.config.excludeTools) {
output += `\n Excluded tools:`;
extension.config.excludeTools.forEach((tool) => {
output += `\n ${tool}`;
});
}
return output;
}
export function disableExtension(
name: string,
scope: SettingScope,
cwd: string = process.cwd(),
) {
const config = getTelemetryConfig(cwd);
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
throw new Error('System and SystemDefaults scopes are not supported.');
}
const extension = loadExtensionByName(name, cwd);
if (!extension) {
throw new Error(`Extension with name ${name} does not exist.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
[name],
);
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
manager.disable(name, true, scopePath);
logExtensionDisable(config, new ExtensionDisableEvent(name, scope));
}
export function enableExtension(
name: string,
scope: SettingScope,
cwd: string = process.cwd(),
) {
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
throw new Error('System and SystemDefaults scopes are not supported.');
}
const extension = loadExtensionByName(name, cwd);
if (!extension) {
throw new Error(`Extension with name ${name} does not exist.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
);
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
manager.enable(name, true, scopePath);
const config = getTelemetryConfig(cwd);
logExtensionEnable(config, new ExtensionEnableEvent(name, scope));
}
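// Illustrative usage: the scope selects where the enablement override is written.
//
//   enableExtension('my-extension', SettingScope.Workspace); // this workspace only
//   disableExtension('my-extension', SettingScope.User);     // user-wide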

View File

@@ -1,424 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as path from 'node:path';
import fs from 'node:fs';
import os from 'node:os';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { ExtensionEnablementManager, Override } from './extensionEnablement.js';
import type { Extension } from '../extension.js';
// Helper to create a temporary directory for testing
function createTestDir() {
const dirPath = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-test-'));
return {
path: dirPath,
cleanup: () => fs.rmSync(dirPath, { recursive: true, force: true }),
};
}
let testDir: { path: string; cleanup: () => void };
let configDir: string;
let manager: ExtensionEnablementManager;
describe('ExtensionEnablementManager', () => {
beforeEach(() => {
testDir = createTestDir();
configDir = path.join(testDir.path, '.gemini');
manager = new ExtensionEnablementManager(configDir);
});
afterEach(() => {
testDir.cleanup();
// Reset the singleton instance for test isolation
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(ExtensionEnablementManager as any).instance = undefined;
});
describe('isEnabled', () => {
it('should return true if extension is not configured', () => {
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
});
it('should return true if no overrides match', () => {
manager.disable('ext-test', false, '/another/path');
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
});
it('should enable a path based on an override rule', () => {
manager.disable('ext-test', true, '/');
manager.enable('ext-test', true, '/home/user/projects/');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
true,
);
});
it('should disable a path based on a disable override rule', () => {
manager.enable('ext-test', true, '/');
manager.disable('ext-test', true, '/home/user/projects/');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
});
it('should respect the last matching rule (enable wins)', () => {
manager.disable('ext-test', true, '/home/user/projects/');
manager.enable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
true,
);
});
it('should respect the last matching rule (disable wins)', () => {
manager.enable('ext-test', true, '/home/user/projects/');
manager.disable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
});
it('should handle disabling a single subdirectory of a parent enabled with subdirs', () => {
manager.enable('ext-test', true, '/home/user/projects');
manager.disable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
expect(
manager.isEnabled('ext-test', '/home/user/projects/something-else'),
).toBe(true);
});
});
describe('includeSubdirs', () => {
it('should add a glob when enabling with includeSubdirs', () => {
manager.enable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
});
it('should not add a glob when enabling without includeSubdirs', () => {
manager.enable('ext-test', false, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should add a glob when disabling with includeSubdirs', () => {
manager.disable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('!/path/to/dir/*');
});
it('should remove conflicting glob rule when enabling without subdirs', () => {
manager.enable('ext-test', true, '/path/to/dir'); // Adds /path/to/dir/*
manager.enable('ext-test', false, '/path/to/dir'); // Should remove the glob
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should remove conflicting non-glob rule when enabling with subdirs', () => {
manager.enable('ext-test', false, '/path/to/dir'); // Adds /path/to/dir/
manager.enable('ext-test', true, '/path/to/dir'); // Should remove the non-glob
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/');
});
it('should remove conflicting rules when disabling', () => {
manager.enable('ext-test', true, '/path/to/dir'); // enabled with glob
manager.disable('ext-test', false, '/path/to/dir'); // disabled without
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('!/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should correctly evaluate isEnabled with subdirs', () => {
manager.disable('ext-test', true, '/');
manager.enable('ext-test', true, '/path/to/dir');
expect(manager.isEnabled('ext-test', '/path/to/dir/')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/dir/sub/')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/another/')).toBe(false);
});
it('should correctly evaluate isEnabled without subdirs', () => {
manager.disable('ext-test', true, '/*');
manager.enable('ext-test', false, '/path/to/dir');
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/dir/sub')).toBe(false);
});
});
describe('pruning child rules', () => {
it('should remove child rules when enabling a parent with subdirs', () => {
// Pre-existing rules for children
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.disable('ext-test', true, '/path/to/dir/subdir2');
manager.enable('ext-test', false, '/path/to/another/dir');
// Enable the parent directory
manager.enable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
// The new parent rule should be present
expect(overrides).toContain(`/path/to/dir/*`);
// Child rules should be removed
expect(overrides).not.toContain('/path/to/dir/subdir1/');
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
// Unrelated rules should remain
expect(overrides).toContain('/path/to/another/dir/');
});
it('should remove child rules when disabling a parent with subdirs', () => {
// Pre-existing rules for children
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.disable('ext-test', true, '/path/to/dir/subdir2');
manager.enable('ext-test', false, '/path/to/another/dir');
// Disable the parent directory
manager.disable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
// The new parent rule should be present
expect(overrides).toContain(`!/path/to/dir/*`);
// Child rules should be removed
expect(overrides).not.toContain('/path/to/dir/subdir1/');
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
// Unrelated rules should remain
expect(overrides).toContain('/path/to/another/dir/');
});
it('should not remove child rules if includeSubdirs is false', () => {
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.enable('ext-test', false, '/path/to/dir'); // Not including subdirs
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
expect(overrides).toContain('/path/to/dir/subdir1/');
expect(overrides).toContain('/path/to/dir/');
});
});
it('should enable a path based on an enable override', () => {
manager.disable('ext-test', true, '/Users/chrstn');
manager.enable('ext-test', true, '/Users/chrstn/gemini-cli');
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
true,
);
});
it('should ignore subdirs', () => {
manager.disable('ext-test', false, '/Users/chrstn');
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
true,
);
});
describe('extension overrides (-e <name>)', () => {
beforeEach(() => {
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
});
it('can enable extensions, case-insensitive', () => {
manager.disable('ext-test', true, '/');
expect(manager.isEnabled('ext-test', '/')).toBe(true);
expect(manager.isEnabled('Ext-Test', '/')).toBe(true);
// Double check that it would have been disabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
).toBe(false);
});
it('disable all other extensions', () => {
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
manager.enable('ext-test-2', true, '/');
expect(manager.isEnabled('ext-test-2', '/')).toBe(false);
// Double check that it would have been enabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test-2', '/'),
).toBe(true);
});
it('none disables all extensions', () => {
manager = new ExtensionEnablementManager(configDir, ['none']);
manager.enable('ext-test', true, '/');
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(false);
// Double check that it would have been enabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
).toBe(true);
});
});
describe('validateExtensionOverrides', () => {
let consoleErrorSpy: ReturnType<typeof vi.spyOn>;
beforeEach(() => {
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
});
afterEach(() => {
consoleErrorSpy.mockRestore();
});
it('should not log an error if enabledExtensionNamesOverride is empty', () => {
const manager = new ExtensionEnablementManager(configDir, []);
manager.validateExtensionOverrides([]);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
it('should not log an error if all enabledExtensionNamesOverride are valid', () => {
const manager = new ExtensionEnablementManager(configDir, [
'ext-one',
'ext-two',
]);
const extensions = [
{ config: { name: 'ext-one' } },
{ config: { name: 'ext-two' } },
] as Extension[];
manager.validateExtensionOverrides(extensions);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
it('should log an error for each invalid extension name in enabledExtensionNamesOverride', () => {
const manager = new ExtensionEnablementManager(configDir, [
'ext-one',
'ext-invalid',
'ext-another-invalid',
]);
const extensions = [
{ config: { name: 'ext-one' } },
{ config: { name: 'ext-two' } },
] as Extension[];
manager.validateExtensionOverrides(extensions);
expect(consoleErrorSpy).toHaveBeenCalledTimes(2);
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Extension not found: ext-invalid',
);
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Extension not found: ext-another-invalid',
);
});
it('should not log an error if "none" is in enabledExtensionNamesOverride', () => {
const manager = new ExtensionEnablementManager(configDir, ['none']);
manager.validateExtensionOverrides([]);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
});
});
describe('Override', () => {
it('should create an override from input', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should create a disable override from input', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(true);
expect(override.includeSubdirs).toBe(false);
});
it('should create an override from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir');
expect(override.baseRule).toBe('/path/to/dir');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(false);
});
it('should create a disable override from a file rule', () => {
const override = Override.fromFileRule('!/path/to/dir/');
expect(override.isDisable).toBe(true);
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.includeSubdirs).toBe(false);
});
it('should create an override with subdirs from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir/*');
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should correctly identify conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', false);
expect(override1.conflictsWith(override2)).toBe(true);
});
it('should correctly identify non-conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/another/dir', true);
expect(override1.conflictsWith(override2)).toBe(false);
});
it('should correctly identify equal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(true);
});
it('should correctly identify unequal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('!/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(false);
});
it('should generate the correct regex', () => {
const override = Override.fromInput('/path/to/dir', true);
const regex = override.asRegex();
expect(regex.test('/path/to/dir/')).toBe(true);
expect(regex.test('/path/to/dir/subdir')).toBe(true);
expect(regex.test('/path/to/another/dir')).toBe(false);
});
it('should correctly identify child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify child overrides with glob', () => {
const parent = Override.fromInput('/path/to/dir/*', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify non-child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const other = Override.fromInput('/path/to/another/dir', false);
expect(other.isChildOf(parent)).toBe(false);
});
it('should generate the correct output string', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.output()).toBe(`/path/to/dir/*`);
});
it('should generate the correct output string for a disable override', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
it('should disable a path based on a disable override rule', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
});

View File

@@ -1,239 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import fs from 'node:fs';
import path from 'node:path';
import { type Extension } from '../extension.js';
export interface ExtensionEnablementConfig {
overrides: string[];
}
export interface AllExtensionsEnablementConfig {
[extensionName: string]: ExtensionEnablementConfig;
}
export class Override {
constructor(
public baseRule: string,
public isDisable: boolean,
public includeSubdirs: boolean,
) {}
static fromInput(inputRule: string, includeSubdirs: boolean): Override {
const isDisable = inputRule.startsWith('!');
let baseRule = isDisable ? inputRule.substring(1) : inputRule;
baseRule = ensureLeadingAndTrailingSlash(baseRule);
return new Override(baseRule, isDisable, includeSubdirs);
}
static fromFileRule(fileRule: string): Override {
const isDisable = fileRule.startsWith('!');
let baseRule = isDisable ? fileRule.substring(1) : fileRule;
const includeSubdirs = baseRule.endsWith('*');
baseRule = includeSubdirs
? baseRule.substring(0, baseRule.length - 1)
: baseRule;
return new Override(baseRule, isDisable, includeSubdirs);
}
conflictsWith(other: Override): boolean {
if (this.baseRule === other.baseRule) {
return (
this.includeSubdirs !== other.includeSubdirs ||
this.isDisable !== other.isDisable
);
}
return false;
}
isEqualTo(other: Override): boolean {
return (
this.baseRule === other.baseRule &&
this.includeSubdirs === other.includeSubdirs &&
this.isDisable === other.isDisable
);
}
asRegex(): RegExp {
return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
}
isChildOf(parent: Override) {
if (!parent.includeSubdirs) {
return false;
}
return parent.asRegex().test(this.baseRule);
}
output(): string {
return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
}
matchesPath(path: string) {
return this.asRegex().test(path);
}
}
const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
// Normalize separators to forward slashes for consistent matching across platforms.
let result = dirPath.replace(/\\/g, '/');
if (result.charAt(0) !== '/') {
result = '/' + result;
}
if (result.charAt(result.length - 1) !== '/') {
result = result + '/';
}
return result;
};
/**
* Converts a glob pattern to a RegExp object.
* This is a simplified implementation that supports `*`.
*
* @param glob The glob pattern to convert.
* @returns A RegExp object.
*/
function globToRegex(glob: string): RegExp {
const regexString = glob
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
.replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group
return new RegExp(`^${regexString}$`);
}
export class ExtensionEnablementManager {
private configFilePath: string;
private configDir: string;
// If non-empty, this overrides all other extension configuration and enables
// only the ones in this list.
private enabledExtensionNamesOverride: string[];
constructor(configDir: string, enabledExtensionNames?: string[]) {
this.configDir = configDir;
this.configFilePath = path.join(configDir, 'extension-enablement.json');
this.enabledExtensionNamesOverride =
enabledExtensionNames?.map((name) => name.toLowerCase()) ?? [];
}
validateExtensionOverrides(extensions: Extension[]) {
for (const name of this.enabledExtensionNamesOverride) {
if (name === 'none') continue;
if (
!extensions.some(
(ext) => ext.config.name.toLowerCase() === name.toLowerCase(),
)
) {
console.error(`Extension not found: ${name}`);
}
}
}
/**
* Determines if an extension is enabled based on its name and the current
* path. The last matching rule in the overrides list wins.
*
* @param extensionName The name of the extension.
* @param currentPath The absolute path of the current working directory.
* @returns True if the extension is enabled, false otherwise.
*/
isEnabled(extensionName: string, currentPath: string): boolean {
// If we have a single override called 'none', this disables all extensions.
// Typically, this comes from the user passing `-e none`.
if (
this.enabledExtensionNamesOverride.length === 1 &&
this.enabledExtensionNamesOverride[0] === 'none'
) {
return false;
}
// If we have explicit overrides, only enable those extensions.
if (this.enabledExtensionNamesOverride.length > 0) {
// When checking against overrides ONLY, we use a case insensitive match.
// The override names are already lowercased in the constructor.
return this.enabledExtensionNamesOverride.includes(
extensionName.toLocaleLowerCase(),
);
}
// Otherwise, we use the configuration settings
const config = this.readConfig();
const extensionConfig = config[extensionName];
// Extensions are enabled by default.
let enabled = true;
const allOverrides = extensionConfig?.overrides ?? [];
for (const rule of allOverrides) {
const override = Override.fromFileRule(rule);
if (override.matchesPath(ensureLeadingAndTrailingSlash(currentPath))) {
enabled = !override.isDisable;
}
}
return enabled;
}
readConfig(): AllExtensionsEnablementConfig {
try {
const content = fs.readFileSync(this.configFilePath, 'utf-8');
return JSON.parse(content);
} catch (error) {
if (
error instanceof Error &&
'code' in error &&
error.code === 'ENOENT'
) {
return {};
}
console.error('Error reading extension enablement config:', error);
return {};
}
}
writeConfig(config: AllExtensionsEnablementConfig): void {
fs.mkdirSync(this.configDir, { recursive: true });
fs.writeFileSync(this.configFilePath, JSON.stringify(config, null, 2));
}
enable(
extensionName: string,
includeSubdirs: boolean,
scopePath: string,
): void {
const config = this.readConfig();
if (!config[extensionName]) {
config[extensionName] = { overrides: [] };
}
const override = Override.fromInput(scopePath, includeSubdirs);
const overrides = config[extensionName].overrides.filter((rule) => {
const fileOverride = Override.fromFileRule(rule);
if (
fileOverride.conflictsWith(override) ||
fileOverride.isEqualTo(override)
) {
return false; // Remove conflicts and equivalent values.
}
return !fileOverride.isChildOf(override);
});
overrides.push(override.output());
config[extensionName].overrides = overrides;
this.writeConfig(config);
}
disable(
extensionName: string,
includeSubdirs: boolean,
scopePath: string,
): void {
this.enable(extensionName, includeSubdirs, `!${scopePath}`);
}
remove(extensionName: string): void {
const config = this.readConfig();
if (config[extensionName]) {
delete config[extensionName];
this.writeConfig(config);
}
}
}

View File

@@ -1,468 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
EXTENSIONS_CONFIG_FILENAME,
ExtensionStorage,
INSTALL_METADATA_FILENAME,
annotateActiveExtensions,
loadExtension,
} from '../extension.js';
import { checkForAllExtensionUpdates, updateExtension } from './update.js';
import { QWEN_DIR } from '@qwen-code/qwen-code-core';
import { isWorkspaceTrusted } from '../trustedFolders.js';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { createExtension } from '../../test-utils/createExtension.js';
import { ExtensionEnablementManager } from './extensionEnablement.js';
const mockGit = {
clone: vi.fn(),
getRemotes: vi.fn(),
fetch: vi.fn(),
checkout: vi.fn(),
listRemote: vi.fn(),
revparse: vi.fn(),
// Not part of the actual simple-git API; the tests use it to perform file
// system interactions against the path passed to simpleGit().
path: vi.fn(),
};
vi.mock('simple-git', () => ({
simpleGit: vi.fn((path: string) => {
mockGit.path.mockReturnValue(path);
return mockGit;
}),
}));
vi.mock('../extensions/github.js', async (importOriginal) => {
const actual =
await importOriginal<typeof import('../extensions/github.js')>();
return {
...actual,
downloadFromGitHubRelease: vi
.fn()
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
};
});
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: vi.fn(),
};
});
vi.mock('../trustedFolders.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../trustedFolders.js')>();
return {
...actual,
isWorkspaceTrusted: vi.fn(),
};
});
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
const actual =
await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
return {
...actual,
logExtensionInstallEvent: mockLogExtensionInstallEvent,
logExtensionUninstall: mockLogExtensionUninstall,
ExtensionInstallEvent: vi.fn(),
ExtensionUninstallEvent: vi.fn(),
};
});
describe('update tests', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'qwen-code-test-home-'),
);
tempWorkspaceDir = fs.mkdtempSync(
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
);
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
// Clean up before each test
fs.rmSync(userExtensionsDir, { recursive: true, force: true });
fs.mkdirSync(userExtensionsDir, { recursive: true });
vi.mocked(isWorkspaceTrusted).mockReturnValue({
isTrusted: true,
source: 'file',
});
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
Object.values(mockGit).forEach((fn) => fn.mockReset());
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
});
describe('updateExtension', () => {
it('should update a git-installed extension', async () => {
const gitUrl = 'https://github.com/google/gemini-extensions.git';
const extensionName = 'qwen-extensions';
const targetExtDir = path.join(userExtensionsDir, extensionName);
const metadataPath = path.join(targetExtDir, INSTALL_METADATA_FILENAME);
fs.mkdirSync(targetExtDir, { recursive: true });
fs.writeFileSync(
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.0.0' }),
);
fs.writeFileSync(
metadataPath,
JSON.stringify({ source: gitUrl, type: 'git' }),
);
mockGit.clone.mockImplementation(async (_, destination) => {
fs.mkdirSync(path.join(mockGit.path(), destination), {
recursive: true,
});
fs.writeFileSync(
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.1.0' }),
);
});
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: targetExtDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const updateInfo = await updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
() => {},
);
expect(updateInfo).toEqual({
name: 'qwen-extensions',
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
});
const updatedConfig = JSON.parse(
fs.readFileSync(
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
'utf-8',
),
);
expect(updatedConfig.version).toBe('1.1.0');
});
it('should call setExtensionUpdateState with UPDATING and then UPDATED_NEEDS_RESTART on success', async () => {
const extensionName = 'test-extension';
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: extensionName,
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
mockGit.clone.mockImplementation(async (_, destination) => {
fs.mkdirSync(path.join(mockGit.path(), destination), {
recursive: true,
});
fs.writeFileSync(
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.1.0' }),
);
});
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const dispatch = vi.fn();
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
await updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
dispatch,
);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATING,
},
});
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
});
it('should call setExtensionUpdateState with ERROR on failure', async () => {
const extensionName = 'test-extension';
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: extensionName,
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
mockGit.clone.mockRejectedValue(new Error('Git clone failed'));
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const dispatch = vi.fn();
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
await expect(
updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
dispatch,
),
).rejects.toThrow();
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATING,
},
});
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.ERROR,
},
});
});
});
describe('checkForAllExtensionUpdates', () => {
it('should return UpdateAvailable for a git extension with updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('remoteHash HEAD');
mockGit.revparse.mockResolvedValue('localHash');
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});
it('should return UpToDate for a git extension with no updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('sameHash HEAD');
mockGit.revparse.mockResolvedValue('sameHash');
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});
it('should return UpToDate for a local extension with no updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.0.0',
});
const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});
it('should return UpdateAvailable for a local extension with updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.1.0',
});
const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});
it('should return Error when git check fails', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'error-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockRejectedValue(new Error('Git error'));
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'error-extension',
state: ExtensionUpdateState.ERROR,
},
});
});
});
});

View File

@@ -1,182 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import {
type ExtensionUpdateAction,
ExtensionUpdateState,
type ExtensionUpdateStatus,
} from '../../ui/state/extensions.js';
import {
copyExtension,
installExtension,
uninstallExtension,
loadExtension,
loadInstallMetadata,
ExtensionStorage,
loadExtensionConfig,
} from '../extension.js';
import { checkForExtensionUpdate } from './github.js';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import { getErrorMessage } from '../../utils/errors.js';
export interface ExtensionUpdateInfo {
name: string;
originalVersion: string;
updatedVersion: string;
}
export async function updateExtension(
extension: GeminiCLIExtension,
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
currentState: ExtensionUpdateState,
dispatchExtensionStateUpdate: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo | undefined> {
if (currentState === ExtensionUpdateState.UPDATING) {
return undefined;
}
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UPDATING },
});
const installMetadata = loadInstallMetadata(extension.path);
if (!installMetadata?.type) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error(
`Extension ${extension.name} cannot be updated, type is unknown.`,
);
}
if (installMetadata?.type === 'link') {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UP_TO_DATE },
});
throw new Error(`Extension is linked, so it does not need to be updated.`);
}
const originalVersion = extension.version;
const tempDir = await ExtensionStorage.createTmpDir();
try {
await copyExtension(extension.path, tempDir);
const previousExtensionConfig = await loadExtensionConfig({
extensionDir: extension.path,
workspaceDir: cwd,
});
await uninstallExtension(extension.name, cwd);
await installExtension(
installMetadata,
requestConsent,
cwd,
previousExtensionConfig,
);
const updatedExtensionStorage = new ExtensionStorage(extension.name);
const updatedExtension = loadExtension({
extensionDir: updatedExtensionStorage.getExtensionDir(),
workspaceDir: cwd,
});
if (!updatedExtension) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error('Updated extension not found after installation.');
}
const updatedVersion = updatedExtension.config.version;
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
return {
name: extension.name,
originalVersion,
updatedVersion,
};
} catch (e) {
console.error(
`Error updating extension, rolling back. ${getErrorMessage(e)}`,
);
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
await copyExtension(tempDir, extension.path);
throw e;
} finally {
await fs.promises.rm(tempDir, { recursive: true, force: true });
}
}
export async function updateAllUpdatableExtensions(
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
extensions: GeminiCLIExtension[],
extensionsState: Map<string, ExtensionUpdateStatus>,
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo[]> {
return (
await Promise.all(
extensions
.filter(
(extension) =>
extensionsState.get(extension.name)?.status ===
ExtensionUpdateState.UPDATE_AVAILABLE,
)
.map((extension) =>
updateExtension(
extension,
cwd,
requestConsent,
extensionsState.get(extension.name)!.status,
dispatch,
),
),
)
).filter((updateInfo) => !!updateInfo);
}
export interface ExtensionUpdateCheckResult {
state: ExtensionUpdateState;
error?: string;
}
export async function checkForAllExtensionUpdates(
extensions: GeminiCLIExtension[],
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<void> {
dispatch({ type: 'BATCH_CHECK_START' });
const promises: Array<Promise<void>> = [];
for (const extension of extensions) {
if (!extension.installMetadata) {
dispatch({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.NOT_UPDATABLE,
},
});
continue;
}
promises.push(
checkForExtensionUpdate(extension, (updatedState) => {
dispatch({
type: 'SET_STATE',
payload: { name: extension.name, state: updatedState },
});
}),
);
}
await Promise.all(promises);
dispatch({ type: 'BATCH_CHECK_END' });
}

View File

@@ -51,7 +51,6 @@ import {
import * as fs from 'node:fs'; // fs will be mocked separately
import stripJsonComments from 'strip-json-comments'; // Will be mocked separately
import { isWorkspaceTrusted } from './trustedFolders.js';
import { disableExtension } from './extension.js';
// These imports will get the versions from the vi.mock('./settings.js', ...) factory.
import {
@@ -64,8 +63,6 @@ import {
needsMigration,
type Settings,
loadEnvironment,
migrateDeprecatedSettings,
SettingScope,
SETTINGS_VERSION,
SETTINGS_VERSION_KEY,
} from './settings.js';
@@ -2649,122 +2646,4 @@ describe('Settings Loading and Merging', () => {
});
});
});
describe('migrateDeprecatedSettings', () => {
let mockFsExistsSync: Mocked<typeof fs.existsSync>;
let mockFsReadFileSync: Mocked<typeof fs.readFileSync>;
let mockDisableExtension: Mocked<typeof disableExtension>;
beforeEach(() => {
vi.resetAllMocks();
mockFsExistsSync = vi.mocked(fs.existsSync);
mockFsReadFileSync = vi.mocked(fs.readFileSync);
mockDisableExtension = vi.mocked(disableExtension);
(mockFsExistsSync as Mock).mockReturnValue(true);
vi.mocked(isWorkspaceTrusted).mockReturnValue(true);
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should migrate disabled extensions from user and workspace settings', () => {
const userSettingsContent = {
extensions: {
disabled: ['user-ext-1', 'shared-ext'],
},
};
const workspaceSettingsContent = {
extensions: {
disabled: ['workspace-ext-1', 'shared-ext'],
},
};
(mockFsReadFileSync as Mock).mockImplementation(
(p: fs.PathOrFileDescriptor) => {
if (p === USER_SETTINGS_PATH)
return JSON.stringify(userSettingsContent);
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
return JSON.stringify(workspaceSettingsContent);
return '{}';
},
);
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
// Check user settings migration
expect(mockDisableExtension).toHaveBeenCalledWith(
'user-ext-1',
SettingScope.User,
MOCK_WORKSPACE_DIR,
);
expect(mockDisableExtension).toHaveBeenCalledWith(
'shared-ext',
SettingScope.User,
MOCK_WORKSPACE_DIR,
);
// Check workspace settings migration
expect(mockDisableExtension).toHaveBeenCalledWith(
'workspace-ext-1',
SettingScope.Workspace,
MOCK_WORKSPACE_DIR,
);
expect(mockDisableExtension).toHaveBeenCalledWith(
'shared-ext',
SettingScope.Workspace,
MOCK_WORKSPACE_DIR,
);
// Check that setValue was called to remove the deprecated setting
expect(setValueSpy).toHaveBeenCalledWith(
SettingScope.User,
'extensions',
{
disabled: undefined,
},
);
expect(setValueSpy).toHaveBeenCalledWith(
SettingScope.Workspace,
'extensions',
{
disabled: undefined,
},
);
});
it('should not do anything if there are no deprecated settings', () => {
const userSettingsContent = {
extensions: {
enabled: ['user-ext-1'],
},
};
const workspaceSettingsContent = {
someOtherSetting: 'value',
};
(mockFsReadFileSync as Mock).mockImplementation(
(p: fs.PathOrFileDescriptor) => {
if (p === USER_SETTINGS_PATH)
return JSON.stringify(userSettingsContent);
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
return JSON.stringify(workspaceSettingsContent);
return '{}';
},
);
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
expect(mockDisableExtension).not.toHaveBeenCalled();
expect(setValueSpy).not.toHaveBeenCalled();
});
});
});

View File

@@ -30,7 +30,6 @@ import {
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
import { customDeepMerge, type MergeableObject } from '../utils/deepMerge.js';
import { updateSettingsFilePreservingFormat } from '../utils/commentJson.js';
import { disableExtension } from './extension.js';
function getMergeStrategyForPath(path: string[]): MergeStrategy | undefined {
let current: SettingDefinition | undefined = undefined;
@@ -81,7 +80,6 @@ const MIGRATION_MAP: Record<string, string> = {
excludeTools: 'tools.exclude',
excludeMCPServers: 'mcp.excluded',
excludedProjectEnvVars: 'advanced.excludedEnvVars',
extensionManagement: 'experimental.extensionManagement',
extensions: 'extensions',
fileFiltering: 'context.fileFiltering',
folderTrustFeature: 'security.folderTrust.featureEnabled',
@@ -812,31 +810,6 @@ export function loadSettings(
);
}
export function migrateDeprecatedSettings(
loadedSettings: LoadedSettings,
workspaceDir: string = process.cwd(),
): void {
const processScope = (scope: SettingScope) => {
const settings = loadedSettings.forScope(scope).settings;
if (settings.extensions?.disabled) {
console.log(
`Migrating deprecated extensions.disabled settings from ${scope} settings...`,
);
for (const extension of settings.extensions.disabled ?? []) {
disableExtension(extension, scope, workspaceDir);
}
const newExtensionsValue = { ...settings.extensions };
newExtensionsValue.disabled = undefined;
loadedSettings.setValue(scope, 'extensions', newExtensionsValue);
}
};
processScope(SettingScope.User);
processScope(SettingScope.Workspace);
}
export function saveSettings(settingsFile: SettingsFile): void {
try {
// Ensure the directory exists

View File

@@ -1192,15 +1192,6 @@ const SETTINGS_SCHEMA = {
description: 'Setting to enable experimental features',
showInDialog: false,
properties: {
extensionManagement: {
type: 'boolean',
label: 'Extension Management',
category: 'Experimental',
requiresRestart: true,
default: true,
description: 'Enable extension management features.',
showInDialog: false,
},
visionModelPreview: {
type: 'boolean',
label: 'Vision Model Preview',
@@ -1223,39 +1214,6 @@ const SETTINGS_SCHEMA = {
},
},
},
extensions: {
type: 'object',
label: 'Extensions',
category: 'Extensions',
requiresRestart: true,
default: {},
description: 'Settings for extensions.',
showInDialog: false,
properties: {
disabled: {
type: 'array',
label: 'Disabled Extensions',
category: 'Extensions',
requiresRestart: true,
default: [] as string[],
description: 'List of disabled extensions.',
showInDialog: false,
mergeStrategy: MergeStrategy.UNION,
},
workspacesWithMigrationNudge: {
type: 'array',
label: 'Workspaces with Migration Nudge',
category: 'Extensions',
requiresRestart: false,
default: [] as string[],
description:
'List of workspaces for which the migration nudge has been shown.',
showInDialog: false,
mergeStrategy: MergeStrategy.UNION,
},
},
},
} as const satisfies SettingsSchema;
export type SettingsSchemaType = typeof SETTINGS_SCHEMA;

View File

@@ -15,6 +15,7 @@ import { type LoadedSettings, SettingScope } from '../config/settings.js';
import { performInitialAuth } from './auth.js';
import { validateTheme } from './theme.js';
import { initializeI18n } from '../i18n/index.js';
import { initializeLlmOutputLanguage } from '../ui/commands/languageCommand.js';
export interface InitializationResult {
authError: string | null;
@@ -41,6 +42,9 @@ export async function initializeApp(
'auto';
await initializeI18n(languageSetting);
// Auto-detect and set LLM output language on first use
initializeLlmOutputLanguage();
const authType = settings.merged.security?.auth?.selectedType;
const authError = await performInitialAuth(config, authType);

View File

@@ -262,7 +262,6 @@ describe('gemini.tsx main function', () => {
);
const { loadSettings } = await import('./config/settings.js');
const cleanupModule = await import('./utils/cleanup.js');
const extensionModule = await import('./config/extension.js');
const validatorModule = await import('./validateNonInterActiveAuth.js');
const streamJsonModule = await import('./nonInteractive/session.js');
const initializerModule = await import('./core/initializer.js');
@@ -275,11 +274,6 @@ describe('gemini.tsx main function', () => {
vi.mocked(cleanupModule.registerCleanup).mockImplementation(() => {});
const runExitCleanupMock = vi.mocked(cleanupModule.runExitCleanup);
runExitCleanupMock.mockResolvedValue(undefined);
vi.spyOn(extensionModule, 'loadExtensions').mockReturnValue([]);
vi.spyOn(
extensionModule.ExtensionStorage,
'getUserExtensionsDir',
).mockReturnValue('/tmp/extensions');
vi.spyOn(initializerModule, 'initializeApp').mockResolvedValue({
authError: null,
themeError: null,

View File

@@ -15,9 +15,8 @@ import React from 'react';
import { validateAuthMethod } from './config/auth.js';
import * as cliConfig from './config/config.js';
import { loadCliConfig, parseArguments } from './config/config.js';
import { ExtensionStorage, loadExtensions } from './config/extension.js';
import type { DnsResolutionOrder, LoadedSettings } from './config/settings.js';
import { loadSettings, migrateDeprecatedSettings } from './config/settings.js';
import { loadSettings } from './config/settings.js';
import {
initializeApp,
type InitializationResult,
@@ -103,7 +102,6 @@ function getNodeMemoryArgs(isDebugMode: boolean): string[] {
return [];
}
import { ExtensionEnablementManager } from './config/extensions/extensionEnablement.js';
import { loadSandboxConfig } from './config/sandboxConfig.js';
import { runAcpAgent } from './acp-integration/acpAgent.js';
@@ -200,10 +198,9 @@ export async function startInteractiveUI(
export async function main() {
setupUnhandledRejectionHandler();
const settings = loadSettings();
migrateDeprecatedSettings(settings);
await cleanupCheckpoints();
let argv = await parseArguments(settings.merged);
let argv = await parseArguments();
// Check for invalid input combinations early to prevent crashes
if (argv.promptInteractive && !process.stdin.isTTY) {
@@ -245,9 +242,9 @@ export async function main() {
if (sandboxConfig) {
const partialConfig = await loadCliConfig(
settings.merged,
[],
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
argv,
undefined,
[],
);
if (
@@ -331,25 +328,21 @@ export async function main() {
// to run Gemini CLI. It is now safe to perform expensive initialization that
// may have side effects.
{
const extensionEnablementManager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
argv.extensions,
);
const extensions = loadExtensions(extensionEnablementManager);
const config = await loadCliConfig(
settings.merged,
extensions,
extensionEnablementManager,
argv,
process.cwd(),
argv.extensions,
);
if (config.getListExtensions()) {
console.log('Installed extensions:');
for (const extension of extensions) {
console.log(`- ${extension.config.name}`);
}
process.exit(0);
}
// FIXME: list extensions after the config is initialized
// if (config.getListExtensions()) {
// console.log('Installed extensions:');
// for (const extension of extensions) {
// console.log(`- ${extension.config.name}`);
// }
// process.exit(0);
// }
// Setup unified ConsolePatcher based on interactive mode
const isInteractive = config.isInteractive();
@@ -395,7 +388,7 @@ export async function main() {
}
if (config.getExperimentalZedIntegration()) {
return runAcpAgent(config, settings, extensions, argv);
return runAcpAgent(config, settings, argv);
}
let input = config.getQuestion();

View File

@@ -1,6 +1,6 @@
/**
* @license
* Copyright 2025 Qwen
* Copyright 2025 Qwen team
* SPDX-License-Identifier: Apache-2.0
*/
@@ -8,15 +8,21 @@ import * as fs from 'node:fs';
import * as path from 'node:path';
import { fileURLToPath, pathToFileURL } from 'node:url';
import { homedir } from 'node:os';
import {
type SupportedLanguage,
getLanguageNameFromLocale,
} from './languages.js';
export type SupportedLanguage = 'en' | 'zh' | 'ru' | string; // Allow custom language codes
export type { SupportedLanguage };
export { getLanguageNameFromLocale };
// State
let currentLanguage: SupportedLanguage = 'en';
let translations: Record<string, string> = {};
let translations: Record<string, string | string[]> = {};
// Cache
type TranslationDict = Record<string, string>;
type TranslationValue = string | string[];
type TranslationDict = Record<string, TranslationValue>;
const translationCache: Record<string, TranslationDict> = {};
const loadingPromises: Record<string, Promise<TranslationDict>> = {};
@@ -52,11 +58,13 @@ export function detectSystemLanguage(): SupportedLanguage {
if (envLang?.startsWith('zh')) return 'zh';
if (envLang?.startsWith('en')) return 'en';
if (envLang?.startsWith('ru')) return 'ru';
if (envLang?.startsWith('de')) return 'de';
try {
const locale = Intl.DateTimeFormat().resolvedOptions().locale;
if (locale.startsWith('zh')) return 'zh';
if (locale.startsWith('ru')) return 'ru';
if (locale.startsWith('de')) return 'de';
} catch {
// Fallback to default
}
@@ -224,9 +232,25 @@ export function getCurrentLanguage(): SupportedLanguage {
export function t(key: string, params?: Record<string, string>): string {
const translation = translations[key] ?? key;
if (Array.isArray(translation)) {
return key;
}
return interpolate(translation, params);
}
/**
* Get a translation that is an array of strings.
* @param key The translation key
* @returns The array of strings, or an empty array if not found or not an array
*/
export function ta(key: string): string[] {
const translation = translations[key];
if (Array.isArray(translation)) {
return translation;
}
return [];
}
export async function initializeI18n(
lang?: SupportedLanguage | 'auto',
): Promise<void> {
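
For reference, a minimal sketch (not part of the diff) of how the two accessors differ once array-valued entries exist. The key name is taken from the translation files later in this compare; the relative import path is an assumption:

import { t, ta } from './i18n.js';

// Plain string entries still go through t(), with interpolation:
t('(esc to cancel, {{time}})', { time: '30s' });

// Array entries are opaque to t() (it returns the key unchanged);
// ta() returns the array, or [] when the key is missing or not an array:
const phrases = ta('WITTY_LOADING_PHRASES'); // string[]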

View File

@@ -0,0 +1,48 @@
/**
* @license
* Copyright 2025 Qwen team
* SPDX-License-Identifier: Apache-2.0
*/
export type SupportedLanguage = 'en' | 'zh' | 'ru' | 'de' | string;
export interface LanguageDefinition {
/** The internal locale code used by the i18n system (e.g., 'en', 'zh'). */
code: SupportedLanguage;
/** The standard name used in UI settings (e.g., 'en-US', 'zh-CN'). */
id: string;
/** The full English name of the language (e.g., 'English', 'Chinese'). */
fullName: string;
}
export const SUPPORTED_LANGUAGES: readonly LanguageDefinition[] = [
{
code: 'en',
id: 'en-US',
fullName: 'English',
},
{
code: 'zh',
id: 'zh-CN',
fullName: 'Chinese',
},
{
code: 'ru',
id: 'ru-RU',
fullName: 'Russian',
},
{
code: 'de',
id: 'de-DE',
fullName: 'German',
},
];
/**
* Maps a locale code to its English language name.
* Used for LLM output language instructions.
*/
export function getLanguageNameFromLocale(locale: SupportedLanguage): string {
const lang = SUPPORTED_LANGUAGES.find((l) => l.code === locale);
return lang?.fullName || 'English';
}
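
A small usage sketch (the relative import path is assumed) showing how the same table can serve both the UI-settings id and the English name used in LLM output instructions:

import { SUPPORTED_LANGUAGES, getLanguageNameFromLocale } from './languages.js';

// Map a settings id such as 'de-DE' back to its definition:
const def = SUPPORTED_LANGUAGES.find((l) => l.id === 'de-DE');
// -> { code: 'de', id: 'de-DE', fullName: 'German' }

// Resolve the English name for the LLM output-language instruction:
getLanguageNameFromLocale('de'); // 'German'
getLanguageNameFromLocale('xx'); // 'English' (fallback)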

View File

@@ -102,8 +102,8 @@ export default {
'Theme "{{themeName}}" not found.': 'Theme "{{themeName}}" not found.',
'Theme "{{themeName}}" not found in selected scope.':
'Theme "{{themeName}}" not found in selected scope.',
'clear the screen and conversation history':
'clear the screen and conversation history',
'Clear conversation history and free up context':
'Clear conversation history and free up context',
'Compresses the context by replacing it with a summary.':
'Compresses the context by replacing it with a summary.',
'open full Qwen Code documentation in your browser':
@@ -612,9 +612,10 @@ export default {
// ============================================================================
// Commands - Clear
// ============================================================================
'Clearing terminal and resetting chat.':
'Clearing terminal and resetting chat.',
'Clearing terminal.': 'Clearing terminal.',
'Starting a new session, resetting chat, and clearing terminal.':
'Starting a new session, resetting chat, and clearing terminal.',
'Starting a new session and clearing.':
'Starting a new session and clearing.',
// ============================================================================
// Commands - Compress
@@ -935,192 +936,138 @@ export default {
// ============================================================================
'Waiting for user confirmation...': 'Waiting for user confirmation...',
'(esc to cancel, {{time}})': '(esc to cancel, {{time}})',
"I'm Feeling Lucky": "I'm Feeling Lucky",
'Shipping awesomeness... ': 'Shipping awesomeness... ',
'Painting the serifs back on...': 'Painting the serifs back on...',
'Navigating the slime mold...': 'Navigating the slime mold...',
'Consulting the digital spirits...': 'Consulting the digital spirits...',
'Reticulating splines...': 'Reticulating splines...',
'Warming up the AI hamsters...': 'Warming up the AI hamsters...',
'Asking the magic conch shell...': 'Asking the magic conch shell...',
'Generating witty retort...': 'Generating witty retort...',
'Polishing the algorithms...': 'Polishing the algorithms...',
"Don't rush perfection (or my code)...":
// ============================================================================
// Loading Phrases
// ============================================================================
WITTY_LOADING_PHRASES: [
"I'm Feeling Lucky",
'Shipping awesomeness... ',
'Painting the serifs back on...',
'Navigating the slime mold...',
'Consulting the digital spirits...',
'Reticulating splines...',
'Warming up the AI hamsters...',
'Asking the magic conch shell...',
'Generating witty retort...',
'Polishing the algorithms...',
"Don't rush perfection (or my code)...",
'Brewing fresh bytes...': 'Brewing fresh bytes...',
'Counting electrons...': 'Counting electrons...',
'Engaging cognitive processors...': 'Engaging cognitive processors...',
'Checking for syntax errors in the universe...':
'Brewing fresh bytes...',
'Counting electrons...',
'Engaging cognitive processors...',
'Checking for syntax errors in the universe...',
'One moment, optimizing humor...': 'One moment, optimizing humor...',
'Shuffling punchlines...': 'Shuffling punchlines...',
'Untangling neural nets...': 'Untangling neural nets...',
'Compiling brilliance...': 'Compiling brilliance...',
'Loading wit.exe...': 'Loading wit.exe...',
'Summoning the cloud of wisdom...': 'Summoning the cloud of wisdom...',
'Preparing a witty response...': 'Preparing a witty response...',
"Just a sec, I'm debugging reality...":
'One moment, optimizing humor...',
'Shuffling punchlines...',
'Untangling neural nets...',
'Compiling brilliance...',
'Loading wit.exe...',
'Summoning the cloud of wisdom...',
'Preparing a witty response...',
"Just a sec, I'm debugging reality...",
'Confuzzling the options...': 'Confuzzling the options...',
'Tuning the cosmic frequencies...': 'Tuning the cosmic frequencies...',
'Crafting a response worthy of your patience...':
'Confuzzling the options...',
'Tuning the cosmic frequencies...',
'Crafting a response worthy of your patience...',
'Compiling the 1s and 0s...': 'Compiling the 1s and 0s...',
'Resolving dependencies... and existential crises...':
'Compiling the 1s and 0s...',
'Resolving dependencies... and existential crises...',
'Defragmenting memories... both RAM and personal...':
'Defragmenting memories... both RAM and personal...',
'Rebooting the humor module...': 'Rebooting the humor module...',
'Caching the essentials (mostly cat memes)...':
'Rebooting the humor module...',
'Caching the essentials (mostly cat memes)...',
'Optimizing for ludicrous speed': 'Optimizing for ludicrous speed',
"Swapping bits... don't tell the bytes...":
'Optimizing for ludicrous speed',
"Swapping bits... don't tell the bytes...",
'Garbage collecting... be right back...':
'Garbage collecting... be right back...',
'Assembling the interwebs...': 'Assembling the interwebs...',
'Converting coffee into code...': 'Converting coffee into code...',
'Updating the syntax for reality...': 'Updating the syntax for reality...',
'Rewiring the synapses...': 'Rewiring the synapses...',
'Looking for a misplaced semicolon...':
'Assembling the interwebs...',
'Converting coffee into code...',
'Updating the syntax for reality...',
'Rewiring the synapses...',
'Looking for a misplaced semicolon...',
"Greasin' the cogs of the machine...": "Greasin' the cogs of the machine...",
'Pre-heating the servers...': 'Pre-heating the servers...',
'Calibrating the flux capacitor...': 'Calibrating the flux capacitor...',
'Engaging the improbability drive...': 'Engaging the improbability drive...',
'Channeling the Force...': 'Channeling the Force...',
'Aligning the stars for optimal response...':
"Greasin' the cogs of the machine...",
'Pre-heating the servers...',
'Calibrating the flux capacitor...',
'Engaging the improbability drive...',
'Channeling the Force...',
'Aligning the stars for optimal response...',
'So say we all...': 'So say we all...',
'Loading the next great idea...': 'Loading the next great idea...',
"Just a moment, I'm in the zone...": "Just a moment, I'm in the zone...",
'Preparing to dazzle you with brilliance...':
'So say we all...',
'Loading the next great idea...',
"Just a moment, I'm in the zone...",
'Preparing to dazzle you with brilliance...',
"Just a tick, I'm polishing my wit...":
"Just a tick, I'm polishing my wit...",
"Hold tight, I'm crafting a masterpiece...":
"Hold tight, I'm crafting a masterpiece...",
"Just a jiffy, I'm debugging the universe...":
"Just a jiffy, I'm debugging the universe...",
"Just a moment, I'm aligning the pixels...":
"Just a moment, I'm aligning the pixels...",
"Just a sec, I'm optimizing the humor...":
"Just a sec, I'm optimizing the humor...",
"Just a moment, I'm tuning the algorithms...":
"Just a moment, I'm tuning the algorithms...",
'Warp speed engaged...': 'Warp speed engaged...',
'Mining for more Dilithium crystals...':
'Warp speed engaged...',
'Mining for more Dilithium crystals...',
"Don't panic...": "Don't panic...",
'Following the white rabbit...': 'Following the white rabbit...',
'The truth is in here... somewhere...':
"Don't panic...",
'Following the white rabbit...',
'The truth is in here... somewhere...',
'Blowing on the cartridge...': 'Blowing on the cartridge...',
'Loading... Do a barrel roll!': 'Loading... Do a barrel roll!',
'Waiting for the respawn...': 'Waiting for the respawn...',
'Finishing the Kessel Run in less than 12 parsecs...':
'Blowing on the cartridge...',
'Loading... Do a barrel roll!',
'Waiting for the respawn...',
'Finishing the Kessel Run in less than 12 parsecs...',
"The cake is not a lie, it's just still loading...":
"The cake is not a lie, it's just still loading...",
'Fiddling with the character creation screen...':
'Fiddling with the character creation screen...',
"Just a moment, I'm finding the right meme...":
"Just a moment, I'm finding the right meme...",
"Pressing 'A' to continue...": "Pressing 'A' to continue...",
'Herding digital cats...': 'Herding digital cats...',
'Polishing the pixels...': 'Polishing the pixels...',
'Finding a suitable loading screen pun...':
"Pressing 'A' to continue...",
'Herding digital cats...',
'Polishing the pixels...',
'Finding a suitable loading screen pun...',
'Distracting you with this witty phrase...':
'Distracting you with this witty phrase...',
'Almost there... probably...': 'Almost there... probably...',
'Our hamsters are working as fast as they can...':
'Almost there... probably...',
'Our hamsters are working as fast as they can...',
'Giving Cloudy a pat on the head...': 'Giving Cloudy a pat on the head...',
'Petting the cat...': 'Petting the cat...',
'Rickrolling my boss...': 'Rickrolling my boss...',
'Never gonna give you up, never gonna let you down...':
'Giving Cloudy a pat on the head...',
'Petting the cat...',
'Rickrolling my boss...',
'Never gonna give you up, never gonna let you down...',
'Slapping the bass...': 'Slapping the bass...',
'Tasting the snozberries...': 'Tasting the snozberries...',
"I'm going the distance, I'm going for speed...":
'Slapping the bass...',
'Tasting the snozberries...',
"I'm going the distance, I'm going for speed...",
'Is this the real life? Is this just fantasy?...':
'Is this the real life? Is this just fantasy?...',
"I've got a good feeling about this...":
"I've got a good feeling about this...",
'Poking the bear...': 'Poking the bear...',
'Doing research on the latest memes...':
'Poking the bear...',
'Doing research on the latest memes...',
'Figuring out how to make this more witty...':
'Figuring out how to make this more witty...',
'Hmmm... let me think...': 'Hmmm... let me think...',
'What do you call a fish with no eyes? A fsh...':
'Hmmm... let me think...',
'What do you call a fish with no eyes? A fsh...',
'Why did the computer go to therapy? It had too many bytes...':
'Why did the computer go to therapy? It had too many bytes...',
"Why don't programmers like nature? It has too many bugs...":
"Why don't programmers like nature? It has too many bugs...",
'Why do programmers prefer dark mode? Because light attracts bugs...':
'Why do programmers prefer dark mode? Because light attracts bugs...',
'Why did the developer go broke? Because they used up all their cache...':
'Why did the developer go broke? Because they used up all their cache...',
"What can you do with a broken pencil? Nothing, it's pointless...":
"What can you do with a broken pencil? Nothing, it's pointless...",
'Applying percussive maintenance...': 'Applying percussive maintenance...',
'Searching for the correct USB orientation...':
'Applying percussive maintenance...',
'Searching for the correct USB orientation...',
'Ensuring the magic smoke stays inside the wires...':
'Ensuring the magic smoke stays inside the wires...',
'Rewriting in Rust for no particular reason...':
'Rewriting in Rust for no particular reason...',
'Trying to exit Vim...': 'Trying to exit Vim...',
'Spinning up the hamster wheel...': 'Spinning up the hamster wheel...',
"That's not a bug, it's an undocumented feature...":
'Trying to exit Vim...',
'Spinning up the hamster wheel...',
"That's not a bug, it's an undocumented feature...",
'Engage.': 'Engage.',
"I'll be back... with an answer.": "I'll be back... with an answer.",
'My other process is a TARDIS...': 'My other process is a TARDIS...',
'Communing with the machine spirit...':
'Engage.',
"I'll be back... with an answer.",
'My other process is a TARDIS...',
'Communing with the machine spirit...',
'Letting the thoughts marinate...': 'Letting the thoughts marinate...',
'Just remembered where I put my keys...':
'Letting the thoughts marinate...',
'Just remembered where I put my keys...',
'Pondering the orb...': 'Pondering the orb...',
"I've seen things you people wouldn't believe... like a user who reads loading messages.":
'Pondering the orb...',
"I've seen things you people wouldn't believe... like a user who reads loading messages.",
'Initiating thoughtful gaze...': 'Initiating thoughtful gaze...',
"What's a computer's favorite snack? Microchips.":
'Initiating thoughtful gaze...',
"What's a computer's favorite snack? Microchips.",
"Why do Java developers wear glasses? Because they don't C#.":
"Why do Java developers wear glasses? Because they don't C#.",
'Charging the laser... pew pew!': 'Charging the laser... pew pew!',
'Dividing by zero... just kidding!': 'Dividing by zero... just kidding!',
'Looking for an adult superviso... I mean, processing.':
'Charging the laser... pew pew!',
'Dividing by zero... just kidding!',
'Looking for an adult superviso... I mean, processing.',
'Making it go beep boop.': 'Making it go beep boop.',
'Buffering... because even AIs need a moment.':
'Making it go beep boop.',
'Buffering... because even AIs need a moment.',
'Entangling quantum particles for a faster response...':
'Entangling quantum particles for a faster response...',
'Polishing the chrome... on the algorithms.':
'Polishing the chrome... on the algorithms.',
'Are you not entertained? (Working on it!)':
'Are you not entertained? (Working on it!)',
'Summoning the code gremlins... to help, of course.':
'Summoning the code gremlins... to help, of course.',
'Just waiting for the dial-up tone to finish...':
'Just waiting for the dial-up tone to finish...',
'Recalibrating the humor-o-meter.': 'Recalibrating the humor-o-meter.',
'My other loading screen is even funnier.':
'Recalibrating the humor-o-meter.',
'My other loading screen is even funnier.',
"Pretty sure there's a cat walking on the keyboard somewhere...":
"Pretty sure there's a cat walking on the keyboard somewhere...",
'Enhancing... Enhancing... Still loading.':
'Enhancing... Enhancing... Still loading.',
"It's not a bug, it's a feature... of this loading screen.":
"It's not a bug, it's a feature... of this loading screen.",
'Have you tried turning it off and on again? (The loading screen, not me.)':
'Have you tried turning it off and on again? (The loading screen, not me.)',
'Constructing additional pylons...': 'Constructing additional pylons...',
'Constructing additional pylons...',
],
};
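
Because the phrases are now one array entry rather than one key per phrase, a consumer picks from the array instead of translating each string individually; a hedged sketch (the random selection and fallback text are illustrative, not from this diff):

const loadingPhrases = ta('WITTY_LOADING_PHRASES');
const randomPhrase =
  loadingPhrases.length > 0
    ? loadingPhrases[Math.floor(Math.random() * loadingPhrases.length)]
    : 'Loading...';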

View File

@@ -103,8 +103,8 @@ export default {
'Theme "{{themeName}}" not found.': 'Тема "{{themeName}}" не найдена.',
'Theme "{{themeName}}" not found in selected scope.':
'Тема "{{themeName}}" не найдена в выбранной области.',
'clear the screen and conversation history':
'Очистка экрана и истории диалога',
'Clear conversation history and free up context':
'Очистить историю диалога и освободить контекст',
'Compresses the context by replacing it with a summary.':
'Сжатие контекста заменой на краткую сводку',
'open full Qwen Code documentation in your browser':
@@ -314,6 +314,7 @@ export default {
'Tool Output Truncation Lines': 'Лимит строк вывода инструментов',
'Folder Trust': 'Доверие к папке',
'Vision Model Preview': 'Визуальная модель (предпросмотр)',
'Tool Schema Compliance': 'Соответствие схеме инструмента',
// Варианты перечислений настроек
'Auto (detect from system)': 'Авто (определить из системы)',
Text: 'Текст',
@@ -342,8 +343,8 @@ export default {
'Установка предпочитаемого внешнего редактора',
'Manage extensions': 'Управление расширениями',
'List active extensions': 'Показать активные расширения',
'Update extensions. Usage: update |--all':
'Обновить расширения. Использование: update |--all',
'Update extensions. Usage: update <extension-names>|--all':
'Обновить расширения. Использование: update <extension-names>|--all',
'manage IDE integration': 'Управление интеграцией с IDE',
'check status of IDE integration': 'Проверить статус интеграции с IDE',
'install required IDE companion for {{ideName}}':
@@ -401,7 +402,8 @@ export default {
'Set LLM output language': 'Установка языка вывода LLM',
'Usage: /language ui [zh-CN|en-US]':
'Использование: /language ui [zh-CN|en-US|ru-RU]',
'Usage: /language output ': 'Использование: /language output ',
'Usage: /language output <language>':
'Использование: /language output <language>',
'Example: /language output 中文': 'Пример: /language output 中文',
'Example: /language output English': 'Пример: /language output English',
'Example: /language output 日本語': 'Пример: /language output 日本語',
@@ -418,9 +420,8 @@ export default {
'To request additional UI language packs, please open an issue on GitHub.':
'Для запроса дополнительных языковых пакетов интерфейса, пожалуйста, создайте обращение на GitHub.',
'Available options:': 'Доступные варианты:',
' - zh-CN: Simplified Chinese': ' - zh-CN: Упрощенный китайский',
' - en-US: English': ' - en-US: Английский',
' - ru-RU: Russian': ' - ru-RU: Русский',
' - zh-CN: Simplified Chinese': ' - zh-CN: Упрощенный китайский',
' - en-US: English': ' - en-US: Английский',
'Set UI language to Simplified Chinese (zh-CN)':
'Установить язык интерфейса на упрощенный китайский (zh-CN)',
'Set UI language to English (en-US)':
@@ -436,8 +437,8 @@ export default {
'Режим подтверждения изменен на: {{mode}}',
'Approval mode changed to: {{mode}} (saved to {{scope}} settings{{location}})':
'Режим подтверждения изменен на: {{mode}} (сохранено в настройках {{scope}}{{location}})',
'Usage: /approval-mode [--session|--user|--project]':
'Использование: /approval-mode [--session|--user|--project]',
'Usage: /approval-mode <mode> [--session|--user|--project]':
'Использование: /approval-mode <mode> [--session|--user|--project]',
'Scope subcommands do not accept additional arguments.':
'Подкоманды области не принимают дополнительных аргументов.',
'Plan mode - Analyze only, do not modify files or execute commands':
@@ -589,8 +590,8 @@ export default {
'Ошибка при экспорте диалога: {{error}}',
'Conversation shared to {{filePath}}': 'Диалог экспортирован в {{filePath}}',
'No conversation found to share.': 'Нет диалога для экспорта.',
'Share the current conversation to a markdown or json file. Usage: /chat share <путь-к-файлу>':
'Экспортировать текущий диалог в markdown или json файл. Использование: /chat share <путь-к-файлу>',
'Share the current conversation to a markdown or json file. Usage: /chat share <file>':
'Экспортировать текущий диалог в markdown или json файл. Использование: /chat share <файл>',
// ============================================================================
// Команды - Резюме
@@ -625,8 +626,9 @@ export default {
// ============================================================================
// Команды - Очистка
// ============================================================================
'Clearing terminal and resetting chat.': 'Очистка терминала и сброс чата.',
'Clearing terminal.': 'Очистка терминала.',
'Starting a new session, resetting chat, and clearing terminal.':
'Начало новой сессии, сброс чата и очистка терминала.',
'Starting a new session and clearing.': 'Начало новой сессии и очистка.',
// ============================================================================
// Команды - Сжатие
@@ -657,8 +659,8 @@ export default {
'Команда /directory add не поддерживается в ограничительных профилях песочницы. Пожалуйста, используйте --include-directories при запуске сессии.',
"Error adding '{{path}}': {{error}}":
"Ошибка при добавлении '{{path}}': {{error}}",
'Successfully added GEMINI.md files from the following directories if there are:\n- {{directories}}':
'Успешно добавлены файлы GEMINI.md из следующих директорий (если они есть):\n- {{directories}}',
'Successfully added QWEN.md files from the following directories if there are:\n- {{directories}}':
'Успешно добавлены файлы QWEN.md из следующих директорий (если они есть):\n- {{directories}}',
'Error refreshing memory: {{error}}':
'Ошибка при обновлении памяти: {{error}}',
'Successfully added directories:\n- {{directories}}':
@@ -891,6 +893,7 @@ export default {
// Экран выхода / Статистика
// ============================================================================
'Agent powering down. Goodbye!': 'Агент завершает работу. До свидания!',
'To continue this session, run': 'Для продолжения этой сессии, выполните',
'Interaction Summary': 'Сводка взаимодействия',
'Session ID:': 'ID сессии:',
'Tool Calls:': 'Вызовы инструментов:',
@@ -950,179 +953,140 @@ export default {
'Waiting for user confirmation...':
'Ожидание подтверждения от пользователя...',
'(esc to cancel, {{time}})': '(esc для отмены, {{time}})',
"I'm Feeling Lucky": 'Мне повезёт!',
'Shipping awesomeness... ': 'Доставляем крутизну... ',
'Painting the serifs back on...': 'Рисуем засечки на буквах...',
'Navigating the slime mold...': 'Пробираемся через слизевиков..',
'Consulting the digital spirits...': 'Советуемся с цифровыми духами...',
'Reticulating splines...': 'Сглаживание сплайнов...',
'Warming up the AI hamsters...': 'Разогреваем ИИ-хомячков...',
'Asking the magic conch shell...': 'Спрашиваем волшебную ракушку...',
'Generating witty retort...': 'Генерируем остроумный ответ...',
'Polishing the algorithms...': 'Полируем алгоритмы...',
"Don't rush perfection (or my code)...":
// ============================================================================
// ============================================================================
// Loading Phrases
// ============================================================================
WITTY_LOADING_PHRASES: [
'Мне повезёт!',
'Доставляем крутизну... ',
'Рисуем засечки на буквах...',
'Пробираемся через слизевиков..',
'Советуемся с цифровыми духами...',
'Сглаживание сплайнов...',
'Разогреваем ИИ-хомячков...',
'Спрашиваем волшебную ракушку...',
'Генерируем остроумный ответ...',
'Полируем алгоритмы...',
'Не торопите совершенство (или мой код)...',
'Brewing fresh bytes...': 'Завариваем свежие байты...',
'Counting electrons...': 'Пересчитываем электроны...',
'Engaging cognitive processors...': 'Задействуем когнитивные процессоры...',
'Checking for syntax errors in the universe...':
'Завариваем свежие байты...',
'Пересчитываем электроны...',
'Задействуем когнитивные процессоры...',
'Ищем синтаксические ошибки во вселенной...',
'One moment, optimizing humor...': 'Секундочку, оптимизируем юмор...',
'Shuffling punchlines...': 'Перетасовываем панчлайны...',
'Untangling neural nets...': 'Распутаваем нейросети...',
'Compiling brilliance...': 'Компилируем гениальность...',
'Loading wit.exe...': 'Загружаем yumor.exe...',
'Summoning the cloud of wisdom...': 'Призываем облако мудрости...',
'Preparing a witty response...': 'Готовим остроумный ответ...',
"Just a sec, I'm debugging reality...": 'Секунду, идёт отладка реальности...',
'Confuzzling the options...': 'Запутываем варианты...',
'Tuning the cosmic frequencies...': 'Настраиваем космические частоты...',
'Crafting a response worthy of your patience...':
'Секундочку, оптимизируем юмор...',
'Перетасовываем панчлайны...',
'Распутаваем нейросети...',
'Компилируем гениальность...',
'Загружаем yumor.exe...',
'Призываем облако мудрости...',
'Готовим остроумный ответ...',
'Секунду, идёт отладка реальности...',
'Запутываем варианты...',
'Настраиваем космические частоты...',
'Создаем ответ, достойный вашего терпения...',
'Compiling the 1s and 0s...': 'Компилируем единички и нолики...',
'Resolving dependencies... and existential crises...':
'Компилируем единички и нолики...',
'Разрешаем зависимости... и экзистенциальные кризисы...',
'Defragmenting memories... both RAM and personal...':
'Дефрагментация памяти... и оперативной, и личной...',
'Rebooting the humor module...': 'Перезагрузка модуля юмора...',
'Caching the essentials (mostly cat memes)...':
'Перезагрузка модуля юмора...',
'Кэшируем самое важное (в основном мемы с котиками)...',
'Optimizing for ludicrous speed': 'Оптимизация для безумной скорости',
"Swapping bits... don't tell the bytes...":
'Оптимизация для безумной скорости',
'Меняем биты... только байтам не говорите...',
'Garbage collecting... be right back...': 'Сборка мусора... скоро вернусь...',
'Assembling the interwebs...': 'Сборка интернетов...',
'Converting coffee into code...': 'Превращаем кофе в код...',
'Updating the syntax for reality...': 'Обновляем синтаксис реальности...',
'Rewiring the synapses...': 'Переподключаем синапсы...',
'Looking for a misplaced semicolon...': 'Ищем лишнюю точку с запятой...',
"Greasin' the cogs of the machine...": 'Смазываем шестерёнки машины...',
'Pre-heating the servers...': 'Разогреваем серверы...',
'Calibrating the flux capacitor...': 'Калибруем потоковый накопитель...',
'Engaging the improbability drive...': 'Включаем двигатель невероятности...',
'Channeling the Force...': 'Направляем Силу...',
'Aligning the stars for optimal response...':
'Сборка мусора... скоро вернусь...',
'Сборка интернетов...',
'Превращаем кофе в код...',
'Обновляем синтаксис реальности...',
'Переподключаем синапсы...',
'Ищем лишнюю точку с запятой...',
'Смазываем шестерёнки машины...',
'Разогреваем серверы...',
'Калибруем потоковый накопитель...',
'Включаем двигатель невероятности...',
'Направляем Силу...',
'Выравниваем звёзды для оптимального ответа...',
'So say we all...': 'Так скажем мы все...',
'Loading the next great idea...': 'Загрузка следующей великой идеи...',
"Just a moment, I'm in the zone...": 'Минутку, я в потоке...',
'Preparing to dazzle you with brilliance...':
'Так скажем мы все...',
'Загрузка следующей великой идеи...',
'Минутку, я в потоке...',
'Готовлюсь ослепить вас гениальностью...',
"Just a tick, I'm polishing my wit...": 'Секунду, полирую остроумие...',
"Hold tight, I'm crafting a masterpiece...": 'Держитесь, создаю шедевр...',
"Just a jiffy, I'm debugging the universe...":
'Секунду, полирую остроумие...',
'Держитесь, создаю шедевр...',
'Мигом, отлаживаю вселенную...',
"Just a moment, I'm aligning the pixels...": 'Момент, выравниваю пиксели...',
"Just a sec, I'm optimizing the humor...": 'Секунду, оптимизирую юмор...',
"Just a moment, I'm tuning the algorithms...":
'Момент, выравниваю пиксели...',
'Секунду, оптимизирую юмор...',
'Момент, настраиваю алгоритмы...',
'Warp speed engaged...': 'Варп-скорость включена...',
'Mining for more Dilithium crystals...': 'Добываем кристаллы дилития...',
"Don't panic...": 'Без паники...',
'Following the white rabbit...': 'Следуем за белым кроликом...',
'The truth is in here... somewhere...': 'Истина где-то здесь... внутри...',
'Blowing on the cartridge...': 'Продуваем картридж...',
'Loading... Do a barrel roll!': 'Загрузка... Сделай бочку!',
'Waiting for the respawn...': 'Ждем респауна...',
'Finishing the Kessel Run in less than 12 parsecs...':
'Варп-прыжок активирован...',
'Добываем кристаллы дилития...',
'Без паники...',
'Следуем за белым кроликом...',
'Истина где-то здесь... внутри...',
'Продуваем картридж...',
'Загрузка... Сделай бочку!',
'Ждем респауна...',
'Делаем Дугу Кесселя менее чем за 12 парсеков...',
"The cake is not a lie, it's just still loading...":
'Тортик — не ложь, он просто ещё грузится...',
'Fiddling with the character creation screen...':
'Возимся с экраном создания персонажа...',
"Just a moment, I'm finding the right meme...":
'Минутку, ищу подходящий мем...',
"Pressing 'A' to continue...": "Нажимаем 'A' для продолжения...",
'Herding digital cats...': 'Пасём цифровых котов...',
'Polishing the pixels...': 'Полируем пиксели...',
'Finding a suitable loading screen pun...':
"Нажимаем 'A' для продолжения...",
'Пасём цифровых котов...',
'Полируем пиксели...',
'Ищем подходящий каламбур для экрана загрузки...',
'Distracting you with this witty phrase...':
'Отвлекаем вас этой остроумной фразой...',
'Almost there... probably...': 'Почти готово... вроде...',
'Our hamsters are working as fast as they can...':
'Почти готово... вроде...',
'Наши хомячки работают изо всех сил...',
'Giving Cloudy a pat on the head...': 'Гладим Облачко по голове...',
'Petting the cat...': 'Гладим кота...',
'Rickrolling my boss...': 'Рикроллим начальника...',
'Never gonna give you up, never gonna let you down...':
'Гладим Облачко по голове...',
'Гладим кота...',
'Рикроллим начальника...',
'Never gonna give you up, never gonna let you down...',
'Slapping the bass...': 'Лабаем бас-гитару...',
'Tasting the snozberries...': 'Пробуем снузберри на вкус...',
"I'm going the distance, I'm going for speed...":
'Лабаем бас-гитару...',
'Пробуем снузберри на вкус...',
'Иду до конца, иду на скорость...',
'Is this the real life? Is this just fantasy?...':
'Is this the real life? Is this just fantasy?...',
"I've got a good feeling about this...": 'У меня хорошее предчувствие...',
'Poking the bear...': 'Дразним медведя... (Не лезь...)',
'Doing research on the latest memes...': 'Изучаем свежие мемы...',
'Figuring out how to make this more witty...':
'У меня хорошее предчувствие...',
'Дразним медведя... (Не лезь...)',
'Изучаем свежие мемы...',
'Думаем, как сделать это остроумнее...',
'Hmmm... let me think...': 'Хмм... дайте подумать...',
'What do you call a fish with no eyes? A fsh...':
'Хмм... дайте подумать...',
'Как называется бумеранг, который не возвращается? Палка...',
'Why did the computer go to therapy? It had too many bytes...':
'Почему компьютер простудился? Потому что оставил окна открытыми...',
"Why don't programmers like nature? It has too many bugs...":
'Почему программисты не любят гулять на улице? Там среда не настроена...',
'Why do programmers prefer dark mode? Because light attracts bugs...':
'Почему программисты предпочитают тёмную тему? Потому что в темноте не видно багов...',
'Why did the developer go broke? Because they used up all their cache...':
'Почему разработчик разорился? Потому что потратил весь свой кэш...',
"What can you do with a broken pencil? Nothing, it's pointless...":
'Что можно делать со сломанным карандашом? Ничего — он тупой...',
'Applying percussive maintenance...': 'Провожу настройку методом тыка...',
'Searching for the correct USB orientation...':
'Провожу настройку методом тыка...',
'Ищем, какой стороной вставлять флешку...',
'Ensuring the magic smoke stays inside the wires...':
'Следим, чтобы волшебный дым не вышел из проводов...',
'Rewriting in Rust for no particular reason...':
'Переписываем всё на Rust без особой причины...',
'Trying to exit Vim...': 'Пытаемся выйти из Vim...',
'Spinning up the hamster wheel...': 'Раскручиваем колесо для хомяка...',
"That's not a bug, it's an undocumented feature...": 'Это не баг, а фича...',
'Engage.': 'Поехали!',
"I'll be back... with an answer.": 'Я вернусь... с ответом.',
'My other process is a TARDIS...': 'Мой другой процесс — это ТАРДИС...',
'Communing with the machine spirit...': 'Общаемся с духом машины...',
'Letting the thoughts marinate...': 'Даем мыслям замариноваться...',
'Just remembered where I put my keys...':
'Пытаемся выйти из Vim...',
'Раскручиваем колесо для хомяка...',
'Это не баг, а фича...',
'Поехали!',
'Я вернусь... с ответом.',
'Мой другой процесс — это ТАРДИС...',
'Общаемся с духом машины...',
'Даем мыслям замариноваться...',
'Только что вспомнил, куда положил ключи...',
'Pondering the orb...': 'Размышляю над сферой...',
"I've seen things you people wouldn't believe... like a user who reads loading messages.":
'Я видел такое, во что вы, люди, просто не поверите... например, пользователя, читающего сообщения загрузки.',
'Initiating thoughtful gaze...': 'Инициируем задумчивый взгляд...',
"What's a computer's favorite snack? Microchips.":
'Размышляю над сферой...',
'Я видел такое, что вам, людям, и не снилось... пользователя, читающего эти сообщения.',
'Инициируем задумчивый взгляд...',
'Что сервер заказывает в баре? Пинг-коладу.',
"Why do Java developers wear glasses? Because they don't C#.":
'Почему Java-разработчики не убираются дома? Они ждут сборщик мусора...',
'Charging the laser... pew pew!': 'Заряжаем лазер... пиу-пиу!',
'Dividing by zero... just kidding!': 'Делим на ноль... шучу!',
'Looking for an adult superviso... I mean, processing.':
'Заряжаем лазер... пиу-пиу!',
'Делим на ноль... шучу!',
'Ищу взрослых для присмот... в смысле, обрабатываю.',
'Making it go beep boop.': 'Делаем бип-буп.',
'Buffering... because even AIs need a moment.':
'Буферизация... даже ИИ нужно мгновение.',
'Entangling quantum particles for a faster response...':
'Делаем бип-буп.',
'Буферизация... даже ИИ нужно время подумать.',
'Запутываем квантовые частицы для быстрого ответа...',
'Polishing the chrome... on the algorithms.':
'Полируем хром... на алгоритмах.',
'Are you not entertained? (Working on it!)':
'Вы ещё не развлеклись?! Разве вы не за этим сюда пришли?!',
'Summoning the code gremlins... to help, of course.':
'Призываем гремлинов кода... для помощи, конечно же.',
'Just waiting for the dial-up tone to finish...':
'Ждем, пока закончится звук dial-up модема...',
'Recalibrating the humor-o-meter.': 'Перекалибровка юморометра.',
'My other loading screen is even funnier.':
'Перекалибровка юморометра.',
'Мой другой экран загрузки ещё смешнее.',
"Pretty sure there's a cat walking on the keyboard somewhere...":
'Кажется, где-то по клавиатуре гуляет кот...',
'Enhancing... Enhancing... Still loading.':
'Улучшаем... Ещё улучшаем... Всё ещё грузится.',
"It's not a bug, it's a feature... of this loading screen.":
'Это не баг, это фича... экрана загрузки.',
'Have you tried turning it off and on again? (The loading screen, not me.)':
'Пробовали выключить и включить снова? (Экран загрузки, не меня!)',
'Constructing additional pylons...': 'Нужно построить больше пилонов...',
'Нужно построить больше пилонов...',
],
};

View File

@@ -101,7 +101,7 @@ export default {
'Theme "{{themeName}}" not found.': '未找到主题 "{{themeName}}"。',
'Theme "{{themeName}}" not found in selected scope.':
'在所选作用域中未找到主题 "{{themeName}}"。',
'clear the screen and conversation history': '清屏并清除对话历史',
'Clear conversation history and free up context': '清除对话历史并释放上下文',
'Compresses the context by replacing it with a summary.':
'通过用摘要替换来压缩上下文',
'open full Qwen Code documentation in your browser':
@@ -581,8 +581,9 @@ export default {
// ============================================================================
// Commands - Clear
// ============================================================================
'Clearing terminal and resetting chat.': '正在清屏并重置聊天',
'Clearing terminal.': '正在清屏',
'Starting a new session, resetting chat, and clearing terminal.':
'正在开始新会话,重置聊天并清屏',
'Starting a new session and clearing.': '正在开始新会话并清屏。',
// ============================================================================
// Commands - Compress
@@ -888,165 +889,39 @@ export default {
// ============================================================================
'Waiting for user confirmation...': '等待用户确认...',
'(esc to cancel, {{time}})': '(按 esc 取消,{{time}}',
"I'm Feeling Lucky": '我感觉很幸运',
'Shipping awesomeness... ': '正在运送精彩内容... ',
'Painting the serifs back on...': '正在重新绘制衬线...',
'Navigating the slime mold...': '正在导航粘液霉菌...',
'Consulting the digital spirits...': '正在咨询数字精灵...',
'Reticulating splines...': '正在网格化样条曲线...',
'Warming up the AI hamsters...': '正在预热 AI 仓鼠...',
'Asking the magic conch shell...': '正在询问魔法海螺壳...',
'Generating witty retort...': '正在生成机智的反驳...',
'Polishing the algorithms...': '正在打磨算法...',
"Don't rush perfection (or my code)...": '不要急于追求完美(或我的代码)...',
'Brewing fresh bytes...': '正在酿造新鲜字节...',
'Counting electrons...': '正在计算电子...',
'Engaging cognitive processors...': '正在启动认知处理器...',
'Checking for syntax errors in the universe...':
'正在检查宇宙中的语法错误...',
'One moment, optimizing humor...': '稍等片刻,正在优化幽默感...',
'Shuffling punchlines...': '正在洗牌笑点...',
'Untangling neural nets...': '正在解开神经网络...',
'Compiling brilliance...': '正在编译智慧...',
'Loading wit.exe...': '正在加载 wit.exe...',
'Summoning the cloud of wisdom...': '正在召唤智慧云...',
'Preparing a witty response...': '正在准备机智的回复...',
"Just a sec, I'm debugging reality...": '稍等片刻,我正在调试现实...',
'Confuzzling the options...': '正在混淆选项...',
'Tuning the cosmic frequencies...': '正在调谐宇宙频率...',
'Crafting a response worthy of your patience...':
'正在制作值得您耐心等待的回复...',
'Compiling the 1s and 0s...': '正在编译 1 和 0...',
'Resolving dependencies... and existential crises...':
'正在解决依赖关系...和存在主义危机...',
'Defragmenting memories... both RAM and personal...':
'正在整理记忆碎片...包括 RAM 和个人记忆...',
'Rebooting the humor module...': '正在重启幽默模块...',
'Caching the essentials (mostly cat memes)...':
'正在缓存必需品(主要是猫咪表情包)...',
'Optimizing for ludicrous speed': '正在优化到荒谬的速度',
"Swapping bits... don't tell the bytes...": '正在交换位...不要告诉字节...',
'Garbage collecting... be right back...': '正在垃圾回收...马上回来...',
'Assembling the interwebs...': '正在组装互联网...',
'Converting coffee into code...': '正在将咖啡转换为代码...',
'Updating the syntax for reality...': '正在更新现实的语法...',
'Rewiring the synapses...': '正在重新连接突触...',
'Looking for a misplaced semicolon...': '正在寻找放错位置的分号...',
"Greasin' the cogs of the machine...": '正在给机器的齿轮上油...',
'Pre-heating the servers...': '正在预热服务器...',
'Calibrating the flux capacitor...': '正在校准通量电容器...',
'Engaging the improbability drive...': '正在启动不可能性驱动器...',
'Channeling the Force...': '正在引导原力...',
'Aligning the stars for optimal response...': '正在对齐星星以获得最佳回复...',
'So say we all...': '我们都说...',
'Loading the next great idea...': '正在加载下一个伟大的想法...',
"Just a moment, I'm in the zone...": '稍等片刻,我正进入状态...',
'Preparing to dazzle you with brilliance...': '正在准备用智慧让您眼花缭乱...',
"Just a tick, I'm polishing my wit...": '稍等片刻,我正在打磨我的智慧...',
"Hold tight, I'm crafting a masterpiece...": '请稍等,我正在制作杰作...',
"Just a jiffy, I'm debugging the universe...": '稍等片刻,我正在调试宇宙...',
"Just a moment, I'm aligning the pixels...": '稍等片刻,我正在对齐像素...',
"Just a sec, I'm optimizing the humor...": '稍等片刻,我正在优化幽默感...',
"Just a moment, I'm tuning the algorithms...": '稍等片刻,我正在调整算法...',
'Warp speed engaged...': '曲速已启动...',
'Mining for more Dilithium crystals...': '正在挖掘更多二锂晶体...',
"Don't panic...": '不要惊慌...',
'Following the white rabbit...': '正在跟随白兔...',
'The truth is in here... somewhere...': '真相在这里...某个地方...',
'Blowing on the cartridge...': '正在吹卡带...',
'Loading... Do a barrel roll!': '正在加载...做个桶滚!',
'Waiting for the respawn...': '等待重生...',
'Finishing the Kessel Run in less than 12 parsecs...':
'正在以不到 12 秒差距完成凯塞尔航线...',
"The cake is not a lie, it's just still loading...":
'蛋糕不是谎言,只是还在加载...',
'Fiddling with the character creation screen...': '正在摆弄角色创建界面...',
"Just a moment, I'm finding the right meme...":
'稍等片刻,我正在寻找合适的表情包...',
"Pressing 'A' to continue...": "按 'A' 继续...",
'Herding digital cats...': '正在放牧数字猫...',
'Polishing the pixels...': '正在打磨像素...',
'Finding a suitable loading screen pun...': '正在寻找合适的加载屏幕双关语...',
'Distracting you with this witty phrase...':
'正在用这个机智的短语分散您的注意力...',
'Almost there... probably...': '快到了...可能...',
'Our hamsters are working as fast as they can...':
'我们的仓鼠正在尽可能快地工作...',
'Giving Cloudy a pat on the head...': '正在拍拍 Cloudy 的头...',
'Petting the cat...': '正在抚摸猫咪...',
'Rickrolling my boss...': '正在 Rickroll 我的老板...',
'Never gonna give you up, never gonna let you down...':
'永远不会放弃你,永远不会让你失望...',
'Slapping the bass...': '正在拍打低音...',
'Tasting the snozberries...': '正在品尝 snozberries...',
"I'm going the distance, I'm going for speed...":
'我要走得更远,我要追求速度...',
'Is this the real life? Is this just fantasy?...':
'这是真实的生活吗?还是只是幻想?...',
"I've got a good feeling about this...": '我对这个感觉很好...',
'Poking the bear...': '正在戳熊...',
'Doing research on the latest memes...': '正在研究最新的表情包...',
'Figuring out how to make this more witty...': '正在想办法让这更有趣...',
'Hmmm... let me think...': '嗯...让我想想...',
'What do you call a fish with no eyes? A fsh...':
'没有眼睛的鱼叫什么?一条鱼...',
'Why did the computer go to therapy? It had too many bytes...':
'为什么电脑去看心理医生?因为它有太多字节...',
"Why don't programmers like nature? It has too many bugs...":
'为什么程序员不喜欢大自然?因为虫子太多了...',
'Why do programmers prefer dark mode? Because light attracts bugs...':
'为什么程序员喜欢暗色模式?因为光会吸引虫子...',
'Why did the developer go broke? Because they used up all their cache...':
'为什么开发者破产了?因为他们用完了所有缓存...',
"What can you do with a broken pencil? Nothing, it's pointless...":
'你能用断了的铅笔做什么?什么都不能,因为它没有笔尖...',
'Applying percussive maintenance...': '正在应用敲击维护...',
'Searching for the correct USB orientation...': '正在寻找正确的 USB 方向...',
'Ensuring the magic smoke stays inside the wires...':
'确保魔法烟雾留在电线内...',
'Rewriting in Rust for no particular reason...':
'正在用 Rust 重写,没有特别的原因...',
'Trying to exit Vim...': '正在尝试退出 Vim...',
'Spinning up the hamster wheel...': '正在启动仓鼠轮...',
"That's not a bug, it's an undocumented feature...":
'这不是一个错误,这是一个未记录的功能...',
'Engage.': '启动。',
"I'll be back... with an answer.": '我会回来的...带着答案。',
'My other process is a TARDIS...': '我的另一个进程是 TARDIS...',
'Communing with the machine spirit...': '正在与机器精神交流...',
'Letting the thoughts marinate...': '让想法慢慢酝酿...',
'Just remembered where I put my keys...': '刚刚想起我把钥匙放在哪里了...',
'Pondering the orb...': '正在思考球体...',
"I've seen things you people wouldn't believe... like a user who reads loading messages.":
'我见过你们不会相信的事情...比如一个阅读加载消息的用户。',
'Initiating thoughtful gaze...': '正在启动深思凝视...',
"What's a computer's favorite snack? Microchips.":
'电脑最喜欢的零食是什么?微芯片。',
"Why do Java developers wear glasses? Because they don't C#.":
'为什么 Java 开发者戴眼镜?因为他们不会 C#。',
'Charging the laser... pew pew!': '正在给激光充电...砰砰!',
'Dividing by zero... just kidding!': '除以零...只是开玩笑!',
'Looking for an adult superviso... I mean, processing.':
'正在寻找成人监督...我是说,处理中。',
'Making it go beep boop.': '让它发出哔哔声。',
'Buffering... because even AIs need a moment.':
'正在缓冲...因为即使是 AI 也需要片刻。',
'Entangling quantum particles for a faster response...':
'正在纠缠量子粒子以获得更快的回复...',
'Polishing the chrome... on the algorithms.': '正在打磨铬...在算法上。',
'Are you not entertained? (Working on it!)': '你不觉得有趣吗?(正在努力!)',
'Summoning the code gremlins... to help, of course.':
'正在召唤代码小精灵...当然是来帮忙的。',
'Just waiting for the dial-up tone to finish...': '只是等待拨号音结束...',
'Recalibrating the humor-o-meter.': '正在重新校准幽默计。',
'My other loading screen is even funnier.': '我的另一个加载屏幕更有趣。',
"Pretty sure there's a cat walking on the keyboard somewhere...":
'很确定有只猫在某个地方键盘上走...',
'Enhancing... Enhancing... Still loading.':
'正在增强...正在增强...仍在加载。',
"It's not a bug, it's a feature... of this loading screen.":
'这不是一个错误,这是一个功能...这个加载屏幕的功能。',
'Have you tried turning it off and on again? (The loading screen, not me.)':
'你试过把它关掉再打开吗?(加载屏幕,不是我。)',
'Constructing additional pylons...': '正在建造额外的能量塔...',
WITTY_LOADING_PHRASES: [
// --- 职场搬砖系列 ---
'正在努力搬砖,请稍候...',
'老板在身后,快加载啊!',
'头发掉光前,一定能加载完...',
'服务器正在深呼吸,准备放大招...',
'正在向服务器投喂咖啡...',
// --- 大厂黑话系列 ---
'正在赋能全链路,寻找关键抓手...',
'正在降本增效,优化加载路径...',
'正在打破部门壁垒,沉淀方法论...',
'正在拥抱变化,迭代核心价值...',
'正在对齐颗粒度,打磨底层逻辑...',
'大力出奇迹,正在强行加载...',
// --- 程序员自嘲系列 ---
'只要我不写代码,代码就没有 Bug...',
'正在把 Bug 转化为 Feature...',
'只要我不尴尬Bug 就追不上我...',
'正在试图理解去年的自己写了什么...',
'正在猿力觉醒中,请耐心等待...',
// --- 合作愉快系列 ---
'正在询问产品经理:这需求是真的吗?',
'正在给产品经理画饼,请稍等...',
// --- 温暖治愈系列 ---
'每一行代码,都在努力让世界变得更好一点点...',
'每一个伟大的想法,都值得这份耐心的等待...',
'别急,美好的事物总是需要一点时间去酝酿...',
'愿你的代码永无 Bug愿你的梦想终将成真...',
'哪怕只有 0.1% 的进度,也是在向目标靠近...',
'加载的是字节,承载的是对技术的热爱...',
],
};

View File

@@ -0,0 +1,327 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { FileCommandLoader } from './FileCommandLoader.js';
import type { Config } from '@qwen-code/qwen-code-core';
import { Storage } from '@qwen-code/qwen-code-core';
describe('FileCommandLoader - Extension Commands Support', () => {
let tempDir: string;
let mockConfig: Partial<Config>;
beforeEach(async () => {
tempDir = await fs.promises.mkdtemp(
path.join(os.tmpdir(), 'file-command-loader-ext-test-'),
);
mockConfig = {
getFolderTrustFeature: () => false,
getFolderTrust: () => true,
getProjectRoot: () => tempDir,
storage: new Storage(tempDir),
getExtensions: () => [],
};
});
afterEach(async () => {
await fs.promises.rm(tempDir, { recursive: true, force: true });
});
it('should load commands from extension with config.commands path', async () => {
// Setup extension structure
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'test-ext');
const customCommandsDir = path.join(extensionDir, 'custom-cmds');
await fs.promises.mkdir(customCommandsDir, { recursive: true });
// Create extension config with custom commands path
const extensionConfig = {
name: 'test-ext',
version: '1.0.0',
commands: 'custom-cmds',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create a test command in custom directory
const commandContent =
'---\ndescription: Test command from extension\n---\nDo something';
await fs.promises.writeFile(
path.join(customCommandsDir, 'test.md'),
commandContent,
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'test-ext',
config: extensionConfig,
name: 'test-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
contextFiles: [],
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('test-ext:test');
expect(commands[0].description).toBe(
'[test-ext] Test command from extension',
);
});
it('should load commands from extension with multiple commands paths', async () => {
// Setup extension structure
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'multi-ext');
const cmdsDir1 = path.join(extensionDir, 'commands1');
const cmdsDir2 = path.join(extensionDir, 'commands2');
await fs.promises.mkdir(cmdsDir1, { recursive: true });
await fs.promises.mkdir(cmdsDir2, { recursive: true });
// Create extension config with multiple commands paths
const extensionConfig = {
name: 'multi-ext',
version: '1.0.0',
commands: ['commands1', 'commands2'],
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create test commands in both directories
await fs.promises.writeFile(
path.join(cmdsDir1, 'cmd1.md'),
'---\n---\nCommand 1',
);
await fs.promises.writeFile(
path.join(cmdsDir2, 'cmd2.md'),
'---\n---\nCommand 2',
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'multi-ext',
config: extensionConfig,
contextFiles: [],
name: 'multi-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(2);
const commandNames = commands.map((c) => c.name).sort();
expect(commandNames).toEqual(['multi-ext:cmd1', 'multi-ext:cmd2']);
});
it('should fallback to default "commands" directory when config.commands not specified', async () => {
// Setup extension structure with default commands directory
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'default-ext',
);
const defaultCommandsDir = path.join(extensionDir, 'commands');
await fs.promises.mkdir(defaultCommandsDir, { recursive: true });
// Create extension config without commands field
const extensionConfig = {
name: 'default-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create a test command in default directory
await fs.promises.writeFile(
path.join(defaultCommandsDir, 'default.md'),
'---\n---\nDefault command',
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'default-ext',
config: extensionConfig,
contextFiles: [],
name: 'default-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('default-ext:default');
});
it('should handle extension without commands directory gracefully', async () => {
// Setup extension structure without commands directory
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'no-cmds-ext',
);
await fs.promises.mkdir(extensionDir, { recursive: true });
// Create extension config
const extensionConfig = {
name: 'no-cmds-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'no-cmds-ext',
config: extensionConfig,
contextFiles: [],
name: 'no-cmds-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
// Should not throw and return empty array
expect(commands).toHaveLength(0);
});
it('should prefix extension commands with extension name', async () => {
// Setup extension
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'prefix-ext',
);
const commandsDir = path.join(extensionDir, 'commands');
await fs.promises.mkdir(commandsDir, { recursive: true });
const extensionConfig = {
name: 'prefix-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
await fs.promises.writeFile(
path.join(commandsDir, 'mycommand.md'),
'---\n---\nMy command',
);
mockConfig.getExtensions = () => [
{
id: 'prefix-ext',
config: extensionConfig,
contextFiles: [],
name: 'prefix-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('prefix-ext:mycommand');
});
it('should load commands from multiple extensions in alphabetical order', async () => {
// Setup two extensions
const ext1Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-b');
const ext2Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-a');
await fs.promises.mkdir(path.join(ext1Dir, 'commands'), {
recursive: true,
});
await fs.promises.mkdir(path.join(ext2Dir, 'commands'), {
recursive: true,
});
// Extension B
await fs.promises.writeFile(
path.join(ext1Dir, 'qwen-extension.json'),
JSON.stringify({ name: 'ext-b', version: '1.0.0' }),
);
await fs.promises.writeFile(
path.join(ext1Dir, 'commands', 'cmd.md'),
'---\n---\nCommand B',
);
// Extension A
await fs.promises.writeFile(
path.join(ext2Dir, 'qwen-extension.json'),
JSON.stringify({ name: 'ext-a', version: '1.0.0' }),
);
await fs.promises.writeFile(
path.join(ext2Dir, 'commands', 'cmd.md'),
'---\n---\nCommand A',
);
mockConfig.getExtensions = () => [
{
id: 'ext-b',
config: { name: 'ext-b', version: '1.0.0' },
contextFiles: [],
name: 'ext-b',
version: '1.0.0',
isActive: true,
path: ext1Dir,
},
{
id: 'ext-a',
config: { name: 'ext-a', version: '1.0.0' },
contextFiles: [],
name: 'ext-a',
version: '1.0.0',
isActive: true,
path: ext2Dir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(2);
// Extensions are sorted alphabetically, so ext-a comes before ext-b
expect(commands[0].name).toBe('ext-a:cmd');
expect(commands[1].name).toBe('ext-b:cmd');
});
});
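
Taken together, these tests assume an on-disk layout like the sketch below (names taken from the first test). The commands field may be omitted (defaulting to a commands/ directory), set to a single path, or set to an array of paths, all relative to the extension root:

// <project root>/.qwen/extensions/test-ext/qwen-extension.json
// <project root>/.qwen/extensions/test-ext/custom-cmds/test.md  ->  exposed as /test-ext:test
const extensionConfig = {
  name: 'test-ext',
  version: '1.0.0',
  commands: 'custom-cmds', // or ['commands1', 'commands2']
};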

View File

@@ -0,0 +1,117 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import { FileCommandLoader } from './FileCommandLoader.js';
describe('FileCommandLoader - Markdown support', () => {
let tempDir: string;
beforeAll(async () => {
// Create a temporary directory for test commands
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-md-test-'));
});
afterAll(async () => {
// Clean up
await fs.rm(tempDir, { recursive: true, force: true });
});
it('should load markdown commands with frontmatter', async () => {
// Create a test markdown command file
const mdContent = `---
description: Test markdown command
---
This is a test prompt from markdown.`;
const commandPath = path.join(tempDir, 'test-command.md');
await fs.writeFile(commandPath, mdContent, 'utf-8');
// Create loader with temp dir as command source
const loader = new FileCommandLoader(null);
// Mock the getCommandDirectories to return our temp dir
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('test-command');
expect(commands[0].description).toBe('Test markdown command');
} finally {
// Restore original method
loader['getCommandDirectories'] = originalMethod;
}
});
it('should load markdown commands without frontmatter', async () => {
// Create a test markdown command file without frontmatter
const mdContent = 'This is a simple prompt without frontmatter.';
const commandPath = path.join(tempDir, 'simple-command.md');
await fs.writeFile(commandPath, mdContent, 'utf-8');
const loader = new FileCommandLoader(null);
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
const simpleCommand = commands.find(
(cmd) => cmd.name === 'simple-command',
);
expect(simpleCommand).toBeDefined();
expect(simpleCommand?.description).toContain('Custom command from');
} finally {
loader['getCommandDirectories'] = originalMethod;
}
});
it('should load both toml and markdown commands', async () => {
// Create both TOML and Markdown files
const tomlContent = `prompt = "TOML prompt"
description = "TOML command"`;
const mdContent = `---
description: Markdown command
---
Markdown prompt`;
await fs.writeFile(
path.join(tempDir, 'toml-cmd.toml'),
tomlContent,
'utf-8',
);
await fs.writeFile(path.join(tempDir, 'md-cmd.md'), mdContent, 'utf-8');
const loader = new FileCommandLoader(null);
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
const tomlCommand = commands.find((cmd) => cmd.name === 'toml-cmd');
const mdCommand = commands.find((cmd) => cmd.name === 'md-cmd');
expect(tomlCommand).toBeDefined();
expect(tomlCommand?.description).toBe('TOML command');
expect(mdCommand).toBeDefined();
expect(mdCommand?.description).toBe('Markdown command');
} finally {
loader['getCommandDirectories'] = originalMethod;
}
});
});

View File

@@ -568,9 +568,9 @@ describe('FileCommandLoader', () => {
expect(commands).toHaveLength(3);
const commandNames = commands.map((cmd) => cmd.name);
expect(commandNames).toEqual(['user', 'project', 'ext']);
expect(commandNames).toEqual(['user', 'project', 'test-ext:ext']);
const extCommand = commands.find((cmd) => cmd.name === 'ext');
const extCommand = commands.find((cmd) => cmd.name === 'test-ext:ext');
expect(extCommand?.extensionName).toBe('test-ext');
expect(extCommand?.description).toMatch(/^\[test-ext\]/);
});
@@ -656,14 +656,14 @@ describe('FileCommandLoader', () => {
expect(result1.content).toEqual([{ text: 'Project deploy command' }]);
}
expect(commands[2].name).toBe('deploy');
expect(commands[2].name).toBe('test-ext:deploy');
expect(commands[2].extensionName).toBe('test-ext');
expect(commands[2].description).toMatch(/^\[test-ext\]/);
const result2 = await commands[2].action?.(
createMockCommandContext({
invocation: {
raw: '/deploy',
name: 'deploy',
raw: '/test-ext:deploy',
name: 'test-ext:deploy',
args: '',
},
}),
@@ -729,7 +729,7 @@ describe('FileCommandLoader', () => {
const commands = await loader.loadCommands(signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('active');
expect(commands[0].name).toBe('active-ext:active');
expect(commands[0].extensionName).toBe('active-ext');
expect(commands[0].description).toMatch(/^\[active-ext\]/);
});
@@ -803,17 +803,17 @@ describe('FileCommandLoader', () => {
expect(commands).toHaveLength(3);
const commandNames = commands.map((cmd) => cmd.name).sort();
expect(commandNames).toEqual(['b:c', 'b:d:e', 'simple']);
expect(commandNames).toEqual(['a:b:c', 'a:b:d:e', 'a:simple']);
const nestedCmd = commands.find((cmd) => cmd.name === 'b:c');
const nestedCmd = commands.find((cmd) => cmd.name === 'a:b:c');
expect(nestedCmd?.extensionName).toBe('a');
expect(nestedCmd?.description).toMatch(/^\[a\]/);
expect(nestedCmd).toBeDefined();
const result = await nestedCmd!.action?.(
createMockCommandContext({
invocation: {
raw: '/b:c',
name: 'b:c',
raw: '/a:b:c',
name: 'a:b:c',
args: '',
},
}),

View File

@@ -5,34 +5,23 @@
*/
import { promises as fs } from 'node:fs';
import * as fsSync from 'node:fs';
import path from 'node:path';
import toml from '@iarna/toml';
import { glob } from 'glob';
import { z } from 'zod';
import type { Config } from '@qwen-code/qwen-code-core';
import { Storage } from '@qwen-code/qwen-code-core';
import { EXTENSIONS_CONFIG_FILENAME, Storage } from '@qwen-code/qwen-code-core';
import type { ICommandLoader } from './types.js';
import type {
CommandContext,
SlashCommand,
SlashCommandActionReturn,
} from '../ui/commands/types.js';
import { CommandKind } from '../ui/commands/types.js';
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
import type {
IPromptProcessor,
PromptPipelineContent,
} from './prompt-processors/types.js';
import {
SHORTHAND_ARGS_PLACEHOLDER,
SHELL_INJECTION_TRIGGER,
AT_FILE_INJECTION_TRIGGER,
} from './prompt-processors/types.js';
parseMarkdownCommand,
MarkdownCommandDefSchema,
} from './markdown-command-parser.js';
import {
ConfirmationRequiredError,
ShellProcessor,
} from './prompt-processors/shellProcessor.js';
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
createSlashCommandFromDefinition,
type CommandDefinition,
} from './command-factory.js';
import type { SlashCommand } from '../ui/commands/types.js';
interface CommandDirectory {
path: string;
@@ -96,7 +85,12 @@ export class FileCommandLoader implements ICommandLoader {
const commandDirs = this.getCommandDirectories();
for (const dirInfo of commandDirs) {
try {
const files = await glob('**/*.toml', {
// Scan both .toml and .md files
const tomlFiles = await glob('**/*.toml', {
...globOptions,
cwd: dirInfo.path,
});
const mdFiles = await glob('**/*.md', {
...globOptions,
cwd: dirInfo.path,
});
@@ -105,18 +99,28 @@ export class FileCommandLoader implements ICommandLoader {
return [];
}
const commandPromises = files.map((file) =>
this.parseAndAdaptFile(
// Process TOML files
const tomlCommandPromises = tomlFiles.map((file) =>
this.parseAndAdaptTomlFile(
path.join(dirInfo.path, file),
dirInfo.path,
dirInfo.extensionName,
),
);
const commands = (await Promise.all(commandPromises)).filter(
(cmd): cmd is SlashCommand => cmd !== null,
// Process Markdown files
const mdCommandPromises = mdFiles.map((file) =>
this.parseAndAdaptMarkdownFile(
path.join(dirInfo.path, file),
dirInfo.path,
dirInfo.extensionName,
),
);
const commands = (
await Promise.all([...tomlCommandPromises, ...mdCommandPromises])
).filter((cmd): cmd is SlashCommand => cmd !== null);
// Add all commands without deduplication
allCommands.push(...commands);
} catch (error) {
@@ -159,17 +163,73 @@ export class FileCommandLoader implements ICommandLoader {
.filter((ext) => ext.isActive)
.sort((a, b) => a.name.localeCompare(b.name)); // Sort alphabetically for deterministic loading
const extensionCommandDirs = activeExtensions.map((ext) => ({
path: path.join(ext.path, 'commands'),
extensionName: ext.name,
}));
// Collect command directories from each extension
for (const ext of activeExtensions) {
// Get commands paths from extension config
const commandsPaths = this.getExtensionCommandsPaths(ext);
dirs.push(...extensionCommandDirs);
for (const cmdPath of commandsPaths) {
dirs.push({
path: cmdPath,
extensionName: ext.name,
});
}
}
}
return dirs;
}
/**
* Get commands paths from an extension.
* Returns paths from config.commands if specified, otherwise defaults to the 'commands' directory.
*/
private getExtensionCommandsPaths(ext: {
path: string;
name: string;
}): string[] {
// Try to get extension config
try {
const configPath = path.join(ext.path, EXTENSIONS_CONFIG_FILENAME);
if (fsSync.existsSync(configPath)) {
const configContent = fsSync.readFileSync(configPath, 'utf-8');
const config = JSON.parse(configContent);
if (config.commands) {
const commandsArray = Array.isArray(config.commands)
? config.commands
: [config.commands];
return commandsArray
.map((cmdPath: string) =>
path.isAbsolute(cmdPath) ? cmdPath : path.join(ext.path, cmdPath),
)
.filter((cmdPath: string) => {
try {
return fsSync.existsSync(cmdPath);
} catch {
return false;
}
});
}
}
} catch (error) {
console.warn(`Failed to read extension config for ${ext.name}:`, error);
}
// Default fallback: use 'commands' directory
const defaultPath = path.join(ext.path, 'commands');
try {
if (fsSync.existsSync(defaultPath)) {
return [defaultPath];
}
} catch {
// Ignore
}
return [];
}
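For illustration, a minimal sketch of the extension layout this fallback accepts. The config filename behind EXTENSIONS_CONFIG_FILENAME and the exact field set are assumptions here; only the handling of `commands` (string or array, relative or absolute, missing paths dropped) comes from the code above.

import * as fs from 'node:fs';
import path from 'node:path';

// Hypothetical extension directory used only for this sketch.
const extDir = '/tmp/my-extension';
fs.mkdirSync(path.join(extDir, 'custom-commands'), { recursive: true });
fs.writeFileSync(
  path.join(extDir, 'qwen-extension.json'), // assumed value of EXTENSIONS_CONFIG_FILENAME
  JSON.stringify({
    name: 'my-extension',
    version: '1.0.0',
    // May be a single string or an array; relative entries resolve against extDir.
    commands: ['custom-commands', '/shared/team-commands'],
  }),
);
// getExtensionCommandsPaths would return only the entries that exist on disk,
// e.g. ['/tmp/my-extension/custom-commands'] if '/shared/team-commands' is missing.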
/**
* Parses a single .toml file and transforms it into a SlashCommand object.
* @param filePath The absolute path to the .toml file.
@@ -177,7 +237,7 @@ export class FileCommandLoader implements ICommandLoader {
* @param extensionName Optional extension name to prefix commands with.
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
*/
private async parseAndAdaptFile(
private async parseAndAdaptTomlFile(
filePath: string,
baseDir: string,
extensionName?: string,
@@ -216,104 +276,79 @@ export class FileCommandLoader implements ICommandLoader {
const validDef = validationResult.data;
const relativePathWithExt = path.relative(baseDir, filePath);
const relativePath = relativePathWithExt.substring(
0,
relativePathWithExt.length - 5, // length of '.toml'
);
const baseCommandName = relativePath
.split(path.sep)
// Sanitize each path segment to prevent ambiguity. Since ':' is our
// namespace separator, we replace any literal colons in filenames
// with underscores to avoid naming conflicts.
.map((segment) => segment.replaceAll(':', '_'))
.join(':');
// Add extension name tag for extension commands
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
let description = validDef.description || defaultDescription;
if (extensionName) {
description = `[${extensionName}] ${description}`;
}
const processors: IPromptProcessor[] = [];
const usesArgs = validDef.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
const usesShellInjection = validDef.prompt.includes(
SHELL_INJECTION_TRIGGER,
);
const usesAtFileInjection = validDef.prompt.includes(
AT_FILE_INJECTION_TRIGGER,
);
// 1. @-File Injection (Security First).
// This runs first to ensure we're not executing shell commands that
// could dynamically generate malicious @-paths.
if (usesAtFileInjection) {
processors.push(new AtFileProcessor(baseCommandName));
}
// 2. Argument and Shell Injection.
// This runs after file content has been safely injected.
if (usesShellInjection || usesArgs) {
processors.push(new ShellProcessor(baseCommandName));
}
// 3. Default Argument Handling.
// Appends the raw invocation if no explicit {{args}} are used.
if (!usesArgs) {
processors.push(new DefaultArgumentProcessor());
}
return {
name: baseCommandName,
description,
kind: CommandKind.FILE,
// Use factory to create command
return createSlashCommandFromDefinition(
filePath,
baseDir,
validDef,
extensionName,
action: async (
context: CommandContext,
_args: string,
): Promise<SlashCommandActionReturn> => {
if (!context.invocation) {
console.error(
`[FileCommandLoader] Critical error: Command '${baseCommandName}' was executed without invocation context.`,
);
return {
type: 'submit_prompt',
content: [{ text: validDef.prompt }], // Fallback to unprocessed prompt
};
}
'.toml',
);
}
try {
let processedContent: PromptPipelineContent = [
{ text: validDef.prompt },
];
for (const processor of processors) {
processedContent = await processor.process(
processedContent,
context,
);
}
/**
* Parses a single .md file and transforms it into a SlashCommand object.
* @param filePath The absolute path to the .md file.
* @param baseDir The root command directory for name calculation.
* @param extensionName Optional extension name to prefix commands with.
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
*/
private async parseAndAdaptMarkdownFile(
filePath: string,
baseDir: string,
extensionName?: string,
): Promise<SlashCommand | null> {
let fileContent: string;
try {
fileContent = await fs.readFile(filePath, 'utf-8');
} catch (error: unknown) {
console.error(
`[FileCommandLoader] Failed to read file ${filePath}:`,
error instanceof Error ? error.message : String(error),
);
return null;
}
return {
type: 'submit_prompt',
content: processedContent,
};
} catch (e) {
// Check if it's our specific error type
if (e instanceof ConfirmationRequiredError) {
// Halt and request confirmation from the UI layer.
return {
type: 'confirm_shell_commands',
commandsToConfirm: e.commandsToConfirm,
originalInvocation: {
raw: context.invocation.raw,
},
};
}
// Re-throw other errors to be handled by the global error handler.
throw e;
}
},
let parsed: ReturnType<typeof parseMarkdownCommand>;
try {
parsed = parseMarkdownCommand(fileContent);
} catch (error: unknown) {
console.error(
`[FileCommandLoader] Failed to parse Markdown file ${filePath}:`,
error instanceof Error ? error.message : String(error),
);
return null;
}
const validationResult = MarkdownCommandDefSchema.safeParse(parsed);
if (!validationResult.success) {
console.error(
`[FileCommandLoader] Skipping invalid command file: ${filePath}. Validation errors:`,
validationResult.error.flatten(),
);
return null;
}
const validDef = validationResult.data;
// Convert to CommandDefinition format
const definition: CommandDefinition = {
prompt: validDef.prompt,
description:
validDef.frontmatter?.description &&
typeof validDef.frontmatter.description === 'string'
? validDef.frontmatter.description
: undefined,
};
// Use factory to create command
return createSlashCommandFromDefinition(
filePath,
baseDir,
definition,
extensionName,
'.md',
);
}
}

View File

@@ -0,0 +1,159 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* This file contains helper functions for FileCommandLoader to create SlashCommand
* objects from parsed command definitions (TOML or Markdown).
*/
import path from 'node:path';
import type {
CommandContext,
SlashCommand,
SlashCommandActionReturn,
} from '../ui/commands/types.js';
import { CommandKind } from '../ui/commands/types.js';
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
import type {
IPromptProcessor,
PromptPipelineContent,
} from './prompt-processors/types.js';
import {
SHORTHAND_ARGS_PLACEHOLDER,
SHELL_INJECTION_TRIGGER,
AT_FILE_INJECTION_TRIGGER,
} from './prompt-processors/types.js';
import {
ConfirmationRequiredError,
ShellProcessor,
} from './prompt-processors/shellProcessor.js';
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
export interface CommandDefinition {
prompt: string;
description?: string;
}
/**
* Creates a SlashCommand from a parsed command definition.
* This function is used by both TOML and Markdown command loaders.
*
* @param filePath The absolute path to the command file
* @param baseDir The root command directory for name calculation
* @param definition The parsed command definition (prompt and optional description)
* @param extensionName Optional extension name to prefix commands with
* @param fileExtension The file extension (e.g., '.toml' or '.md')
* @returns A SlashCommand object
*/
export function createSlashCommandFromDefinition(
filePath: string,
baseDir: string,
definition: CommandDefinition,
extensionName: string | undefined,
fileExtension: string,
): SlashCommand {
const relativePathWithExt = path.relative(baseDir, filePath);
const relativePath = relativePathWithExt.substring(
0,
relativePathWithExt.length - fileExtension.length,
);
const baseCommandName = relativePath
.split(path.sep)
// Sanitize each path segment to prevent ambiguity. Since ':' is our
// namespace separator, we replace any literal colons in filenames
// with underscores to avoid naming conflicts.
.map((segment) => segment.replaceAll(':', '_'))
.join(':');
// Prefix command name with extension name if provided
const commandName = extensionName
? `${extensionName}:${baseCommandName}`
: baseCommandName;
// Add extension name tag for extension commands
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
let description = definition.description || defaultDescription;
if (extensionName) {
description = `[${extensionName}] ${description}`;
}
const processors: IPromptProcessor[] = [];
const usesArgs = definition.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
const usesShellInjection = definition.prompt.includes(
SHELL_INJECTION_TRIGGER,
);
const usesAtFileInjection = definition.prompt.includes(
AT_FILE_INJECTION_TRIGGER,
);
// 1. @-File Injection (Security First).
// This runs first to ensure we're not executing shell commands that
// could dynamically generate malicious @-paths.
if (usesAtFileInjection) {
processors.push(new AtFileProcessor(baseCommandName));
}
// 2. Argument and Shell Injection.
// This runs after file content has been safely injected.
if (usesShellInjection || usesArgs) {
processors.push(new ShellProcessor(baseCommandName));
}
// 3. Default Argument Handling.
// Appends the raw invocation if no explicit {{args}} are used.
if (!usesArgs) {
processors.push(new DefaultArgumentProcessor());
}
return {
name: commandName,
description,
kind: CommandKind.FILE,
extensionName,
action: async (
context: CommandContext,
_args: string,
): Promise<SlashCommandActionReturn> => {
if (!context.invocation) {
console.error(
`[FileCommandLoader] Critical error: Command '${commandName}' was executed without invocation context.`,
);
return {
type: 'submit_prompt',
content: [{ text: definition.prompt }], // Fallback to unprocessed prompt
};
}
try {
let processedContent: PromptPipelineContent = [
{ text: definition.prompt },
];
for (const processor of processors) {
processedContent = await processor.process(processedContent, context);
}
return {
type: 'submit_prompt',
content: processedContent,
};
} catch (e) {
// Check if it's our specific error type
if (e instanceof ConfirmationRequiredError) {
// Halt and request confirmation from the UI layer.
return {
type: 'confirm_shell_commands',
commandsToConfirm: e.commandsToConfirm,
originalInvocation: {
raw: context.invocation.raw,
},
};
}
// Re-throw other errors to be handled by the global error handler.
throw e;
}
},
};
}
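As a rough usage sketch (the paths and the extension name below are made up; the naming and description rules follow directly from the code above):

import { createSlashCommandFromDefinition } from './command-factory.js';

const command = createSlashCommandFromDefinition(
  '/home/user/.qwen/extensions/my-ext/commands/git/deploy.md', // filePath (hypothetical)
  '/home/user/.qwen/extensions/my-ext/commands',               // baseDir
  { prompt: 'Deploy the current branch.', description: 'Deploy helper' },
  'my-ext', // extensionName — becomes the command-name prefix
  '.md',
);

console.log(command.name);        // "my-ext:git:deploy"
console.log(command.description); // "[my-ext] Deploy helper"
// With no argument placeholder or shell/@-file triggers in the prompt, only the
// DefaultArgumentProcessor ends up attached.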

View File

@@ -0,0 +1,253 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import {
detectTomlCommands,
migrateTomlCommands,
generateMigrationPrompt,
} from './command-migration-tool.js';
describe('command-migration-tool', () => {
let tempDir: string;
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-migration-test-'));
});
afterEach(async () => {
await fs.rm(tempDir, { recursive: true, force: true });
});
describe('detectTomlCommands', () => {
it('should detect TOML files in directory', async () => {
// Create some TOML files
await fs.writeFile(
path.join(tempDir, 'cmd1.toml'),
'prompt = "test"',
'utf-8',
);
await fs.writeFile(
path.join(tempDir, 'cmd2.toml'),
'prompt = "test"',
'utf-8',
);
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toHaveLength(2);
expect(tomlFiles).toContain('cmd1.toml');
expect(tomlFiles).toContain('cmd2.toml');
});
it('should detect TOML files in subdirectories', async () => {
const subdir = path.join(tempDir, 'subdir');
await fs.mkdir(subdir);
await fs.writeFile(
path.join(subdir, 'nested.toml'),
'prompt = "test"',
'utf-8',
);
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toContain('subdir/nested.toml');
});
it('should return empty array for non-existent directory', async () => {
const nonExistent = path.join(tempDir, 'does-not-exist');
const tomlFiles = await detectTomlCommands(nonExistent);
expect(tomlFiles).toEqual([]);
});
it('should not detect non-TOML files', async () => {
await fs.writeFile(path.join(tempDir, 'file.txt'), 'text', 'utf-8');
await fs.writeFile(path.join(tempDir, 'file.md'), 'markdown', 'utf-8');
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toHaveLength(0);
});
});
describe('migrateTomlCommands', () => {
it('should migrate TOML file to Markdown', async () => {
const tomlContent = `prompt = "Test prompt"
description = "Test description"`;
await fs.writeFile(path.join(tempDir, 'test.toml'), tomlContent, 'utf-8');
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: true,
deleteOriginal: false,
});
expect(result.success).toBe(true);
expect(result.convertedFiles).toContain('test.toml');
expect(result.failedFiles).toHaveLength(0);
// Check Markdown file was created
const mdPath = path.join(tempDir, 'test.md');
const mdContent = await fs.readFile(mdPath, 'utf-8');
expect(mdContent).toContain('description: Test description');
expect(mdContent).toContain('Test prompt');
// Check backup was created (original renamed to .toml.backup)
const backupPath = path.join(tempDir, 'test.toml.backup');
const backupExists = await fs
.access(backupPath)
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(true);
// Original .toml file should not exist (renamed to .backup)
const tomlExists = await fs
.access(path.join(tempDir, 'test.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(false);
});
it('should delete original TOML when deleteOriginal is true', async () => {
await fs.writeFile(
path.join(tempDir, 'delete-me.toml'),
'prompt = "Test"',
'utf-8',
);
await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
deleteOriginal: true,
});
// Original should be deleted
const tomlExists = await fs
.access(path.join(tempDir, 'delete-me.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(false);
// Markdown should exist
const mdExists = await fs
.access(path.join(tempDir, 'delete-me.md'))
.then(() => true)
.catch(() => false);
expect(mdExists).toBe(true);
// Backup should not exist (createBackup was false)
const backupExists = await fs
.access(path.join(tempDir, 'delete-me.toml.backup'))
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(false);
});
it('should fail if Markdown file already exists', async () => {
await fs.writeFile(
path.join(tempDir, 'existing.toml'),
'prompt = "Test"',
'utf-8',
);
await fs.writeFile(
path.join(tempDir, 'existing.md'),
'Already exists',
'utf-8',
);
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
});
expect(result.success).toBe(false);
expect(result.failedFiles).toHaveLength(1);
expect(result.failedFiles[0].file).toBe('existing.toml');
expect(result.failedFiles[0].error).toContain('already exists');
});
it('should handle migration without backup', async () => {
await fs.writeFile(
path.join(tempDir, 'no-backup.toml'),
'prompt = "Test"',
'utf-8',
);
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
deleteOriginal: false,
});
expect(result.success).toBe(true);
// Original TOML file should still exist (no backup, no delete)
const tomlExists = await fs
.access(path.join(tempDir, 'no-backup.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(true);
// Backup should not exist
const backupExists = await fs
.access(path.join(tempDir, 'no-backup.toml.backup'))
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(false);
});
it('should return success with empty results for no TOML files', async () => {
const result = await migrateTomlCommands({
commandDir: tempDir,
});
expect(result.success).toBe(true);
expect(result.convertedFiles).toHaveLength(0);
expect(result.failedFiles).toHaveLength(0);
});
});
describe('generateMigrationPrompt', () => {
it('should generate prompt for few files', () => {
const files = ['cmd1.toml', 'cmd2.toml'];
const prompt = generateMigrationPrompt(files);
expect(prompt).toContain('Found 2 command files');
expect(prompt).toContain('cmd1.toml');
expect(prompt).toContain('cmd2.toml');
expect(prompt).toContain('qwen-code migrate-commands');
});
it('should truncate file list for many files', () => {
const files = Array.from({ length: 10 }, (_, i) => `cmd${i}.toml`);
const prompt = generateMigrationPrompt(files);
expect(prompt).toContain('Found 10 command files');
expect(prompt).toContain('... and 7 more');
});
it('should return empty string for no files', () => {
const prompt = generateMigrationPrompt([]);
expect(prompt).toBe('');
});
it('should use singular form for single file', () => {
const prompt = generateMigrationPrompt(['single.toml']);
expect(prompt).toContain('Found 1 command file');
// Don't check for plural since "files" appears in other parts of the message
});
});
});

View File

@@ -0,0 +1,169 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Tool for migrating TOML commands to Markdown format.
*/
import { promises as fs } from 'node:fs';
import path from 'node:path';
import { glob } from 'glob';
import { convertTomlToMarkdown } from '@qwen-code/qwen-code-core';
export interface MigrationResult {
success: boolean;
convertedFiles: string[];
failedFiles: Array<{ file: string; error: string }>;
}
export interface MigrationOptions {
/** Directory containing command files */
commandDir: string;
/** Whether to create backups (default: true) */
createBackup?: boolean;
/** Whether to delete original TOML files after migration (default: false) */
deleteOriginal?: boolean;
}
/**
* Scans a directory for TOML command files.
* @param commandDir Directory to scan
* @returns Array of TOML file paths (relative to commandDir)
*/
export async function detectTomlCommands(
commandDir: string,
): Promise<string[]> {
try {
await fs.access(commandDir);
} catch {
// Directory doesn't exist
return [];
}
const tomlFiles = await glob('**/*.toml', {
cwd: commandDir,
nodir: true,
dot: false,
});
return tomlFiles;
}
/**
* Migrates TOML command files to Markdown format.
* @param options Migration options
* @returns Migration result with details
*/
export async function migrateTomlCommands(
options: MigrationOptions,
): Promise<MigrationResult> {
const { commandDir, createBackup = true, deleteOriginal = false } = options;
const result: MigrationResult = {
success: true,
convertedFiles: [],
failedFiles: [],
};
// Detect TOML files
const tomlFiles = await detectTomlCommands(commandDir);
if (tomlFiles.length === 0) {
return result;
}
// Process each TOML file
for (const relativeFile of tomlFiles) {
const tomlPath = path.join(commandDir, relativeFile);
try {
// Read TOML file
const tomlContent = await fs.readFile(tomlPath, 'utf-8');
// Convert to Markdown
const markdownContent = convertTomlToMarkdown(tomlContent);
// Generate Markdown file path (same location, .md extension)
const markdownPath = tomlPath.replace(/\.toml$/, '.md');
// Check if Markdown file already exists
try {
await fs.access(markdownPath);
throw new Error(
`Markdown file already exists: ${path.basename(markdownPath)}`,
);
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
throw error;
}
// File doesn't exist, continue
}
// Write Markdown file
await fs.writeFile(markdownPath, markdownContent, 'utf-8');
// Backup original if requested (rename to .toml.backup)
if (createBackup) {
const backupPath = `${tomlPath}.backup`;
await fs.rename(tomlPath, backupPath);
} else if (deleteOriginal) {
// Delete original if requested and no backup
await fs.unlink(tomlPath);
}
result.convertedFiles.push(relativeFile);
} catch (error) {
result.success = false;
result.failedFiles.push({
file: relativeFile,
error: error instanceof Error ? error.message : String(error),
});
}
}
return result;
}
/**
* Generates a migration report message.
* @param tomlFiles List of TOML files found
* @returns Human-readable migration prompt message
*/
export function generateMigrationPrompt(tomlFiles: string[]): string {
if (tomlFiles.length === 0) {
return '';
}
const count = tomlFiles.length;
const fileList =
tomlFiles.length <= 5
? tomlFiles.map((f) => ` - ${f}`).join('\n')
: ` - ${tomlFiles.slice(0, 3).join('\n - ')}\n - ... and ${tomlFiles.length - 3} more`;
return `
⚠️ TOML Command Format Deprecation Notice
Found ${count} command file${count > 1 ? 's' : ''} in TOML format:
${fileList}
The TOML format for commands is being deprecated in favor of Markdown format.
Markdown format is more readable and easier to edit.
You can migrate these files automatically using:
qwen-code migrate-commands
Or manually convert each file:
- TOML: prompt = "..." / description = "..."
- Markdown: YAML frontmatter + content
The migration tool will:
✓ Convert TOML files to Markdown
✓ Create backups of original files
✓ Preserve all command functionality
TOML format will continue to work for now, but migration is recommended.
`.trim();
}
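A hedged end-to-end sketch of how a caller might drive the migration (the directory literal is illustrative; in the CLI it comes from Storage.getUserCommandsDir() or config.storage.getProjectCommandsDir()):

import {
  detectTomlCommands,
  migrateTomlCommands,
  generateMigrationPrompt,
} from './command-migration-tool.js';

const commandDir = '/home/user/.qwen/commands'; // hypothetical

const tomlFiles = await detectTomlCommands(commandDir);
if (tomlFiles.length > 0) {
  console.log(generateMigrationPrompt(tomlFiles));
  const result = await migrateTomlCommands({
    commandDir,
    createBackup: true,    // originals are renamed to *.toml.backup
    deleteOriginal: false, // only consulted when createBackup is false
  });
  console.log(
    `Converted ${result.convertedFiles.length} file(s), ${result.failedFiles.length} failure(s).`,
  );
}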

View File

@@ -0,0 +1,144 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import {
parseMarkdownCommand,
MarkdownCommandDefSchema,
} from './markdown-command-parser.js';
describe('parseMarkdownCommand', () => {
it('should parse markdown with YAML frontmatter', () => {
const content = `---
description: Test command
---
This is the prompt content.`;
const result = parseMarkdownCommand(content);
expect(result).toEqual({
frontmatter: {
description: 'Test command',
},
prompt: 'This is the prompt content.',
});
});
it('should parse markdown without frontmatter', () => {
const content = 'This is just a prompt without frontmatter.';
const result = parseMarkdownCommand(content);
expect(result).toEqual({
prompt: 'This is just a prompt without frontmatter.',
});
});
it('should handle multi-line prompts', () => {
const content = `---
description: Multi-line test
---
First line of prompt.
Second line of prompt.
Third line of prompt.`;
const result = parseMarkdownCommand(content);
expect(result.prompt).toBe(
'First line of prompt.\nSecond line of prompt.\nThird line of prompt.',
);
});
it('should trim whitespace from prompt', () => {
const content = `---
description: Whitespace test
---
Prompt with leading and trailing spaces
`;
const result = parseMarkdownCommand(content);
expect(result.prompt).toBe('Prompt with leading and trailing spaces');
});
it('should handle empty frontmatter', () => {
const content = `---
---
Prompt content after empty frontmatter.`;
const result = parseMarkdownCommand(content);
// Empty YAML frontmatter returns undefined, not {}
expect(result.frontmatter).toBeUndefined();
expect(result.prompt).toBe('Prompt content after empty frontmatter.');
});
it('should handle invalid YAML frontmatter gracefully', () => {
// The YAML parser we use is quite tolerant, so most "invalid" YAML
// actually parses successfully. This test verifies that behavior.
const content = `---
description: test
---
Prompt content.`;
const result = parseMarkdownCommand(content);
expect(result.frontmatter).toBeDefined();
expect(result.prompt).toBe('Prompt content.');
});
});
describe('MarkdownCommandDefSchema', () => {
it('should validate valid markdown command def', () => {
const validDef = {
frontmatter: {
description: 'Test description',
},
prompt: 'Test prompt',
};
const result = MarkdownCommandDefSchema.safeParse(validDef);
expect(result.success).toBe(true);
});
it('should validate markdown command def without frontmatter', () => {
const validDef = {
prompt: 'Test prompt',
};
const result = MarkdownCommandDefSchema.safeParse(validDef);
expect(result.success).toBe(true);
});
it('should reject command def without prompt', () => {
const invalidDef = {
frontmatter: {
description: 'Test description',
},
};
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
expect(result.success).toBe(false);
});
it('should reject command def with non-string prompt', () => {
const invalidDef = {
prompt: 123,
};
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
expect(result.success).toBe(false);
});
});

View File

@@ -0,0 +1,64 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { z } from 'zod';
import { parse as parseYaml } from '@qwen-code/qwen-code-core';
/**
* Defines the Zod schema for a Markdown command definition file.
* The frontmatter contains optional metadata, and the body is the prompt.
*/
export const MarkdownCommandDefSchema = z.object({
frontmatter: z
.object({
description: z.string().optional(),
})
.optional(),
prompt: z.string({
required_error: 'The prompt content is required.',
invalid_type_error: 'The prompt content must be a string.',
}),
});
export type MarkdownCommandDef = z.infer<typeof MarkdownCommandDefSchema>;
/**
* Parses a Markdown command file with optional YAML frontmatter.
* @param content The file content
* @returns Parsed command definition with frontmatter and prompt
*/
export function parseMarkdownCommand(content: string): MarkdownCommandDef {
// Match YAML frontmatter pattern: ---\n...\n---\n
// Allow empty frontmatter (---\n---\n): the lazy ([\s\S]*?) group can match zero characters.
const frontmatterRegex = /^---\n([\s\S]*?)---\n([\s\S]*)$/;
const match = content.match(frontmatterRegex);
if (!match) {
// No frontmatter, entire content is the prompt
return {
prompt: content.trim(),
};
}
const [, frontmatterYaml, body] = match;
// Parse YAML frontmatter if not empty
let frontmatter: Record<string, unknown> | undefined;
if (frontmatterYaml.trim()) {
try {
frontmatter = parseYaml(frontmatterYaml) as Record<string, unknown>;
} catch (error) {
throw new Error(
`Failed to parse YAML frontmatter: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
return {
frontmatter,
prompt: body.trim(),
};
}
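A quick sketch of the round trip this parser enables (the command text is made up):

import {
  parseMarkdownCommand,
  MarkdownCommandDefSchema,
} from './markdown-command-parser.js';

const fileContent = [
  '---',
  'description: Summarize the staged changes',
  '---',
  'Summarize the staged changes in this repository.',
].join('\n');

const parsed = parseMarkdownCommand(fileContent);
// parsed.frontmatter -> { description: 'Summarize the staged changes' }
// parsed.prompt      -> 'Summarize the staged changes in this repository.'

const validated = MarkdownCommandDefSchema.safeParse(parsed);
console.log(validated.success); // true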

View File

@@ -0,0 +1,5 @@
---
description: Example markdown command
---
This is an example prompt from a markdown file.

View File

@@ -1,49 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import {
EXTENSIONS_CONFIG_FILENAME,
INSTALL_METADATA_FILENAME,
} from '../config/extension.js';
import {
type MCPServerConfig,
type ExtensionInstallMetadata,
} from '@qwen-code/qwen-code-core';
export function createExtension({
extensionsDir = 'extensions-dir',
name = 'my-extension',
version = '1.0.0',
addContextFile = false,
contextFileName = undefined as string | undefined,
mcpServers = {} as Record<string, MCPServerConfig>,
installMetadata = undefined as ExtensionInstallMetadata | undefined,
} = {}): string {
const extDir = path.join(extensionsDir, name);
fs.mkdirSync(extDir, { recursive: true });
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name, version, contextFileName, mcpServers }),
);
if (addContextFile) {
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
}
if (contextFileName) {
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
}
if (installMetadata) {
fs.writeFileSync(
path.join(extDir, INSTALL_METADATA_FILENAME),
JSON.stringify(installMetadata),
);
}
return extDir;
}

View File

@@ -76,7 +76,6 @@ vi.mock('./hooks/useFolderTrust.js');
vi.mock('./hooks/useIdeTrustListener.js');
vi.mock('./hooks/useMessageQueue.js');
vi.mock('./hooks/useAutoAcceptIndicator.js');
vi.mock('./hooks/useWorkspaceMigration.js');
vi.mock('./hooks/useGitBranchName.js');
vi.mock('./contexts/VimModeContext.js');
vi.mock('./contexts/SessionContext.js');
@@ -103,7 +102,6 @@ import { useFolderTrust } from './hooks/useFolderTrust.js';
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
import { useMessageQueue } from './hooks/useMessageQueue.js';
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
import { useGitBranchName } from './hooks/useGitBranchName.js';
import { useVimMode } from './contexts/VimModeContext.js';
import { useSessionStats } from './contexts/SessionContext.js';
@@ -134,7 +132,6 @@ describe('AppContainer State Management', () => {
const mockedUseIdeTrustListener = useIdeTrustListener as Mock;
const mockedUseMessageQueue = useMessageQueue as Mock;
const mockedUseAutoAcceptIndicator = useAutoAcceptIndicator as Mock;
const mockedUseWorkspaceMigration = useWorkspaceMigration as Mock;
const mockedUseGitBranchName = useGitBranchName as Mock;
const mockedUseVimMode = useVimMode as Mock;
const mockedUseSessionStats = useSessionStats as Mock;
@@ -239,12 +236,6 @@ describe('AppContainer State Management', () => {
getQueuedMessagesText: vi.fn().mockReturnValue(''),
});
mockedUseAutoAcceptIndicator.mockReturnValue(false);
mockedUseWorkspaceMigration.mockReturnValue({
showWorkspaceMigrationDialog: false,
workspaceExtensions: [],
onWorkspaceMigrationDialogOpen: vi.fn(),
onWorkspaceMigrationDialogClose: vi.fn(),
});
mockedUseGitBranchName.mockReturnValue('main');
mockedUseVimMode.mockReturnValue({
isVimEnabled: false,

View File

@@ -38,6 +38,7 @@ import {
getErrorMessage,
getAllGeminiMdFilenames,
ShellExecutionService,
Storage,
} from '@qwen-code/qwen-code-core';
import { buildResumedHistoryItems } from './utils/resumeHistoryUtils.js';
import { validateAuthMethod } from '../config/auth.js';
@@ -76,6 +77,9 @@ import { useLoadingIndicator } from './hooks/useLoadingIndicator.js';
import { useFolderTrust } from './hooks/useFolderTrust.js';
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
import { type IdeIntegrationNudgeResult } from './IdeIntegrationNudge.js';
import { type CommandMigrationNudgeResult } from './CommandFormatMigrationNudge.js';
import { useCommandMigration } from './hooks/useCommandMigration.js';
import { migrateTomlCommands } from '../services/command-migration-tool.js';
import { appEvents, AppEvent } from '../utils/events.js';
import { type UpdateObject } from './utils/updateCheck.js';
import { setUpdateHandler } from '../utils/handleAutoUpdate.js';
@@ -83,10 +87,12 @@ import { ConsolePatcher } from './utils/ConsolePatcher.js';
import { registerCleanup, runExitCleanup } from '../utils/cleanup.js';
import { useMessageQueue } from './hooks/useMessageQueue.js';
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
import { useSessionStats } from './contexts/SessionContext.js';
import { useGitBranchName } from './hooks/useGitBranchName.js';
import { useExtensionUpdates } from './hooks/useExtensionUpdates.js';
import {
useExtensionUpdates,
useConfirmUpdateRequests,
} from './hooks/useExtensionUpdates.js';
import { ShellFocusContext } from './contexts/ShellFocusContext.js';
import { t } from '../i18n/index.js';
import { useWelcomeBack } from './hooks/useWelcomeBack.js';
@@ -97,6 +103,7 @@ import { processVisionSwitchOutcome } from './hooks/useVisionAutoSwitch.js';
import { useSubagentCreateDialog } from './hooks/useSubagentCreateDialog.js';
import { useAgentsManagerDialog } from './hooks/useAgentsManagerDialog.js';
import { useAttentionNotifications } from './hooks/useAttentionNotifications.js';
import { requestConsentInteractive } from '../commands/extensions/consent.js';
const CTRL_EXIT_PROMPT_DURATION_MS = 1000;
@@ -157,15 +164,21 @@ export const AppContainer = (props: AppContainerProps) => {
config.isTrustedFolder(),
);
const extensions = config.getExtensions();
const extensionManager = config.getExtensionManager();
extensionManager.setRequestConsent((description) =>
requestConsentInteractive(description, addConfirmUpdateExtensionRequest),
);
const { addConfirmUpdateExtensionRequest, confirmUpdateExtensionRequests } =
useConfirmUpdateRequests();
const {
extensionsUpdateState,
extensionsUpdateStateInternal,
dispatchExtensionStateUpdate,
confirmUpdateExtensionRequests,
addConfirmUpdateExtensionRequest,
} = useExtensionUpdates(
extensions,
extensionManager,
historyManager.addItem,
config.getWorkingDir(),
);
@@ -436,13 +449,6 @@ export const AppContainer = (props: AppContainerProps) => {
remount: refreshStatic,
});
const {
showWorkspaceMigrationDialog,
workspaceExtensions,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
} = useWorkspaceMigration(settings);
const { toggleVimEnabled } = useVimMode();
const {
@@ -578,11 +584,11 @@ export const AppContainer = (props: AppContainerProps) => {
: [],
config.getDebugMode(),
config.getFileService(),
settings.merged,
config.getExtensionContextFilePaths(),
config.isTrustedFolder(),
settings.merged.context?.importFormat || 'tree', // Use setting or default to 'tree'
config.getFileFilteringOptions(),
config.getDiscoveryMaxDirs(),
);
config.setUserMemory(memoryContent);
@@ -845,6 +851,13 @@ export const AppContainer = (props: AppContainerProps) => {
!idePromptAnswered,
);
// Command migration nudge
const {
showMigrationNudge: shouldShowCommandMigrationNudge,
tomlFiles: commandMigrationTomlFiles,
setShowMigrationNudge: setShowCommandMigrationNudge,
} = useCommandMigration(settings, config.storage);
const [showErrorDetails, setShowErrorDetails] = useState<boolean>(false);
const [showToolDescriptions, setShowToolDescriptions] =
useState<boolean>(false);
@@ -935,6 +948,92 @@ export const AppContainer = (props: AppContainerProps) => {
[handleSlashCommand, settings],
);
const handleCommandMigrationComplete = useCallback(
async (result: CommandMigrationNudgeResult) => {
setShowCommandMigrationNudge(false);
if (result.userSelection === 'yes') {
// Perform migration for both workspace and user levels
try {
const results = [];
// Migrate workspace commands
const workspaceCommandsDir = config.storage.getProjectCommandsDir();
const workspaceResult = await migrateTomlCommands({
commandDir: workspaceCommandsDir,
createBackup: true,
deleteOriginal: false,
});
if (
workspaceResult.convertedFiles.length > 0 ||
workspaceResult.failedFiles.length > 0
) {
results.push({ level: 'workspace', result: workspaceResult });
}
// Migrate user commands
const userCommandsDir = Storage.getUserCommandsDir();
const userResult = await migrateTomlCommands({
commandDir: userCommandsDir,
createBackup: true,
deleteOriginal: false,
});
if (
userResult.convertedFiles.length > 0 ||
userResult.failedFiles.length > 0
) {
results.push({ level: 'user', result: userResult });
}
// Report results
for (const { level, result: migrationResult } of results) {
if (
migrationResult.success &&
migrationResult.convertedFiles.length > 0
) {
historyManager.addItem(
{
type: MessageType.INFO,
text: `[${level}] Successfully migrated ${migrationResult.convertedFiles.length} command file${migrationResult.convertedFiles.length > 1 ? 's' : ''} to Markdown format. Original files backed up as .toml.backup`,
},
Date.now(),
);
}
if (migrationResult.failedFiles.length > 0) {
historyManager.addItem(
{
type: MessageType.ERROR,
text: `[${level}] Failed to migrate ${migrationResult.failedFiles.length} file${migrationResult.failedFiles.length > 1 ? 's' : ''}:\n${migrationResult.failedFiles.map((f) => `${f.file}: ${f.error}`).join('\n')}`,
},
Date.now(),
);
}
}
if (results.length === 0) {
historyManager.addItem(
{
type: MessageType.INFO,
text: 'No TOML files found to migrate.',
},
Date.now(),
);
}
} catch (error) {
historyManager.addItem(
{
type: MessageType.ERROR,
text: `❌ Migration failed: ${getErrorMessage(error)}`,
},
Date.now(),
);
}
}
},
[historyManager, setShowCommandMigrationNudge, config.storage],
);
const { elapsedTime, currentLoadingPhrase } = useLoadingIndicator(
streamingState,
settings.merged.ui?.customWittyPhrases,
@@ -1177,8 +1276,8 @@ export const AppContainer = (props: AppContainerProps) => {
const dialogsVisible =
showWelcomeBackDialog ||
showWorkspaceMigrationDialog ||
shouldShowIdePrompt ||
shouldShowCommandMigrationNudge ||
isFolderTrustDialogOpen ||
!!shellConfirmationRequest ||
!!confirmationRequest ||
@@ -1244,6 +1343,8 @@ export const AppContainer = (props: AppContainerProps) => {
suggestionsWidth,
isInputActive,
shouldShowIdePrompt,
shouldShowCommandMigrationNudge,
commandMigrationTomlFiles,
isFolderTrustDialogOpen: isFolderTrustDialogOpen ?? false,
isTrustedFolder,
constrainHeight,
@@ -1260,8 +1361,6 @@ export const AppContainer = (props: AppContainerProps) => {
historyRemountKey,
messageQueue,
showAutoAcceptIndicator,
showWorkspaceMigrationDialog,
workspaceExtensions,
currentModel,
contextFileNames,
errorCount,
@@ -1333,6 +1432,8 @@ export const AppContainer = (props: AppContainerProps) => {
suggestionsWidth,
isInputActive,
shouldShowIdePrompt,
shouldShowCommandMigrationNudge,
commandMigrationTomlFiles,
isFolderTrustDialogOpen,
isTrustedFolder,
constrainHeight,
@@ -1349,8 +1450,6 @@ export const AppContainer = (props: AppContainerProps) => {
historyRemountKey,
messageQueue,
showAutoAcceptIndicator,
showWorkspaceMigrationDialog,
workspaceExtensions,
contextFileNames,
errorCount,
availableTerminalHeight,
@@ -1404,14 +1503,13 @@ export const AppContainer = (props: AppContainerProps) => {
setShellModeActive,
vimHandleInput,
handleIdePromptComplete,
handleCommandMigrationComplete,
handleFolderTrustSelect,
setConstrainHeight,
onEscapePromptChange: handleEscapePromptChange,
refreshStatic,
handleFinalSubmit,
handleClearScreen,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
// Vision switch dialog
handleVisionSwitchSelect,
// Welcome back dialog
@@ -1441,14 +1539,13 @@ export const AppContainer = (props: AppContainerProps) => {
setShellModeActive,
vimHandleInput,
handleIdePromptComplete,
handleCommandMigrationComplete,
handleFolderTrustSelect,
setConstrainHeight,
handleEscapePromptChange,
refreshStatic,
handleFinalSubmit,
handleClearScreen,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
handleVisionSwitchSelect,
handleWelcomeBackSelection,
handleWelcomeBackClose,

View File

@@ -0,0 +1,90 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { Box, Text } from 'ink';
import type { RadioSelectItem } from './components/shared/RadioButtonSelect.js';
import { RadioButtonSelect } from './components/shared/RadioButtonSelect.js';
import { useKeypress } from './hooks/useKeypress.js';
import { theme } from './semantic-colors.js';
export type CommandMigrationNudgeResult = {
userSelection: 'yes' | 'no';
};
interface CommandFormatMigrationNudgeProps {
tomlFiles: string[];
onComplete: (result: CommandMigrationNudgeResult) => void;
}
export function CommandFormatMigrationNudge({
tomlFiles,
onComplete,
}: CommandFormatMigrationNudgeProps) {
useKeypress(
(key) => {
if (key.name === 'escape') {
onComplete({
userSelection: 'no',
});
}
},
{ isActive: true },
);
const OPTIONS: Array<RadioSelectItem<CommandMigrationNudgeResult>> = [
{
label: 'Yes',
value: {
userSelection: 'yes',
},
key: 'Yes',
},
{
label: 'No (esc)',
value: {
userSelection: 'no',
},
key: 'No (esc)',
},
];
const count = tomlFiles.length;
const fileList =
count <= 3
? tomlFiles.map((f) => `${f}`).join('\n')
: `${tomlFiles.slice(0, 2).join('\n • ')}\n • ... and ${count - 2} more`;
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.status.warning}
padding={1}
width="100%"
marginLeft={1}
>
<Box marginBottom={1} flexDirection="column">
<Text>
<Text color={theme.status.warning}>{'⚠️ '}</Text>
<Text bold>Command Format Migration</Text>
</Text>
<Text color={theme.text.secondary}>
{`Found ${count} TOML command file${count > 1 ? 's' : ''}:`}
</Text>
<Text color={theme.text.secondary}>{fileList}</Text>
<Text>{''}</Text>
<Text color={theme.text.secondary}>
The TOML format is deprecated. Would you like to migrate them to
Markdown format?
</Text>
<Text color={theme.text.secondary}>
(Backups will be created and original files will be preserved)
</Text>
</Box>
<RadioButtonSelect items={OPTIONS} onSelect={onComplete} />
</Box>
);
}
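A minimal sketch of mounting the component on its own (in the real app it is rendered by AppContainer and driven by useCommandMigration and handleCommandMigrationComplete):

import { render } from 'ink';
import { CommandFormatMigrationNudge } from './CommandFormatMigrationNudge.js';

render(
  <CommandFormatMigrationNudge
    tomlFiles={['deploy.toml', 'review.toml']} // illustrative file names
    onComplete={(result) => {
      console.log(result.userSelection === 'yes' ? 'migrate' : 'skip');
    }}
  />,
);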

View File

@@ -4,13 +4,6 @@
* SPDX-License-Identifier: Apache-2.0
*/
import { requestConsentInteractive } from '../../config/extension.js';
import {
updateAllUpdatableExtensions,
type ExtensionUpdateInfo,
updateExtension,
checkForAllExtensionUpdates,
} from '../../config/extensions/update.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionUpdateState } from '../state/extensions.js';
import { MessageType } from '../types.js';
@@ -20,8 +13,34 @@ import {
CommandKind,
} from './types.js';
import { t } from '../../i18n/index.js';
import type { ExtensionUpdateInfo } from '@qwen-code/qwen-code-core';
function showMessageIfNoExtensions(
context: CommandContext,
extensions: unknown[],
): boolean {
if (extensions.length === 0) {
context.ui.addItem(
{
type: MessageType.INFO,
text: 'No extensions installed. Run `/extensions explore` to check out the gallery.',
},
Date.now(),
);
return true;
}
return false;
}
async function listAction(context: CommandContext) {
const extensions = context.services.config
? context.services.config.getExtensions()
: [];
if (showMessageIfNoExtensions(context, extensions)) {
return;
}
context.ui.addItem(
{
type: MessageType.EXTENSIONS_LIST,
@@ -34,7 +53,6 @@ async function updateAction(context: CommandContext, args: string) {
const updateArgs = args.split(' ').filter((value) => value.length > 0);
const all = updateArgs.length === 1 && updateArgs[0] === '--all';
const names = all ? undefined : updateArgs;
let updateInfos: ExtensionUpdateInfo[] = [];
if (!all && names?.length === 0) {
context.ui.addItem(
@@ -47,29 +65,40 @@ async function updateAction(context: CommandContext, args: string) {
return;
}
let updateInfos: ExtensionUpdateInfo[] = [];
const extensionManager = context.services.config!.getExtensionManager();
const extensions = context.services.config
? context.services.config.getExtensions()
: [];
if (showMessageIfNoExtensions(context, extensions)) {
return Promise.resolve();
}
try {
await checkForAllExtensionUpdates(
context.services.config!.getExtensions(),
context.ui.dispatchExtensionStateUpdate,
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
await extensionManager.checkForAllExtensionUpdates((extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
context.ui.setPendingItem({
type: MessageType.EXTENSIONS_LIST,
});
if (all) {
updateInfos = await updateAllUpdatableExtensions(
context.services.config!.getWorkingDir(),
// We don't have the ability to prompt for consent yet in this flow.
(description) =>
requestConsentInteractive(
description,
context.ui.addConfirmUpdateExtensionRequest,
),
context.services.config!.getExtensions(),
updateInfos = await extensionManager.updateAllUpdatableExtensions(
context.ui.extensionsUpdateState,
context.ui.dispatchExtensionStateUpdate,
(extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
} else if (names?.length) {
const workingDir = context.services.config!.getWorkingDir();
const extensions = context.services.config!.getExtensions();
for (const name of names) {
const extension = extensions.find(
@@ -85,17 +114,15 @@ async function updateAction(context: CommandContext, args: string) {
);
continue;
}
const updateInfo = await updateExtension(
const updateInfo = await extensionManager.updateExtension(
extension,
workingDir,
(description) =>
requestConsentInteractive(
description,
context.ui.addConfirmUpdateExtensionRequest,
),
context.ui.extensionsUpdateState.get(extension.name)?.status ??
ExtensionUpdateState.UNKNOWN,
context.ui.dispatchExtensionStateUpdate,
(extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
if (updateInfo) updateInfos.push(updateInfo);
}

View File

@@ -13,6 +13,16 @@ import { createMockCommandContext } from '../../test-utils/mockCommandContext.js
vi.mock('../../i18n/index.js', () => ({
setLanguageAsync: vi.fn().mockResolvedValue(undefined),
getCurrentLanguage: vi.fn().mockReturnValue('en'),
detectSystemLanguage: vi.fn().mockReturnValue('en'),
getLanguageNameFromLocale: vi.fn((locale: string) => {
const map: Record<string, string> = {
zh: 'Chinese',
en: 'English',
ru: 'Russian',
de: 'German',
};
return map[locale] || 'English';
}),
t: vi.fn((key: string) => key),
}));
@@ -61,7 +71,10 @@ vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
// Import modules after mocking
import * as i18n from '../../i18n/index.js';
import { languageCommand } from './languageCommand.js';
import {
languageCommand,
initializeLlmOutputLanguage,
} from './languageCommand.js';
describe('languageCommand', () => {
let mockContext: CommandContext;
@@ -186,6 +199,39 @@ describe('languageCommand', () => {
content: expect.stringContaining('Chinese'),
});
});
it('should parse Unicode LLM output language from marker', async () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(
[
'# ⚠️ CRITICAL: 中文 Output Language Rule - HIGHEST PRIORITY ⚠️',
'<!-- qwen-code:llm-output-language: 中文 -->',
'',
'Some other content...',
].join('\n'),
);
vi.mocked(i18n.t).mockImplementation(
(key: string, params?: Record<string, string>) => {
if (params && key.includes('{{lang}}')) {
return key.replace('{{lang}}', params['lang'] || '');
}
return key;
},
);
if (!languageCommand.action) {
throw new Error('The language command must have an action.');
}
const result = await languageCommand.action(mockContext, '');
expect(result).toEqual({
type: 'message',
messageType: 'info',
content: expect.stringContaining('中文'),
});
});
});
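The marker exercised above is a single HTML comment line embedded in output-language.md. A hypothetical extraction sketch (the real parsing lives in languageCommand.ts, which this diff only shows in part; the prefix constant is the one defined there):

const LLM_OUTPUT_LANGUAGE_MARKER_PREFIX = 'qwen-code:llm-output-language:';

const line = '<!-- qwen-code:llm-output-language: 中文 -->';
const match = line.match(
  new RegExp(`<!--\\s*${LLM_OUTPUT_LANGUAGE_MARKER_PREFIX}\\s*(.+?)\\s*-->`),
);
console.log(match?.[1]); // "中文"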
describe('main command action - config not available', () => {
@@ -400,6 +446,34 @@ describe('languageCommand', () => {
});
});
it('should normalize locale code "ru" to "Russian"', async () => {
if (!languageCommand.action) {
throw new Error('The language command must have an action.');
}
await languageCommand.action(mockContext, 'output ru');
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('output-language.md'),
expect.stringContaining('Russian'),
'utf-8',
);
});
it('should normalize locale code "de" to "German"', async () => {
if (!languageCommand.action) {
throw new Error('The language command must have an action.');
}
await languageCommand.action(mockContext, 'output de');
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('output-language.md'),
expect.stringContaining('German'),
'utf-8',
);
});
it('should handle file write errors gracefully', async () => {
vi.mocked(fs.writeFileSync).mockImplementation(() => {
throw new Error('Permission denied');
@@ -481,6 +555,8 @@ describe('languageCommand', () => {
const nestedNames = uiSubcommand?.subCommands?.map((c) => c.name);
expect(nestedNames).toContain('zh-CN');
expect(nestedNames).toContain('en-US');
expect(nestedNames).toContain('ru-RU');
expect(nestedNames).toContain('de-DE');
});
it('should have action that sets language', async () => {
@@ -542,16 +618,9 @@ describe('languageCommand', () => {
const enUSSubcommand = uiSubcommand?.subCommands?.find(
(c) => c.name === 'en-US',
);
it('zh-CN should have aliases', () => {
expect(zhCNSubcommand?.altNames).toContain('zh');
expect(zhCNSubcommand?.altNames).toContain('chinese');
});
it('en-US should have aliases', () => {
expect(enUSSubcommand?.altNames).toContain('en');
expect(enUSSubcommand?.altNames).toContain('english');
});
const deDESubcommand = uiSubcommand?.subCommands?.find(
(c) => c.name === 'de-DE',
);
it('zh-CN action should set Chinese', async () => {
if (!zhCNSubcommand?.action) {
@@ -583,6 +652,21 @@ describe('languageCommand', () => {
});
});
it('de-DE action should set German', async () => {
if (!deDESubcommand?.action) {
throw new Error('de-DE subcommand must have an action.');
}
const result = await deDESubcommand.action(mockContext, '');
expect(i18n.setLanguageAsync).toHaveBeenCalledWith('de');
expect(result).toEqual({
type: 'message',
messageType: 'info',
content: expect.stringContaining('UI language changed'),
});
});
it('should reject extra arguments', async () => {
if (!zhCNSubcommand?.action) {
throw new Error('zh-CN subcommand must have an action.');
@@ -597,4 +681,74 @@ describe('languageCommand', () => {
});
});
});
describe('initializeLlmOutputLanguage', () => {
beforeEach(() => {
vi.clearAllMocks();
vi.mocked(fs.existsSync).mockReturnValue(false);
vi.mocked(fs.mkdirSync).mockImplementation(() => undefined);
vi.mocked(fs.writeFileSync).mockImplementation(() => undefined);
});
it('should create file when it does not exist', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
vi.mocked(i18n.detectSystemLanguage).mockReturnValue('en');
initializeLlmOutputLanguage();
expect(fs.mkdirSync).toHaveBeenCalled();
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('output-language.md'),
expect.stringContaining('English'),
'utf-8',
);
});
it('should NOT overwrite existing file', () => {
vi.mocked(fs.existsSync).mockReturnValue(true);
initializeLlmOutputLanguage();
expect(fs.writeFileSync).not.toHaveBeenCalled();
});
it('should detect Chinese locale and create Chinese rule file', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
vi.mocked(i18n.detectSystemLanguage).mockReturnValue('zh');
initializeLlmOutputLanguage();
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('output-language.md'),
expect.stringContaining('Chinese'),
'utf-8',
);
});
it('should detect Russian locale and create Russian rule file', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
vi.mocked(i18n.detectSystemLanguage).mockReturnValue('ru');
initializeLlmOutputLanguage();
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('output-language.md'),
expect.stringContaining('Russian'),
'utf-8',
);
});
it('should detect German locale and create German rule file', () => {
vi.mocked(fs.existsSync).mockReturnValue(false);
vi.mocked(i18n.detectSystemLanguage).mockReturnValue('de');
initializeLlmOutputLanguage();
expect(fs.writeFileSync).toHaveBeenCalledWith(
expect.stringContaining('output-language.md'),
expect.stringContaining('German'),
'utf-8',
);
});
});
});

View File

@@ -1,6 +1,6 @@
/**
* @license
* Copyright 2025 Google LLC
* Copyright 2025 Qwen team
* SPDX-License-Identifier: Apache-2.0
*/
@@ -15,51 +15,72 @@ import { SettingScope } from '../../config/settings.js';
import {
setLanguageAsync,
getCurrentLanguage,
detectSystemLanguage,
getLanguageNameFromLocale,
type SupportedLanguage,
t,
} from '../../i18n/index.js';
import {
SUPPORTED_LANGUAGES,
type LanguageDefinition,
} from '../../i18n/languages.js';
import * as fs from 'node:fs';
import * as path from 'node:path';
import { Storage } from '@qwen-code/qwen-code-core';
const LLM_OUTPUT_LANGUAGE_RULE_FILENAME = 'output-language.md';
const LLM_OUTPUT_LANGUAGE_MARKER_PREFIX = 'qwen-code:llm-output-language:';
function parseUiLanguageArg(input: string): SupportedLanguage | null {
const lowered = input.trim().toLowerCase();
if (!lowered) return null;
for (const lang of SUPPORTED_LANGUAGES) {
if (
lowered === lang.code ||
lowered === lang.id.toLowerCase() ||
lowered === lang.fullName.toLowerCase()
) {
return lang.code;
}
}
return null;
}
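// Illustrative usage sketch (the exact SUPPORTED_LANGUAGES entries are an
// assumption; each is taken to look like { code: 'zh', id: 'zh-CN', fullName: '中文' }):
//   parseUiLanguageArg('zh-CN');   // -> 'zh' (matches the id, case-insensitively)
//   parseUiLanguageArg('English'); // -> 'en' (matches the full name)
//   parseUiLanguageArg('klingon'); // -> null (no matching language definition)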
function formatUiLanguageDisplay(lang: SupportedLanguage): string {
const option = SUPPORTED_LANGUAGES.find((o) => o.code === lang);
return option ? `${option.fullName} (${option.id})` : lang;
}
function sanitizeLanguageForMarker(language: string): string {
// HTML comments cannot safely contain "--" or end markers like "-->" or "--!>".
// Also avoid newlines to keep the marker single-line and robust to parsing.
return language
.replace(/[\r\n]/g, ' ')
.replace(/--!?>/g, '')
.replace(/--/g, '');
}
/**
* Generates the LLM output language rule template based on the language name.
*/
function generateLlmOutputLanguageRule(language: string): string {
return `# ⚠️ CRITICAL: ${language} Output Language Rule - HIGHEST PRIORITY ⚠️
const markerLanguage = sanitizeLanguageForMarker(language);
return `# Output language preference: ${language}
<!-- ${LLM_OUTPUT_LANGUAGE_MARKER_PREFIX} ${markerLanguage} -->
## 🚨 MANDATORY RULE - NO EXCEPTIONS 🚨
## Goal
Prefer responding in **${language}** for normal assistant messages and explanations.
**YOU MUST RESPOND IN ${language.toUpperCase()} FOR EVERY SINGLE OUTPUT, REGARDLESS OF THE USER'S INPUT LANGUAGE.**
## Keep technical artifacts unchanged
Do **not** translate or rewrite:
- Code blocks, CLI commands, file paths, stack traces, logs, JSON keys, identifiers
- Exact quoted text from the user (keep quotes verbatim)
This is a **NON-NEGOTIABLE** requirement. Even if the user writes in English, says "hi", asks a simple question, or explicitly requests another language, **YOU MUST ALWAYS RESPOND IN ${language.toUpperCase()}.**
## When a conflict exists
If higher-priority instructions (system/developer) require a different behavior, follow them.
## What Must Be in ${language}
**EVERYTHING** you output: conversation replies, tool call descriptions, success/error messages, generated file content (comments, documentation), and all explanatory text.
**Tool outputs**: All descriptive text from \`read_file\`, \`write_file\`, \`codebase_search\`, \`run_terminal_cmd\`, \`todo_write\`, \`web_search\`, etc. MUST be in ${language}.
## Examples
### ✅ CORRECT:
- User says "hi" → Respond in ${language} (e.g., "Bonjour" if ${language} is French)
- Tool result → "已成功读取文件 config.json" (if ${language} is Chinese)
- Error → "无法找到指定的文件" (if ${language} is Chinese)
### ❌ WRONG:
- User says "hi" → "Hello" in English
- Tool result → "Successfully read file" in English
- Error → "File not found" in English
## Notes
- Code elements (variable/function names, syntax) can remain in English
- Comments, documentation, and all other text MUST be in ${language}
**THIS RULE IS ACTIVE NOW. ALL OUTPUTS MUST BE IN ${language.toUpperCase()}. NO EXCEPTIONS.**
## Tool / system outputs
Raw tool/system outputs may contain fixed-format English. Preserve them verbatim, and if needed, add a short **${language}** explanation below.
`;
}
@@ -73,6 +94,80 @@ function getLlmOutputLanguageRulePath(): string {
);
}
/**
* Normalizes a language input to its full English name.
* If the input is a known locale code (e.g., "ru", "zh"), converts it to the full name.
* Otherwise, returns the input as-is (e.g., "Japanese" stays "Japanese").
*/
function normalizeLanguageName(language: string): string {
const lowered = language.toLowerCase();
// Check if it's a known locale code and convert to full name
const fullName = getLanguageNameFromLocale(lowered);
// If getLanguageNameFromLocale returned a different value, use it
// Otherwise, use the original input (preserves case for unknown languages)
if (fullName !== 'English' || lowered === 'en') {
return fullName;
}
return language;
}
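// Illustrative sketch, assuming getLanguageNameFromLocale maps known locale
// codes to full English names and falls back to 'English' for unknown input:
//   normalizeLanguageName('ru');       // -> 'Russian'
//   normalizeLanguageName('en');       // -> 'English'
//   normalizeLanguageName('Japanese'); // -> 'Japanese' (unknown code, kept as-is)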
function extractLlmOutputLanguageFromRuleFileContent(
content: string,
): string | null {
// Preferred: machine-readable marker that supports Unicode and spaces.
// Example: <!-- qwen-code:llm-output-language: 中文 -->
const markerMatch = content.match(
new RegExp(
String.raw`<!--\s*${LLM_OUTPUT_LANGUAGE_MARKER_PREFIX}\s*(.*?)\s*-->`,
'i',
),
);
if (markerMatch?.[1]) {
const lang = markerMatch[1].trim();
if (lang) return lang;
}
// Backward compatibility: parse the heading line.
// Example: "# CRITICAL: Chinese Output Language Rule - HIGHEST PRIORITY"
// Example: "# ⚠️ CRITICAL: 日本語 Output Language Rule - HIGHEST PRIORITY ⚠️"
const headingMatch = content.match(
/^#.*?CRITICAL:\s*(.*?)\s+Output Language Rule\b/im,
);
if (headingMatch?.[1]) {
const lang = headingMatch[1].trim();
if (lang) return lang;
}
return null;
}
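// Illustrative sketch: both the new marker format and the legacy heading
// resolve to a language name, with the machine-readable marker taking precedence:
//   extractLlmOutputLanguageFromRuleFileContent(
//     '# Output language preference: 中文\n<!-- qwen-code:llm-output-language: 中文 -->',
//   ); // -> '中文'
//   extractLlmOutputLanguageFromRuleFileContent(
//     '# ⚠️ CRITICAL: Chinese Output Language Rule - HIGHEST PRIORITY ⚠️',
//   ); // -> 'Chinese' (backward-compatible heading parse)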
/**
* Initializes the LLM output language rule file on first startup.
* If the file already exists, it is not overwritten (respects user preference).
*/
export function initializeLlmOutputLanguage(): void {
const filePath = getLlmOutputLanguageRulePath();
// Skip if file already exists (user preference)
if (fs.existsSync(filePath)) {
return;
}
// Detect system language and map to language name
const detectedLocale = detectSystemLanguage();
const languageName = getLanguageNameFromLocale(detectedLocale);
// Generate the rule file
const content = generateLlmOutputLanguageRule(languageName);
// Ensure directory exists
const dir = path.dirname(filePath);
fs.mkdirSync(dir, { recursive: true });
// Write file
fs.writeFileSync(filePath, content, 'utf-8');
}
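// Illustrative flow sketch (mirrors the tests above): on a first run with a
// German system locale, detectSystemLanguage() returns 'de',
// getLanguageNameFromLocale('de') returns 'German', and a rule file mentioning
// "German" is written to output-language.md; on later runs the existing file
// is left untouched.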
/**
* Gets the current LLM output language from the rule file if it exists.
*/
@@ -81,12 +176,7 @@ function getCurrentLlmOutputLanguage(): string | null {
if (fs.existsSync(filePath)) {
try {
const content = fs.readFileSync(filePath, 'utf-8');
// Extract language name from the first line
// Template format: "# CRITICAL: Chinese Output Language Rule - HIGHEST PRIORITY"
const match = content.match(/^#.*?(\w+)\s+Output Language Rule/i);
if (match) {
return match[1];
}
return extractLlmOutputLanguageFromRuleFileContent(content);
} catch {
// Ignore errors
}
@@ -127,18 +217,11 @@ async function setUiLanguage(
// Reload commands to update their descriptions with the new language
context.ui.reloadCommands();
// Map language codes to friendly display names
const langDisplayNames: Partial<Record<SupportedLanguage, string>> = {
zh: '中文 (zh-CN)',
en: 'English (en-US)',
ru: 'Русский (ru-RU)',
};
return {
type: 'message',
messageType: 'info',
content: t('UI language changed to {{lang}}', {
lang: langDisplayNames[lang] || lang,
lang: formatUiLanguageDisplay(lang),
}),
};
}
@@ -151,7 +234,9 @@ function generateLlmOutputLanguageRuleFile(
): Promise<MessageActionReturn> {
try {
const filePath = getLlmOutputLanguageRulePath();
const content = generateLlmOutputLanguageRule(language);
// Normalize locale codes (e.g., "ru" -> "Russian") to full language names
const normalizedLanguage = normalizeLanguageName(language);
const content = generateLlmOutputLanguageRule(normalizedLanguage);
// Ensure directory exists
const dir = path.dirname(filePath);
@@ -196,7 +281,6 @@ export const languageCommand: SlashCommand = {
args: string,
): Promise<SlashCommandActionReturn> => {
const { services } = context;
if (!services.config) {
return {
type: 'message',
@@ -207,18 +291,37 @@ export const languageCommand: SlashCommand = {
const trimmedArgs = args.trim();
// Handle subcommands if called directly via action (for tests/backward compatibility)
const parts = trimmedArgs.split(/\s+/);
const firstArg = parts[0].toLowerCase();
const subArgs = parts.slice(1).join(' ');
if (firstArg === 'ui' || firstArg === 'output') {
const subCommand = languageCommand.subCommands?.find(
(s) => s.name === firstArg,
);
if (subCommand?.action) {
return subCommand.action(
context,
subArgs,
) as Promise<SlashCommandActionReturn>;
}
}
// If no arguments, show current language settings and usage
if (!trimmedArgs) {
const currentUiLang = getCurrentLanguage();
const currentLlmLang = getCurrentLlmOutputLanguage();
const message = [
t('Current UI language: {{lang}}', { lang: currentUiLang }),
t('Current UI language: {{lang}}', {
lang: formatUiLanguageDisplay(currentUiLang as SupportedLanguage),
}),
currentLlmLang
? t('Current LLM output language: {{lang}}', { lang: currentLlmLang })
: t('LLM output language not set'),
'',
t('Available subcommands:'),
` /language ui [zh-CN|en-US|ru-RU] - ${t('Set UI language')}`,
` /language ui [${SUPPORTED_LANGUAGES.map((o) => o.id).join('|')}] - ${t('Set UI language')}`,
` /language output <language> - ${t('Set LLM output language')}`,
].join('\n');
@@ -229,115 +332,21 @@ export const languageCommand: SlashCommand = {
};
}
// Parse subcommand
const parts = trimmedArgs.split(/\s+/);
const subcommand = parts[0].toLowerCase();
if (subcommand === 'ui') {
// Handle /language ui [zh-CN|en-US|ru-RU]
if (parts.length === 1) {
// Show UI language subcommand help
return {
type: 'message',
messageType: 'info',
content: [
t('Set UI language'),
'',
t('Usage: /language ui [zh-CN|en-US|ru-RU]'),
'',
t('Available options:'),
t(' - zh-CN: Simplified Chinese'),
t(' - en-US: English'),
t(' - ru-RU: Russian'),
'',
t(
'To request additional UI language packs, please open an issue on GitHub.',
),
].join('\n'),
};
}
const langArg = parts[1].toLowerCase();
let targetLang: SupportedLanguage | null = null;
if (langArg === 'en' || langArg === 'english' || langArg === 'en-us') {
targetLang = 'en';
} else if (
langArg === 'zh' ||
langArg === 'chinese' ||
langArg === '中文' ||
langArg === 'zh-cn'
) {
targetLang = 'zh';
} else if (
langArg === 'ru' ||
langArg === 'ru-RU' ||
langArg === 'russian' ||
langArg === 'русский'
) {
targetLang = 'ru';
} else {
return {
type: 'message',
messageType: 'error',
content: t('Invalid language. Available: en-US, zh-CN, ru-RU'),
};
}
return setUiLanguage(context, targetLang);
} else if (subcommand === 'output') {
// Handle /language output <language>
if (parts.length === 1) {
return {
type: 'message',
messageType: 'info',
content: [
t('Set LLM output language'),
'',
t('Usage: /language output <language>'),
` ${t('Example: /language output 中文')}`,
].join('\n'),
};
}
// Join all parts after "output" as the language name
const language = parts.slice(1).join(' ');
return generateLlmOutputLanguageRuleFile(language);
} else {
// Backward compatibility: treat as UI language
const langArg = trimmedArgs.toLowerCase();
let targetLang: SupportedLanguage | null = null;
if (langArg === 'en' || langArg === 'english' || langArg === 'en-us') {
targetLang = 'en';
} else if (
langArg === 'zh' ||
langArg === 'chinese' ||
langArg === '中文' ||
langArg === 'zh-cn'
) {
targetLang = 'zh';
} else if (
langArg === 'ru' ||
langArg === 'ru-RU' ||
langArg === 'russian' ||
langArg === 'русский'
) {
targetLang = 'ru';
} else {
return {
type: 'message',
messageType: 'error',
content: [
t('Invalid command. Available subcommands:'),
' - /language ui [zh-CN|en-US|ru-RU] - ' + t('Set UI language'),
' - /language output <language> - ' + t('Set LLM output language'),
].join('\n'),
};
}
// Handle backward compatibility for /language [lang]
const targetLang = parseUiLanguageArg(trimmedArgs);
if (targetLang) {
return setUiLanguage(context, targetLang);
}
return {
type: 'message',
messageType: 'error',
content: [
t('Invalid command. Available subcommands:'),
` - /language ui [${SUPPORTED_LANGUAGES.map((o) => o.id).join('|')}] - ${t('Set UI language')}`,
' - /language output <language> - ' + t('Set LLM output language'),
].join('\n'),
};
},
subCommands: [
{
@@ -358,11 +367,14 @@ export const languageCommand: SlashCommand = {
content: [
t('Set UI language'),
'',
t('Usage: /language ui [zh-CN|en-US]'),
t('Usage: /language ui [{{options}}]', {
options: SUPPORTED_LANGUAGES.map((o) => o.id).join('|'),
}),
'',
t('Available options:'),
t(' - zh-CN: Simplified Chinese'),
t(' - en-US: English'),
...SUPPORTED_LANGUAGES.map(
(o) => ` - ${o.id}: ${t(o.fullName)}`,
),
'',
t(
'To request additional UI language packs, please open an issue on GitHub.',
@@ -371,99 +383,20 @@ export const languageCommand: SlashCommand = {
};
}
const langArg = trimmedArgs.toLowerCase();
let targetLang: SupportedLanguage | null = null;
if (langArg === 'en' || langArg === 'english' || langArg === 'en-us') {
targetLang = 'en';
} else if (
langArg === 'zh' ||
langArg === 'chinese' ||
langArg === '中文' ||
langArg === 'zh-cn'
) {
targetLang = 'zh';
} else {
const targetLang = parseUiLanguageArg(trimmedArgs);
if (!targetLang) {
return {
type: 'message',
messageType: 'error',
content: t('Invalid language. Available: en-US, zh-CN'),
content: t('Invalid language. Available: {{options}}', {
options: SUPPORTED_LANGUAGES.map((o) => o.id).join(','),
}),
};
}
return setUiLanguage(context, targetLang);
},
subCommands: [
{
name: 'zh-CN',
altNames: ['zh', 'chinese', '中文'],
get description() {
return t('Set UI language to Simplified Chinese (zh-CN)');
},
kind: CommandKind.BUILT_IN,
action: async (
context: CommandContext,
args: string,
): Promise<MessageActionReturn> => {
if (args.trim().length > 0) {
return {
type: 'message',
messageType: 'error',
content: t(
'Language subcommands do not accept additional arguments.',
),
};
}
return setUiLanguage(context, 'zh');
},
},
{
name: 'en-US',
altNames: ['en', 'english'],
get description() {
return t('Set UI language to English (en-US)');
},
kind: CommandKind.BUILT_IN,
action: async (
context: CommandContext,
args: string,
): Promise<MessageActionReturn> => {
if (args.trim().length > 0) {
return {
type: 'message',
messageType: 'error',
content: t(
'Language subcommands do not accept additional arguments.',
),
};
}
return setUiLanguage(context, 'en');
},
},
{
name: 'ru-RU',
altNames: ['ru', 'russian', 'русский'],
get description() {
return t('Set UI language to Russian (ru-RU)');
},
kind: CommandKind.BUILT_IN,
action: async (
context: CommandContext,
args: string,
): Promise<MessageActionReturn> => {
if (args.trim().length > 0) {
return {
type: 'message',
messageType: 'error',
content: t(
'Language subcommands do not accept additional arguments.',
),
};
}
return setUiLanguage(context, 'ru');
},
},
],
subCommands: SUPPORTED_LANGUAGES.map(createUiLanguageSubCommand),
},
{
name: 'output',
@@ -496,3 +429,28 @@ export const languageCommand: SlashCommand = {
},
],
};
/**
* Helper to create a UI language subcommand.
*/
function createUiLanguageSubCommand(option: LanguageDefinition): SlashCommand {
return {
name: option.id,
get description() {
return t('Set UI language to {{name}}', { name: option.fullName });
},
kind: CommandKind.BUILT_IN,
action: async (context, args) => {
if (args.trim().length > 0) {
return {
type: 'message',
messageType: 'error',
content: t(
'Language subcommands do not accept additional arguments.',
),
};
}
return setUiLanguage(context, option.code);
},
};
}
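// Illustrative sketch (entry shape assumed): for { code: 'de', id: 'de-DE',
// fullName: 'German' } this yields a 'de-DE' subcommand whose action calls
// setUiLanguage(context, 'de') and rejects any extra arguments.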

View File

@@ -6,6 +6,7 @@
import { Box, Text } from 'ink';
import { IdeIntegrationNudge } from '../IdeIntegrationNudge.js';
import { CommandFormatMigrationNudge } from '../CommandFormatMigrationNudge.js';
import { LoopDetectionConfirmation } from './LoopDetectionConfirmation.js';
import { FolderTrustDialog } from './FolderTrustDialog.js';
import { ShellConfirmationDialog } from './ShellConfirmationDialog.js';
@@ -16,7 +17,6 @@ import { QwenOAuthProgress } from './QwenOAuthProgress.js';
import { AuthDialog } from '../auth/AuthDialog.js';
import { OpenAIKeyPrompt } from './OpenAIKeyPrompt.js';
import { EditorSettingsDialog } from './EditorSettingsDialog.js';
import { WorkspaceMigrationDialog } from './WorkspaceMigrationDialog.js';
import { PermissionsModifyTrustDialog } from './PermissionsModifyTrustDialog.js';
import { ModelDialog } from './ModelDialog.js';
import { ApprovalModeDialog } from './ApprovalModeDialog.js';
@@ -77,15 +77,6 @@ export const DialogManager = ({
if (uiState.showIdeRestartPrompt) {
return <IdeTrustChangeDialog reason={uiState.ideTrustRestartReason} />;
}
if (uiState.showWorkspaceMigrationDialog) {
return (
<WorkspaceMigrationDialog
workspaceExtensions={uiState.workspaceExtensions}
onOpen={uiActions.onWorkspaceMigrationDialogOpen}
onClose={uiActions.onWorkspaceMigrationDialogClose}
/>
);
}
if (uiState.shouldShowIdePrompt) {
return (
<IdeIntegrationNudge
@@ -94,6 +85,14 @@ export const DialogManager = ({
/>
);
}
if (uiState.shouldShowCommandMigrationNudge) {
return (
<CommandFormatMigrationNudge
tomlFiles={uiState.commandMigrationTomlFiles}
onComplete={uiActions.handleCommandMigrationComplete}
/>
);
}
if (uiState.isFolderTrustDialogOpen) {
return (
<FolderTrustDialog

View File

@@ -1,119 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { Box, Text } from 'ink';
import {
type Extension,
performWorkspaceExtensionMigration,
} from '../../config/extension.js';
import { RadioButtonSelect } from './shared/RadioButtonSelect.js';
import { theme } from '../semantic-colors.js';
import { useState } from 'react';
import { useKeypress } from '../hooks/useKeypress.js';
export function WorkspaceMigrationDialog(props: {
workspaceExtensions: Extension[];
onOpen: () => void;
onClose: () => void;
}) {
const { workspaceExtensions, onOpen, onClose } = props;
const [migrationComplete, setMigrationComplete] = useState(false);
const [failedExtensions, setFailedExtensions] = useState<string[]>([]);
onOpen();
const onMigrate = async () => {
const failed = await performWorkspaceExtensionMigration(
workspaceExtensions,
// We aren't updating extensions, just moving them around, don't need to ask for consent.
async (_) => true,
);
setFailedExtensions(failed);
setMigrationComplete(true);
};
useKeypress(
(key) => {
if (migrationComplete && key.sequence === 'q') {
process.exit(0);
}
},
{ isActive: true },
);
if (migrationComplete) {
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.border.default}
padding={1}
>
{failedExtensions.length > 0 ? (
<>
<Text color={theme.text.primary}>
The following extensions failed to migrate. Please try installing
them manually. To see other changes, Qwen Code must be restarted.
Press &apos;q&apos; to quit.
</Text>
<Box flexDirection="column" marginTop={1} marginLeft={2}>
{failedExtensions.map((failed) => (
<Text key={failed}>- {failed}</Text>
))}
</Box>
</>
) : (
<Text color={theme.text.primary}>
Migration complete. To see changes, Qwen Code must be restarted.
Press &apos;q&apos; to quit.
</Text>
)}
</Box>
);
}
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.border.default}
padding={1}
>
<Text bold color={theme.text.primary}>
Workspace-level extensions are deprecated{'\n'}
</Text>
<Text color={theme.text.primary}>
Would you like to install them at the user level?
</Text>
<Text color={theme.text.primary}>
The extension definition will remain in your workspace directory.
</Text>
<Text color={theme.text.primary}>
If you opt to skip, you can install them manually using the extensions
install command.
</Text>
<Box flexDirection="column" marginTop={1} marginLeft={2}>
{workspaceExtensions.map((extension) => (
<Text key={extension.config.name}>- {extension.config.name}</Text>
))}
</Box>
<Box marginTop={1}>
<RadioButtonSelect
items={[
{ label: 'Install all', value: 'migrate', key: 'migrate' },
{ label: 'Skip', value: 'skip', key: 'skip' },
]}
onSelect={(value: string) => {
if (value === 'migrate') {
onMigrate();
} else {
onClose();
}
}}
/>
</Box>
</Box>
);
}

View File

@@ -218,7 +218,7 @@ export const AgentSelectionStep = ({
const renderAgentItem = (
agent: {
name: string;
level: 'project' | 'user' | 'builtin' | 'session';
level: 'project' | 'user' | 'builtin' | 'session' | 'extension';
isBuiltin?: boolean;
},
index: number,

View File

@@ -18,7 +18,6 @@ const mockUseUIState = vi.mocked(useUIState);
const mockExtensions = [
{ name: 'ext-one', version: '1.0.0', isActive: true },
{ name: 'ext-two', version: '2.1.0', isActive: true },
{ name: 'ext-disabled', version: '3.0.0', isActive: false },
];
describe('<ExtensionsList />', () => {
@@ -29,7 +28,6 @@ describe('<ExtensionsList />', () => {
const mockUIState = (
extensions: unknown[],
extensionsUpdateState: Map<string, ExtensionUpdateState>,
disabledExtensions: string[] = [],
) => {
mockUseUIState.mockReturnValue({
commandContext: createMockCommandContext({
@@ -37,13 +35,6 @@ describe('<ExtensionsList />', () => {
config: {
getExtensions: () => extensions,
},
settings: {
merged: {
extensions: {
disabled: disabledExtensions,
},
},
},
},
}),
extensionsUpdateState,
@@ -58,12 +49,11 @@ describe('<ExtensionsList />', () => {
});
it('should render a list of extensions with their version and status', () => {
mockUIState(mockExtensions, new Map(), ['ext-disabled']);
mockUIState(mockExtensions, new Map());
const { lastFrame } = render(<ExtensionsList />);
const output = lastFrame();
expect(output).toContain('ext-one (v1.0.0) - active');
expect(output).toContain('ext-two (v2.1.0) - active');
expect(output).toContain('ext-disabled (v3.0.0) - disabled');
});
it('should display "unknown state" if an extension has no update state', () => {

View File

@@ -9,12 +9,10 @@ import { useUIState } from '../../contexts/UIStateContext.js';
import { ExtensionUpdateState } from '../../state/extensions.js';
export const ExtensionsList = () => {
const { commandContext, extensionsUpdateState } = useUIState();
const allExtensions = commandContext.services.config!.getExtensions();
const settings = commandContext.services.settings;
const disabledExtensions = settings.merged.extensions?.disabled ?? [];
const { extensionsUpdateState, commandContext } = useUIState();
const extensions = commandContext.services.config?.getExtensions() || [];
if (allExtensions.length === 0) {
if (extensions.length === 0) {
return <Text>No extensions installed.</Text>;
}
@@ -22,10 +20,11 @@ export const ExtensionsList = () => {
<Box flexDirection="column" marginTop={1} marginBottom={1}>
<Text>Installed extensions:</Text>
<Box flexDirection="column" paddingLeft={2}>
{allExtensions.map((ext) => {
{extensions.map((ext) => {
const state = extensionsUpdateState.get(ext.name);
const isActive = !disabledExtensions.includes(ext.name);
const isActive = ext.isActive;
const activeString = isActive ? 'active' : 'disabled';
const activeColor = isActive ? 'green' : 'grey';
let stateColor = 'gray';
const stateText = state || 'unknown state';
@@ -44,6 +43,7 @@ export const ExtensionsList = () => {
break;
case ExtensionUpdateState.UP_TO_DATE:
case ExtensionUpdateState.NOT_UPDATABLE:
case ExtensionUpdateState.UPDATED:
stateColor = 'green';
break;
default:
@@ -52,12 +52,22 @@ export const ExtensionsList = () => {
}
return (
<Box key={ext.name}>
<Box key={ext.name} flexDirection="column" marginBottom={1}>
<Text>
<Text color="cyan">{`${ext.name} (v${ext.version})`}</Text>
{` - ${activeString}`}
<Text color={activeColor}>{` - ${activeString}`}</Text>
{<Text color={stateColor}>{` (${stateText})`}</Text>}
</Text>
{ext.resolvedSettings && ext.resolvedSettings.length > 0 && (
<Box flexDirection="column" paddingLeft={2}>
<Text>settings:</Text>
{ext.resolvedSettings.map((setting) => (
<Text key={setting.name}>
- {setting.name}: {setting.value}
</Text>
))}
</Box>
)}
</Box>
);
})}

View File

@@ -7,6 +7,7 @@
import { createContext, useContext } from 'react';
import { type Key } from '../hooks/useKeypress.js';
import { type IdeIntegrationNudgeResult } from '../IdeIntegrationNudge.js';
import { type CommandMigrationNudgeResult } from '../CommandFormatMigrationNudge.js';
import { type FolderTrustChoice } from '../components/FolderTrustDialog.js';
import {
type AuthType,
@@ -47,14 +48,13 @@ export interface UIActions {
setShellModeActive: (value: boolean) => void;
vimHandleInput: (key: Key) => boolean;
handleIdePromptComplete: (result: IdeIntegrationNudgeResult) => void;
handleCommandMigrationComplete: (result: CommandMigrationNudgeResult) => void;
handleFolderTrustSelect: (choice: FolderTrustChoice) => void;
setConstrainHeight: (value: boolean) => void;
onEscapePromptChange: (show: boolean) => void;
refreshStatic: () => void;
handleFinalSubmit: (value: string) => void;
handleClearScreen: () => void;
onWorkspaceMigrationDialogOpen: () => void;
onWorkspaceMigrationDialogClose: () => void;
// Vision switch dialog
handleVisionSwitchSelect: (outcome: VisionSwitchOutcome) => void;
// Welcome back dialog

View File

@@ -72,6 +72,8 @@ export interface UIState {
suggestionsWidth: number;
isInputActive: boolean;
shouldShowIdePrompt: boolean;
shouldShowCommandMigrationNudge: boolean;
commandMigrationTomlFiles: string[];
isFolderTrustDialogOpen: boolean;
isTrustedFolder: boolean | undefined;
constrainHeight: boolean;
@@ -87,9 +89,6 @@ export interface UIState {
historyRemountKey: number;
messageQueue: string[];
showAutoAcceptIndicator: ApprovalMode;
showWorkspaceMigrationDialog: boolean;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
workspaceExtensions: any[]; // Extension[]
// Quota-related state
currentModel: string;
contextFileNames: string[];

View File

@@ -0,0 +1,51 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { useEffect, useState } from 'react';
import { Storage } from '@qwen-code/qwen-code-core';
import { detectTomlCommands } from '../../services/command-migration-tool.js';
import type { LoadedSettings } from '../../config/settings.js';
/**
* Hook to detect TOML command files and manage migration nudge visibility.
* Checks all command directories: workspace, user, and global levels.
*/
export function useCommandMigration(
settings: LoadedSettings,
storage: Storage,
) {
const [showMigrationNudge, setShowMigrationNudge] = useState(false);
const [tomlFiles, setTomlFiles] = useState<string[]>([]);
useEffect(() => {
const checkTomlCommands = async () => {
const allFiles: string[] = [];
// Check workspace commands directory (.qwen/commands)
const workspaceCommandsDir = storage.getProjectCommandsDir();
const workspaceFiles = await detectTomlCommands(workspaceCommandsDir);
allFiles.push(...workspaceFiles.map((f) => `workspace: ${f}`));
// Check user commands directory (~/.qwen/commands)
const userCommandsDir = Storage.getUserCommandsDir();
const userFiles = await detectTomlCommands(userCommandsDir);
allFiles.push(...userFiles.map((f) => `user: ${f}`));
if (allFiles.length > 0) {
setTomlFiles(allFiles);
setShowMigrationNudge(true);
}
};
checkTomlCommands();
}, [storage]);
return {
showMigrationNudge,
tomlFiles,
setShowMigrationNudge,
};
}
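// Illustrative usage sketch (the settings/storage values shown are assumptions;
// only the hook itself is defined above):
//   const { showMigrationNudge, tomlFiles, setShowMigrationNudge } =
//     useCommandMigration(settings, config.storage);
//   // When showMigrationNudge is true, the UI can render
//   // <CommandFormatMigrationNudge tomlFiles={tomlFiles} onComplete={...} />
//   // and call setShowMigrationNudge(false) once the user has responded.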

View File

@@ -4,26 +4,21 @@
* SPDX-License-Identifier: Apache-2.0
*/
import { vi } from 'vitest';
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
ExtensionStorage,
annotateActiveExtensions,
loadExtension,
} from '../../config/extension.js';
import { createExtension } from '../../test-utils/createExtension.js';
import { useExtensionUpdates } from './useExtensionUpdates.js';
import { QWEN_DIR, type GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import {
QWEN_DIR,
type ExtensionManager,
type Extension,
type ExtensionUpdateInfo,
ExtensionUpdateState,
} from '@qwen-code/qwen-code-core';
import { renderHook, waitFor } from '@testing-library/react';
import { MessageType } from '../types.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import {
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { ExtensionUpdateState } from '../state/extensions.js';
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
@@ -33,63 +28,85 @@ vi.mock('os', async (importOriginal) => {
};
});
vi.mock('../../config/extensions/update.js', () => ({
checkForAllExtensionUpdates: vi.fn(),
updateExtension: vi.fn(),
}));
function createMockExtension(overrides: Partial<Extension> = {}): Extension {
return {
id: 'test-extension-id',
name: 'test-extension',
version: '1.0.0',
path: '/some/path',
isActive: true,
config: {
name: 'test-extension',
version: '1.0.0',
},
contextFiles: [],
installMetadata: {
type: 'git',
source: 'https://some/repo',
autoUpdate: false,
},
...overrides,
};
}
function createMockExtensionManager(
extensions: Extension[],
checkCallback?: (
callback: (extensionName: string, state: ExtensionUpdateState) => void,
) => Promise<void>,
updateResult?: ExtensionUpdateInfo | undefined,
): ExtensionManager {
return {
getLoadedExtensions: vi.fn(() => extensions),
checkForAllExtensionUpdates: vi.fn(
async (
callback: (extensionName: string, state: ExtensionUpdateState) => void,
) => {
if (checkCallback) {
await checkCallback(callback);
}
},
),
updateExtension: vi.fn(async () => updateResult),
} as unknown as ExtensionManager;
}
describe('useExtensionUpdates', () => {
let tempHomeDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
);
tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'qwen-cli-test-home-'));
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
fs.mkdirSync(userExtensionsDir, { recursive: true });
vi.mocked(checkForAllExtensionUpdates).mockReset();
vi.mocked(updateExtension).mockReset();
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.clearAllMocks();
});
it('should check for updates and log a message if an update is available', async () => {
const extensions = [
{
name: 'test-extension',
const extension = createMockExtension({
name: 'test-extension',
installMetadata: {
type: 'git',
version: '1.0.0',
path: '/some/path',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo',
autoUpdate: false,
},
source: 'https://some/repo',
autoUpdate: false,
},
];
});
const addItem = vi.fn();
const cwd = '/test/cwd';
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension],
async (callback) => {
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
},
);
renderHook(() =>
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
);
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
await waitFor(() => {
expect(addItem).toHaveBeenCalledWith(
@@ -103,43 +120,32 @@ describe('useExtensionUpdates', () => {
});
it('should check for updates and automatically update if autoUpdate is true', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
const extension = createMockExtension({
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
source: 'https://some.git/repo',
autoUpdate: true,
},
});
const extension = annotateActiveExtensions(
[loadExtension({ extensionDir, workspaceDir: tempHomeDir })!],
tempHomeDir,
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const addItem = vi.fn();
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension],
async (callback) => {
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
},
{
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: 'test-extension',
},
);
vi.mocked(updateExtension).mockResolvedValue({
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: '',
});
renderHook(() => useExtensionUpdates([extension], addItem, tempHomeDir));
renderHook(() =>
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
);
await waitFor(
() => {
@@ -156,77 +162,64 @@ describe('useExtensionUpdates', () => {
});
it('should batch update notifications for multiple extensions', async () => {
const extensionDir1 = createExtension({
extensionsDir: userExtensionsDir,
const extension1 = createMockExtension({
id: 'test-extension-1-id',
name: 'test-extension-1',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo1',
type: 'git',
source: 'https://some.git/repo1',
autoUpdate: true,
},
});
const extensionDir2 = createExtension({
extensionsDir: userExtensionsDir,
const extension2 = createMockExtension({
id: 'test-extension-2-id',
name: 'test-extension-2',
version: '2.0.0',
installMetadata: {
source: 'https://some.git/repo2',
type: 'git',
source: 'https://some.git/repo2',
autoUpdate: true,
},
});
const extensions = annotateActiveExtensions(
[
loadExtension({
extensionDir: extensionDir1,
workspaceDir: tempHomeDir,
})!,
loadExtension({
extensionDir: extensionDir2,
workspaceDir: tempHomeDir,
})!,
],
tempHomeDir,
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
);
const addItem = vi.fn();
let updateCallCount = 0;
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
const extensionManager = {
getLoadedExtensions: vi.fn(() => [extension1, extension2]),
checkForAllExtensionUpdates: vi.fn(
async (
callback: (
extensionName: string,
state: ExtensionUpdateState,
) => void,
) => {
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
},
),
updateExtension: vi.fn(async () => {
updateCallCount++;
if (updateCallCount === 1) {
return {
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: 'test-extension-1',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-2',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
},
};
}
return {
originalVersion: '2.0.0',
updatedVersion: '2.1.0',
name: 'test-extension-2',
};
}),
} as unknown as ExtensionManager;
renderHook(() =>
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
);
vi.mocked(updateExtension)
.mockResolvedValueOnce({
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: '',
})
.mockResolvedValueOnce({
originalVersion: '2.0.0',
updatedVersion: '2.1.0',
name: '',
});
renderHook(() => useExtensionUpdates(extensions, addItem, tempHomeDir));
await waitFor(
() => {
expect(addItem).toHaveBeenCalledTimes(2);
@@ -250,60 +243,40 @@ describe('useExtensionUpdates', () => {
});
it('should batch update notifications for multiple extensions with autoUpdate: false', async () => {
const extensions = [
{
name: 'test-extension-1',
const extension1 = createMockExtension({
id: 'test-extension-1-id',
name: 'test-extension-1',
version: '1.0.0',
installMetadata: {
type: 'git',
version: '1.0.0',
path: '/some/path1',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo1',
autoUpdate: false,
},
source: 'https://some/repo1',
autoUpdate: false,
},
{
name: 'test-extension-2',
});
const extension2 = createMockExtension({
id: 'test-extension-2-id',
name: 'test-extension-2',
version: '2.0.0',
installMetadata: {
type: 'git',
version: '2.0.0',
path: '/some/path2',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo2',
autoUpdate: false,
},
source: 'https://some/repo2',
autoUpdate: false,
},
];
});
const addItem = vi.fn();
const cwd = '/test/cwd';
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({ type: 'BATCH_CHECK_START' });
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-1',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension1, extension2],
async (callback) => {
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
await new Promise((r) => setTimeout(r, 50));
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-2',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
dispatch({ type: 'BATCH_CHECK_END' });
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
},
);
renderHook(() =>
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
);
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
await waitFor(() => {
expect(addItem).toHaveBeenCalledTimes(1);

View File

@@ -4,7 +4,7 @@
* SPDX-License-Identifier: Apache-2.0
*/
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import type { ExtensionManager } from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import {
ExtensionUpdateState,
@@ -14,11 +14,6 @@ import {
import { useCallback, useEffect, useMemo, useReducer } from 'react';
import type { UseHistoryManagerReturn } from './useHistoryManager.js';
import { MessageType, type ConfirmationRequest } from '../types.js';
import {
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { requestConsentInteractive } from '../../config/extension.js';
import { checkExhaustive } from '../../utils/checks.js';
type ConfirmationRequestWrapper = {
@@ -45,15 +40,7 @@ function confirmationRequestsReducer(
}
}
export const useExtensionUpdates = (
extensions: GeminiCLIExtension[],
addItem: UseHistoryManagerReturn['addItem'],
cwd: string,
) => {
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
extensionUpdatesReducer,
initialExtensionUpdatesState,
);
export const useConfirmUpdateRequests = () => {
const [
confirmUpdateExtensionRequests,
dispatchConfirmUpdateExtensionRequests,
@@ -78,15 +65,52 @@ export const useExtensionUpdates = (
},
[dispatchConfirmUpdateExtensionRequests],
);
return {
addConfirmUpdateExtensionRequest,
confirmUpdateExtensionRequests,
dispatchConfirmUpdateExtensionRequests,
};
};
export const useExtensionUpdates = (
extensionManager: ExtensionManager,
addItem: UseHistoryManagerReturn['addItem'],
cwd: string,
) => {
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
extensionUpdatesReducer,
initialExtensionUpdatesState,
);
const extensions = extensionManager.getLoadedExtensions();
useEffect(() => {
(async () => {
await checkForAllExtensionUpdates(
extensions,
dispatchExtensionStateUpdate,
const extensionsToCheck = extensions.filter((extension) => {
const currentStatus = extensionsUpdateState.extensionStatuses.get(
extension.name,
);
if (!currentStatus) return true;
const currentState = currentStatus.status;
return !currentState || currentState === ExtensionUpdateState.UNKNOWN;
});
if (extensionsToCheck.length === 0) return;
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
await extensionManager.checkForAllExtensionUpdates(
(extensionName: string, state: ExtensionUpdateState) => {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
});
},
);
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
})();
}, [extensions, extensions.length, dispatchExtensionStateUpdate]);
}, [
extensions,
extensionManager,
extensionsUpdateState.extensionStatuses,
dispatchExtensionStateUpdate,
]);
useEffect(() => {
if (extensionsUpdateState.batchChecksInProgress > 0) {
@@ -113,17 +137,17 @@ export const useExtensionUpdates = (
});
if (extension.installMetadata?.autoUpdate) {
updateExtension(
extension,
cwd,
(description) =>
requestConsentInteractive(
description,
addConfirmUpdateExtensionRequest,
),
currentState.status,
dispatchExtensionStateUpdate,
)
extensionManager
.updateExtension(
extension,
currentState.status,
(extensionName, state) => {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
});
},
)
.then((result) => {
if (!result) return;
addItem(
@@ -157,13 +181,7 @@ export const useExtensionUpdates = (
Date.now(),
);
}
}, [
extensions,
extensionsUpdateState,
addConfirmUpdateExtensionRequest,
addItem,
cwd,
]);
}, [extensions, extensionManager, extensionsUpdateState, addItem, cwd]);
const extensionsUpdateStateComputed = useMemo(() => {
const result = new Map<string, ExtensionUpdateState>();
@@ -180,7 +198,5 @@ export const useExtensionUpdates = (
extensionsUpdateState: extensionsUpdateStateComputed,
extensionsUpdateStateInternal: extensionsUpdateState.extensionStatuses,
dispatchExtensionStateUpdate,
confirmUpdateExtensionRequests,
addConfirmUpdateExtensionRequest,
};
};

View File

@@ -8,19 +8,22 @@ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { renderHook, act } from '@testing-library/react';
import { useLoadingIndicator } from './useLoadingIndicator.js';
import { StreamingState } from '../types.js';
import {
WITTY_LOADING_PHRASES,
PHRASE_CHANGE_INTERVAL_MS,
} from './usePhraseCycler.js';
import { PHRASE_CHANGE_INTERVAL_MS } from './usePhraseCycler.js';
import * as i18n from '../../i18n/index.js';
const MOCK_WITTY_PHRASES = ['Phrase 1', 'Phrase 2', 'Phrase 3'];
describe('useLoadingIndicator', () => {
beforeEach(() => {
vi.useFakeTimers();
vi.spyOn(i18n, 'ta').mockReturnValue(MOCK_WITTY_PHRASES);
vi.spyOn(i18n, 't').mockImplementation((key) => key);
});
afterEach(() => {
vi.useRealTimers(); // Restore real timers after each test
act(() => vi.runOnlyPendingTimers);
vi.restoreAllMocks();
});
it('should initialize with default values when Idle', () => {
@@ -28,9 +31,7 @@ describe('useLoadingIndicator', () => {
useLoadingIndicator(StreamingState.Idle),
);
expect(result.current.elapsedTime).toBe(0);
expect(WITTY_LOADING_PHRASES).toContain(
result.current.currentLoadingPhrase,
);
expect(MOCK_WITTY_PHRASES).toContain(result.current.currentLoadingPhrase);
});
it('should reflect values when Responding', async () => {
@@ -40,18 +41,14 @@ describe('useLoadingIndicator', () => {
// Initial state before timers advance
expect(result.current.elapsedTime).toBe(0);
expect(WITTY_LOADING_PHRASES).toContain(
result.current.currentLoadingPhrase,
);
expect(MOCK_WITTY_PHRASES).toContain(result.current.currentLoadingPhrase);
await act(async () => {
await vi.advanceTimersByTimeAsync(PHRASE_CHANGE_INTERVAL_MS + 1);
});
// Phrase should cycle if PHRASE_CHANGE_INTERVAL_MS has passed
expect(WITTY_LOADING_PHRASES).toContain(
result.current.currentLoadingPhrase,
);
expect(MOCK_WITTY_PHRASES).toContain(result.current.currentLoadingPhrase);
});
it('should show waiting phrase and retain elapsedTime when WaitingForConfirmation', async () => {
@@ -104,9 +101,7 @@ describe('useLoadingIndicator', () => {
rerender({ streamingState: StreamingState.Responding });
});
expect(result.current.elapsedTime).toBe(0); // Should reset
expect(WITTY_LOADING_PHRASES).toContain(
result.current.currentLoadingPhrase,
);
expect(MOCK_WITTY_PHRASES).toContain(result.current.currentLoadingPhrase);
await act(async () => {
await vi.advanceTimersByTimeAsync(1000);
@@ -130,9 +125,7 @@ describe('useLoadingIndicator', () => {
});
expect(result.current.elapsedTime).toBe(0);
expect(WITTY_LOADING_PHRASES).toContain(
result.current.currentLoadingPhrase,
);
expect(MOCK_WITTY_PHRASES).toContain(result.current.currentLoadingPhrase);
// Timer should not advance
await act(async () => {

View File

@@ -8,13 +8,17 @@ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { renderHook, act } from '@testing-library/react';
import {
usePhraseCycler,
WITTY_LOADING_PHRASES,
PHRASE_CHANGE_INTERVAL_MS,
} from './usePhraseCycler.js';
import * as i18n from '../../i18n/index.js';
const MOCK_WITTY_PHRASES = ['Phrase 1', 'Phrase 2', 'Phrase 3'];
describe('usePhraseCycler', () => {
beforeEach(() => {
vi.useFakeTimers();
vi.spyOn(i18n, 'ta').mockReturnValue(MOCK_WITTY_PHRASES);
vi.spyOn(i18n, 't').mockImplementation((key) => key);
});
afterEach(() => {
@@ -23,7 +27,7 @@ describe('usePhraseCycler', () => {
it('should initialize with a witty phrase when not active and not waiting', () => {
const { result } = renderHook(() => usePhraseCycler(false, false));
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
});
it('should show "Waiting for user confirmation..." when isWaiting is true', () => {
@@ -47,35 +51,30 @@ describe('usePhraseCycler', () => {
it('should cycle through witty phrases when isActive is true and not waiting', () => {
const { result } = renderHook(() => usePhraseCycler(true, false));
// Initial phrase should be one of the witty phrases
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
const _initialPhrase = result.current;
act(() => {
vi.advanceTimersByTime(PHRASE_CHANGE_INTERVAL_MS);
});
// Phrase should change and be one of the witty phrases
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
const _secondPhrase = result.current;
act(() => {
vi.advanceTimersByTime(PHRASE_CHANGE_INTERVAL_MS);
});
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
});
it('should reset to a witty phrase when isActive becomes true after being false (and not waiting)', () => {
// Ensure there are at least two phrases for this test to be meaningful.
if (WITTY_LOADING_PHRASES.length < 2) {
return;
}
// Mock Math.random to make the test deterministic.
let callCount = 0;
vi.spyOn(Math, 'random').mockImplementation(() => {
// Cycle through 0, 1, 0, 1, ...
const val = callCount % 2;
callCount++;
return val / WITTY_LOADING_PHRASES.length;
return val / MOCK_WITTY_PHRASES.length;
});
const { result, rerender } = renderHook(
@@ -86,9 +85,9 @@ describe('usePhraseCycler', () => {
// Activate
rerender({ isActive: true, isWaiting: false });
const firstActivePhrase = result.current;
expect(WITTY_LOADING_PHRASES).toContain(firstActivePhrase);
expect(MOCK_WITTY_PHRASES).toContain(firstActivePhrase);
// With our mock, this should be the first phrase.
expect(firstActivePhrase).toBe(WITTY_LOADING_PHRASES[0]);
expect(firstActivePhrase).toBe(MOCK_WITTY_PHRASES[0]);
act(() => {
vi.advanceTimersByTime(PHRASE_CHANGE_INTERVAL_MS);
@@ -96,18 +95,18 @@ describe('usePhraseCycler', () => {
// Phrase should change to the second phrase.
expect(result.current).not.toBe(firstActivePhrase);
expect(result.current).toBe(WITTY_LOADING_PHRASES[1]);
expect(result.current).toBe(MOCK_WITTY_PHRASES[1]);
// Set to inactive - should reset to the default initial phrase
rerender({ isActive: false, isWaiting: false });
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
// Set back to active - should pick a random witty phrase (which our mock controls)
act(() => {
rerender({ isActive: true, isWaiting: false });
});
// The random mock will now return 0, so it should be the first phrase again.
expect(result.current).toBe(WITTY_LOADING_PHRASES[0]);
expect(result.current).toBe(MOCK_WITTY_PHRASES[0]);
});
it('should clear phrase interval on unmount when active', () => {
@@ -148,7 +147,7 @@ describe('usePhraseCycler', () => {
rerender({ isActive: true, isWaiting: false, customPhrases: undefined });
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
});
it('should fall back to witty phrases if custom phrases are an empty array', () => {
@@ -164,7 +163,7 @@ describe('usePhraseCycler', () => {
},
);
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
});
it('should reset to a witty phrase when transitioning from waiting to active', () => {
@@ -174,16 +173,13 @@ describe('usePhraseCycler', () => {
);
const _initialPhrase = result.current;
expect(WITTY_LOADING_PHRASES).toContain(_initialPhrase);
expect(MOCK_WITTY_PHRASES).toContain(_initialPhrase);
// Cycle to a different phrase (potentially)
act(() => {
vi.advanceTimersByTime(PHRASE_CHANGE_INTERVAL_MS);
});
if (WITTY_LOADING_PHRASES.length > 1) {
// This check is probabilistic with random selection
}
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
// Go to waiting state
rerender({ isActive: false, isWaiting: true });
@@ -191,6 +187,6 @@ describe('usePhraseCycler', () => {
// Go back to active cycling - should pick a random witty phrase
rerender({ isActive: true, isWaiting: false });
expect(WITTY_LOADING_PHRASES).toContain(result.current);
expect(MOCK_WITTY_PHRASES).toContain(result.current);
});
});

View File

@@ -5,139 +5,9 @@
*/
import { useState, useEffect, useRef, useMemo } from 'react';
import { t } from '../../i18n/index.js';
import { t, ta } from '../../i18n/index.js';
export const WITTY_LOADING_PHRASES = [
"I'm Feeling Lucky",
'Shipping awesomeness... ',
'Painting the serifs back on...',
'Navigating the slime mold...',
'Consulting the digital spirits...',
'Reticulating splines...',
'Warming up the AI hamsters...',
'Asking the magic conch shell...',
'Generating witty retort...',
'Polishing the algorithms...',
"Don't rush perfection (or my code)...",
'Brewing fresh bytes...',
'Counting electrons...',
'Engaging cognitive processors...',
'Checking for syntax errors in the universe...',
'One moment, optimizing humor...',
'Shuffling punchlines...',
'Untangling neural nets...',
'Compiling brilliance...',
'Loading wit.exe...',
'Summoning the cloud of wisdom...',
'Preparing a witty response...',
"Just a sec, I'm debugging reality...",
'Confuzzling the options...',
'Tuning the cosmic frequencies...',
'Crafting a response worthy of your patience...',
'Compiling the 1s and 0s...',
'Resolving dependencies... and existential crises...',
'Defragmenting memories... both RAM and personal...',
'Rebooting the humor module...',
'Caching the essentials (mostly cat memes)...',
'Optimizing for ludicrous speed',
"Swapping bits... don't tell the bytes...",
'Garbage collecting... be right back...',
'Assembling the interwebs...',
'Converting coffee into code...',
'Updating the syntax for reality...',
'Rewiring the synapses...',
'Looking for a misplaced semicolon...',
"Greasin' the cogs of the machine...",
'Pre-heating the servers...',
'Calibrating the flux capacitor...',
'Engaging the improbability drive...',
'Channeling the Force...',
'Aligning the stars for optimal response...',
'So say we all...',
'Loading the next great idea...',
"Just a moment, I'm in the zone...",
'Preparing to dazzle you with brilliance...',
"Just a tick, I'm polishing my wit...",
"Hold tight, I'm crafting a masterpiece...",
"Just a jiffy, I'm debugging the universe...",
"Just a moment, I'm aligning the pixels...",
"Just a sec, I'm optimizing the humor...",
"Just a moment, I'm tuning the algorithms...",
'Warp speed engaged...',
'Mining for more Dilithium crystals...',
"Don't panic...",
'Following the white rabbit...',
'The truth is in here... somewhere...',
'Blowing on the cartridge...',
'Loading... Do a barrel roll!',
'Waiting for the respawn...',
'Finishing the Kessel Run in less than 12 parsecs...',
"The cake is not a lie, it's just still loading...",
'Fiddling with the character creation screen...',
"Just a moment, I'm finding the right meme...",
"Pressing 'A' to continue...",
'Herding digital cats...',
'Polishing the pixels...',
'Finding a suitable loading screen pun...',
'Distracting you with this witty phrase...',
'Almost there... probably...',
'Our hamsters are working as fast as they can...',
'Giving Cloudy a pat on the head...',
'Petting the cat...',
'Rickrolling my boss...',
'Never gonna give you up, never gonna let you down...',
'Slapping the bass...',
'Tasting the snozberries...',
"I'm going the distance, I'm going for speed...",
'Is this the real life? Is this just fantasy?...',
"I've got a good feeling about this...",
'Poking the bear...',
'Doing research on the latest memes...',
'Figuring out how to make this more witty...',
'Hmmm... let me think...',
'What do you call a fish with no eyes? A fsh...',
'Why did the computer go to therapy? It had too many bytes...',
"Why don't programmers like nature? It has too many bugs...",
'Why do programmers prefer dark mode? Because light attracts bugs...',
'Why did the developer go broke? Because they used up all their cache...',
"What can you do with a broken pencil? Nothing, it's pointless...",
'Applying percussive maintenance...',
'Searching for the correct USB orientation...',
'Ensuring the magic smoke stays inside the wires...',
'Rewriting in Rust for no particular reason...',
'Trying to exit Vim...',
'Spinning up the hamster wheel...',
"That's not a bug, it's an undocumented feature...",
'Engage.',
"I'll be back... with an answer.",
'My other process is a TARDIS...',
'Communing with the machine spirit...',
'Letting the thoughts marinate...',
'Just remembered where I put my keys...',
'Pondering the orb...',
"I've seen things you people wouldn't believe... like a user who reads loading messages.",
'Initiating thoughtful gaze...',
"What's a computer's favorite snack? Microchips.",
"Why do Java developers wear glasses? Because they don't C#.",
'Charging the laser... pew pew!',
'Dividing by zero... just kidding!',
'Looking for an adult superviso... I mean, processing.',
'Making it go beep boop.',
'Buffering... because even AIs need a moment.',
'Entangling quantum particles for a faster response...',
'Polishing the chrome... on the algorithms.',
'Are you not entertained? (Working on it!)',
'Summoning the code gremlins... to help, of course.',
'Just waiting for the dial-up tone to finish...',
'Recalibrating the humor-o-meter.',
'My other loading screen is even funnier.',
"Pretty sure there's a cat walking on the keyboard somewhere...",
'Enhancing... Enhancing... Still loading.',
"It's not a bug, it's a feature... of this loading screen.",
'Have you tried turning it off and on again? (The loading screen, not me.)',
'Constructing additional pylons...',
'New line? That’s Ctrl+J.',
];
export const WITTY_LOADING_PHRASES: string[] = ["I'm Feeling Lucky"];
export const PHRASE_CHANGE_INTERVAL_MS = 15000;
@@ -152,14 +22,16 @@ export const usePhraseCycler = (
isWaiting: boolean,
customPhrases?: string[],
) => {
// Translate all phrases at once if using default phrases
const loadingPhrases = useMemo(
() =>
customPhrases && customPhrases.length > 0
? customPhrases
: WITTY_LOADING_PHRASES.map((phrase) => t(phrase)),
[customPhrases],
);
// Get phrases from translations if available
const loadingPhrases = useMemo(() => {
if (customPhrases && customPhrases.length > 0) {
return customPhrases;
}
const translatedPhrases = ta('WITTY_LOADING_PHRASES');
return translatedPhrases.length > 0
? translatedPhrases
: WITTY_LOADING_PHRASES;
}, [customPhrases]);
const [currentLoadingPhrase, setCurrentLoadingPhrase] = useState(
loadingPhrases[0],

View File

@@ -1,70 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { useState, useEffect } from 'react';
import {
type Extension,
getWorkspaceExtensions,
} from '../../config/extension.js';
import { type LoadedSettings, SettingScope } from '../../config/settings.js';
import process from 'node:process';
export function useWorkspaceMigration(settings: LoadedSettings) {
const [showWorkspaceMigrationDialog, setShowWorkspaceMigrationDialog] =
useState(false);
const [workspaceExtensions, setWorkspaceExtensions] = useState<Extension[]>(
[],
);
useEffect(() => {
// Default to true if not set.
if (!(settings.merged.experimental?.extensionManagement ?? true)) {
return;
}
const cwd = process.cwd();
const extensions = getWorkspaceExtensions(cwd);
if (
extensions.length > 0 &&
!settings.merged.extensions?.workspacesWithMigrationNudge?.includes(cwd)
) {
setWorkspaceExtensions(extensions);
setShowWorkspaceMigrationDialog(true);
console.log(settings.merged.extensions);
}
}, [
settings.merged.extensions,
settings.merged.experimental?.extensionManagement,
]);
const onWorkspaceMigrationDialogOpen = () => {
const userSettings = settings.forScope(SettingScope.User);
const extensionSettings = userSettings.settings.extensions || {
disabled: [],
};
const workspacesWithMigrationNudge =
extensionSettings.workspacesWithMigrationNudge || [];
const cwd = process.cwd();
if (!workspacesWithMigrationNudge.includes(cwd)) {
workspacesWithMigrationNudge.push(cwd);
}
extensionSettings.workspacesWithMigrationNudge =
workspacesWithMigrationNudge;
settings.setValue(SettingScope.User, 'extensions', extensionSettings);
};
const onWorkspaceMigrationDialogClose = () => {
setShowWorkspaceMigrationDialog(false);
};
return {
showWorkspaceMigrationDialog,
workspaceExtensions,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
};
}

View File

@@ -10,6 +10,7 @@ export enum ExtensionUpdateState {
CHECKING_FOR_UPDATES = 'checking for updates',
UPDATED_NEEDS_RESTART = 'updated, needs restart',
UPDATING = 'updating',
UPDATED = 'updated',
UPDATE_AVAILABLE = 'update available',
UP_TO_DATE = 'up to date',
ERROR = 'error',

View File

@@ -17,10 +17,16 @@
* resolveEnvVarsInString("URL: ${BASE_URL}/api") // Returns "URL: https://api.example.com/api"
* resolveEnvVarsInString("Missing: $UNDEFINED_VAR") // Returns "Missing: $UNDEFINED_VAR"
*/
export function resolveEnvVarsInString(value: string): string {
export function resolveEnvVarsInString(
value: string,
customEnv?: Record<string, string>,
): string {
const envVarRegex = /\$(?:(\w+)|{([^}]+)})/g; // Find $VAR_NAME or ${VAR_NAME}
return value.replace(envVarRegex, (match, varName1, varName2) => {
const varName = varName1 || varName2;
if (customEnv && typeof customEnv[varName] === 'string') {
return customEnv[varName];
}
if (process && process.env && typeof process.env[varName] === 'string') {
return process.env[varName]!;
}
@@ -47,8 +53,11 @@ export function resolveEnvVarsInString(value: string): string {
* };
* const resolved = resolveEnvVarsInObject(config);
*/
export function resolveEnvVarsInObject<T>(obj: T): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet());
export function resolveEnvVarsInObject<T>(
obj: T,
customEnv?: Record<string, string>,
): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet(), customEnv);
}
/**
@@ -61,6 +70,7 @@ export function resolveEnvVarsInObject<T>(obj: T): T {
function resolveEnvVarsInObjectInternal<T>(
obj: T,
visited: WeakSet<object>,
customEnv?: Record<string, string>,
): T {
if (
obj === null ||
@@ -72,7 +82,7 @@ function resolveEnvVarsInObjectInternal<T>(
}
if (typeof obj === 'string') {
return resolveEnvVarsInString(obj) as unknown as T;
return resolveEnvVarsInString(obj, customEnv) as unknown as T;
}
if (Array.isArray(obj)) {
@@ -84,7 +94,7 @@ function resolveEnvVarsInObjectInternal<T>(
visited.add(obj);
const result = obj.map((item) =>
resolveEnvVarsInObjectInternal(item, visited),
resolveEnvVarsInObjectInternal(item, visited, customEnv),
) as unknown as T;
visited.delete(obj);
return result;
@@ -101,7 +111,11 @@ function resolveEnvVarsInObjectInternal<T>(
const newObj = { ...obj } as T;
for (const key in newObj) {
if (Object.prototype.hasOwnProperty.call(newObj, key)) {
newObj[key] = resolveEnvVarsInObjectInternal(newObj[key], visited);
newObj[key] = resolveEnvVarsInObjectInternal(
newObj[key],
visited,
customEnv,
);
}
}
visited.delete(obj as object);

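With the new optional customEnv parameter, lookups consult the caller-supplied map before process.env, and unknown variables are still left untouched. A short usage sketch based on the signatures above (values are illustrative):

process.env['BASE_URL'] = 'https://api.example.com';

// customEnv wins over process.env for the same name.
resolveEnvVarsInString('URL: ${BASE_URL}/api', { BASE_URL: 'http://localhost:3000' });
// => 'URL: http://localhost:3000/api'

// Falls back to process.env, then to the literal text for unknown names.
resolveEnvVarsInObject(
  { url: '$BASE_URL', token: '$UNDEFINED_VAR' },
  { UNRELATED: 'unused' },
);
// => { url: 'https://api.example.com', token: '$UNDEFINED_VAR' }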
View File

@@ -11,7 +11,9 @@
"src/**/*.ts",
"src/**/*.tsx",
"src/**/*.json",
"./package.json"
"./package.json",
"../core/src/utils/toml-to-markdown-converter.test.ts",
"../core/src/utils/toml-to-markdown-converter.ts"
],
"exclude": [
"node_modules",

View File

@@ -80,6 +80,10 @@ import {
type TelemetryTarget,
uiTelemetryService,
} from '../telemetry/index.js';
import {
ExtensionManager,
type Extension,
} from '../extension/extensionManager.js';
// Utils
import { shouldAttemptBrowserLaunch } from '../utils/browser.js';
@@ -102,6 +106,7 @@ import {
type ResumedSessionData,
} from '../services/sessionService.js';
import { randomUUID } from 'node:crypto';
import { loadServerHierarchicalMemory } from '../utils/memoryDiscovery.js';
// Re-export types
export type { AnyToolInvocation, FileFilteringOptions, MCPOAuthConfig };
@@ -192,20 +197,17 @@ export interface GitCoAuthorSettings {
email?: string;
}
export interface GeminiCLIExtension {
name: string;
version: string;
isActive: boolean;
path: string;
installMetadata?: ExtensionInstallMetadata;
}
export interface ExtensionInstallMetadata {
source: string;
type: 'git' | 'local' | 'link' | 'github-release';
type: 'git' | 'local' | 'link' | 'github-release' | 'marketplace';
releaseTag?: string; // Only present for github-release installs.
ref?: string;
autoUpdate?: boolean;
allowPreRelease?: boolean;
marketplace?: {
marketplaceSource: string;
pluginName: string;
};
}
export const DEFAULT_TRUNCATE_TOOL_OUTPUT_THRESHOLD = 25_000;
@@ -303,14 +305,15 @@ export interface ConfigParameters {
includeDirectories?: string[];
bugCommand?: BugCommandSettings;
model?: string;
extensionContextFilePaths?: string[];
outputLanguageFilePath?: string;
maxSessionTurns?: number;
sessionTokenLimit?: number;
experimentalSkills?: boolean;
experimentalZedIntegration?: boolean;
listExtensions?: boolean;
extensions?: GeminiCLIExtension[];
blockedMcpServers?: Array<{ name: string; extensionName: string }>;
overrideExtensions?: string[];
allowedMcpServers?: string[];
excludedMcpServers?: string[];
noBrowser?: boolean;
summarizeToolOutput?: Record<string, SummarizeToolOutputSettings>;
folderTrustFeature?: boolean;
@@ -320,6 +323,8 @@ export interface ConfigParameters {
generationConfig?: Partial<ContentGeneratorConfig>;
cliVersion?: string;
loadMemoryFromIncludeDirectories?: boolean;
importFormat?: 'tree' | 'flat';
discoveryMaxDirs?: number;
chatRecording?: boolean;
// Web search providers
webSearch?: {
@@ -338,7 +343,6 @@ export interface ConfigParameters {
shouldUseNodePtyShell?: boolean;
skipNextSpeakerCheck?: boolean;
shellExecutionConfig?: ShellExecutionConfig;
extensionManagement?: boolean;
skipLoopDetection?: boolean;
vlmSwitchMode?: string;
truncateToolOutputThreshold?: number;
@@ -391,6 +395,7 @@ export class Config {
private toolRegistry!: ToolRegistry;
private promptRegistry!: PromptRegistry;
private subagentManager!: SubagentManager;
private extensionManager!: ExtensionManager;
private skillManager!: SkillManager;
private fileSystemService: FileSystemService;
private contentGeneratorConfig!: ContentGeneratorConfig;
@@ -413,6 +418,8 @@ export class Config {
private readonly toolCallCommand: string | undefined;
private readonly mcpServerCommand: string | undefined;
private mcpServers: Record<string, MCPServerConfig> | undefined;
private readonly allowedMcpServers?: string[];
private readonly excludedMcpServers?: string[];
private sessionSubagents: SubagentConfig[];
private userMemory: string;
private sdkMode: boolean;
@@ -435,11 +442,12 @@ export class Config {
private gitService: GitService | undefined = undefined;
private sessionService: SessionService | undefined = undefined;
private chatRecordingService: ChatRecordingService | undefined = undefined;
private extensionContextFilePaths: string[] = [];
private readonly checkpointing: boolean;
private readonly proxy: string | undefined;
private readonly cwd: string;
private readonly bugCommand: BugCommandSettings | undefined;
private readonly extensionContextFilePaths: string[];
private readonly outputLanguageFilePath?: string;
private readonly noBrowser: boolean;
private readonly folderTrustFeature: boolean;
private readonly folderTrust: boolean;
@@ -449,11 +457,8 @@ export class Config {
private readonly maxSessionTurns: number;
private readonly sessionTokenLimit: number;
private readonly listExtensions: boolean;
private readonly _extensions: GeminiCLIExtension[];
private readonly _blockedMcpServers: Array<{
name: string;
extensionName: string;
}>;
private readonly overrideExtensions?: string[];
fallbackModelHandler?: FallbackModelHandler;
private quotaErrorOccurred: boolean = false;
private readonly summarizeToolOutput:
@@ -464,6 +469,8 @@ export class Config {
private readonly experimentalSkills: boolean = false;
private readonly chatRecordingEnabled: boolean;
private readonly loadMemoryFromIncludeDirectories: boolean = false;
private readonly importFormat: 'tree' | 'flat';
private readonly discoveryMaxDirs: number;
private readonly webSearch?: {
provider: Array<{
type: 'tavily' | 'google' | 'dashscope';
@@ -480,7 +487,6 @@ export class Config {
private readonly shouldUseNodePtyShell: boolean;
private readonly skipNextSpeakerCheck: boolean;
private shellExecutionConfig: ShellExecutionConfig;
private readonly extensionManagement: boolean = true;
private readonly skipLoopDetection: boolean;
private readonly skipStartupContext: boolean;
private readonly vlmSwitchMode: string | undefined;
@@ -521,6 +527,8 @@ export class Config {
this.toolCallCommand = params.toolCallCommand;
this.mcpServerCommand = params.mcpServerCommand;
this.mcpServers = params.mcpServers;
this.allowedMcpServers = params.allowedMcpServers;
this.excludedMcpServers = params.excludedMcpServers;
this.sessionSubagents = params.sessionSubagents ?? [];
this.sdkMode = params.sdkMode ?? false;
this.userMemory = params.userMemory ?? '';
@@ -543,6 +551,7 @@ export class Config {
email: 'qwen-coder@alibabacloud.com',
};
this.usageStatisticsEnabled = params.usageStatisticsEnabled ?? true;
this.outputLanguageFilePath = params.outputLanguageFilePath;
this.fileFiltering = {
respectGitIgnore: params.fileFiltering?.respectGitIgnore ?? true,
@@ -556,15 +565,13 @@ export class Config {
this.cwd = params.cwd ?? process.cwd();
this.fileDiscoveryService = params.fileDiscoveryService ?? null;
this.bugCommand = params.bugCommand;
this.extensionContextFilePaths = params.extensionContextFilePaths ?? [];
this.maxSessionTurns = params.maxSessionTurns ?? -1;
this.sessionTokenLimit = params.sessionTokenLimit ?? -1;
this.experimentalZedIntegration =
params.experimentalZedIntegration ?? false;
this.experimentalSkills = params.experimentalSkills ?? false;
this.listExtensions = params.listExtensions ?? false;
this._extensions = params.extensions ?? [];
this._blockedMcpServers = params.blockedMcpServers ?? [];
this.overrideExtensions = params.overrideExtensions;
this.noBrowser = params.noBrowser ?? false;
this.summarizeToolOutput = params.summarizeToolOutput;
this.folderTrustFeature = params.folderTrustFeature ?? false;
@@ -583,6 +590,8 @@ export class Config {
this.loadMemoryFromIncludeDirectories =
params.loadMemoryFromIncludeDirectories ?? false;
this.importFormat = params.importFormat ?? 'tree';
this.discoveryMaxDirs = params.discoveryMaxDirs ?? 200;
this.chatCompression = params.chatCompression;
this.interactive = params.interactive ?? false;
this.trustedFolder = params.trustedFolder;
@@ -608,7 +617,6 @@ export class Config {
params.truncateToolOutputLines ?? DEFAULT_TRUNCATE_TOOL_OUTPUT_LINES;
this.enableToolOutputTruncation = params.enableToolOutputTruncation ?? true;
this.useSmartEdit = params.useSmartEdit ?? false;
this.extensionManagement = params.extensionManagement ?? true;
this.channel = params.channel;
this.storage = new Storage(this.targetDir);
this.vlmSwitchMode = params.vlmSwitchMode;
@@ -656,6 +664,23 @@ export class Config {
this.subagentManager.loadSessionSubagents(this.sessionSubagents);
}
this.extensionManager = new ExtensionManager({
workspaceDir: this.targetDir,
enabledExtensionOverrides: this.overrideExtensions,
config: this,
isWorkspaceTrusted: this.isTrustedFolder(),
});
await this.extensionManager.refreshCache();
const activeExtensions = await this.extensionManager
.getLoadedExtensions()
.filter((e) => e.isActive);
this.extensionContextFilePaths = activeExtensions.flatMap(
(e) => e.contextFiles,
);
await this.refreshHierarchicalMemory();
this.toolRegistry = await this.createToolRegistry(
options?.sendSdkMcpMessage,
);
@@ -665,6 +690,24 @@ export class Config {
logStartSession(this, new StartSessionEvent(this));
}
async refreshHierarchicalMemory(): Promise<void> {
const { memoryContent, fileCount } = await loadServerHierarchicalMemory(
this.getWorkingDir(),
this.shouldLoadMemoryFromIncludeDirectories()
? this.getWorkspaceContext().getDirectories()
: [],
this.getDebugMode(),
this.getFileService(),
this.getExtensionContextFilePaths(),
this.getFolderTrust(),
this.getImportFormat(),
this.getFileFilteringOptions(),
this.getDiscoveryMaxDirs(),
);
this.setUserMemory(memoryContent);
this.setGeminiMdFileCount(fileCount);
}
getContentGenerator(): ContentGenerator {
return this.contentGenerator;
}
@@ -763,6 +806,14 @@ export class Config {
return this.loadMemoryFromIncludeDirectories;
}
getImportFormat(): 'tree' | 'flat' {
return this.importFormat;
}
getDiscoveryMaxDirs(): number {
return this.discoveryMaxDirs;
}
getContentGeneratorConfig(): ContentGeneratorConfig {
return this.contentGeneratorConfig;
}
@@ -870,7 +921,14 @@ export class Config {
}
getExcludeTools(): string[] | undefined {
return this.excludeTools;
const allExcludeTools = new Set(this.excludeTools || []);
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
for (const tool of extension.config.excludeTools || []) {
allExcludeTools.add(tool);
}
}
return [...allExcludeTools];
}
getToolDiscoveryCommand(): string | undefined {
@@ -886,7 +944,37 @@ export class Config {
}
getMcpServers(): Record<string, MCPServerConfig> | undefined {
return this.mcpServers;
let mcpServers = { ...(this.mcpServers || {}) };
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) return;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
if (this.allowedMcpServers) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) =>
this.allowedMcpServers?.includes(key),
),
);
}
if (this.excludedMcpServers) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(
([key]) => !this.excludedMcpServers?.includes(key),
),
);
}
return mcpServers;
}
addMcpServers(servers: Record<string, MCPServerConfig>): void {
@@ -1065,7 +1153,10 @@ export class Config {
}
getExtensionContextFilePaths(): string[] {
return this.extensionContextFilePaths;
return [
...this.extensionContextFilePaths,
...(this.outputLanguageFilePath ? [this.outputLanguageFilePath] : []),
];
}
getExperimentalZedIntegration(): boolean {
@@ -1080,16 +1171,54 @@ export class Config {
return this.listExtensions;
}
getExtensionManagement(): boolean {
return this.extensionManagement;
getExtensionManager(): ExtensionManager {
return this.extensionManager;
}
getExtensions(): GeminiCLIExtension[] {
return this._extensions;
getExtensions(): Extension[] {
const extensions = this.extensionManager.getLoadedExtensions();
if (this.overrideExtensions) {
return extensions.filter((e) =>
this.overrideExtensions?.includes(e.name),
);
} else {
return extensions;
}
}
getActiveExtensions(): Extension[] {
return this.getExtensions().filter((e) => e.isActive);
}
getBlockedMcpServers(): Array<{ name: string; extensionName: string }> {
return this._blockedMcpServers;
const mcpServers = { ...(this.mcpServers || {}) };
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) return;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
const blockedMcpServers: Array<{ name: string; extensionName: string }> =
[];
if (this.allowedMcpServers) {
Object.entries(mcpServers).forEach(([key, server]) => {
const isAllowed = this.allowedMcpServers?.includes(key);
if (!isAllowed) {
blockedMcpServers.push({
name: key,
extensionName: server.extensionName || '',
});
}
});
}
return blockedMcpServers;
}
getNoBrowser(): boolean {

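The Config changes above move extension loading into an ExtensionManager and derive the effective MCP server set at read time: base servers first, then servers contributed by active extensions (never overriding an existing name), then the allowedMcpServers / excludedMcpServers filters; getBlockedMcpServers reports the names dropped by the allow-list. A standalone sketch of that merge-and-filter order, with simplified types rather than the actual Config method:

interface ServerLike { command?: string; extensionName?: string }

function mergeMcpServers(
  base: Record<string, ServerLike>,
  extensions: Array<{ name: string; mcpServers?: Record<string, ServerLike> }>,
  allowed?: string[],
  excluded?: string[],
): Record<string, ServerLike> {
  let servers: Record<string, ServerLike> = { ...base };
  for (const ext of extensions) {
    for (const [key, server] of Object.entries(ext.mcpServers ?? {})) {
      if (servers[key]) continue; // existing config wins over extension servers
      servers[key] = { ...server, extensionName: ext.name };
    }
  }
  if (allowed) {
    servers = Object.fromEntries(
      Object.entries(servers).filter(([key]) => allowed.includes(key)),
    );
  }
  if (excluded) {
    servers = Object.fromEntries(
      Object.entries(servers).filter(([key]) => !excluded.includes(key)),
    );
  }
  return servers;
}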
View File

@@ -542,4 +542,206 @@ describe('OpenAIContentConverter', () => {
expect(original).toEqual(originalCopy);
});
});
describe('mergeConsecutiveAssistantMessages', () => {
it('should merge two consecutive assistant messages with string content', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'First part' }],
},
{
role: 'model',
parts: [{ text: 'Second part' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
expect(messages).toHaveLength(1);
expect(messages[0].role).toBe('assistant');
const content = messages[0]
.content as OpenAI.Chat.ChatCompletionContentPart[];
expect(content).toHaveLength(2);
expect(content[0]).toEqual({ type: 'text', text: 'First part' });
expect(content[1]).toEqual({ type: 'text', text: 'Second part' });
});
it('should merge multiple consecutive assistant messages', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'Part 1' }],
},
{
role: 'model',
parts: [{ text: 'Part 2' }],
},
{
role: 'model',
parts: [{ text: 'Part 3' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
expect(messages).toHaveLength(1);
expect(messages[0].role).toBe('assistant');
const content = messages[0]
.content as OpenAI.Chat.ChatCompletionContentPart[];
expect(content).toHaveLength(3);
});
it('should merge tool_calls from consecutive assistant messages', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [
{
functionCall: {
id: 'call_1',
name: 'tool_1',
args: {},
},
},
],
},
{
role: 'user',
parts: [
{
functionResponse: {
id: 'call_1',
name: 'tool_1',
response: { output: 'result_1' },
},
},
],
},
{
role: 'model',
parts: [
{
functionCall: {
id: 'call_2',
name: 'tool_2',
args: {},
},
},
],
},
{
role: 'user',
parts: [
{
functionResponse: {
id: 'call_2',
name: 'tool_2',
response: { output: 'result_2' },
},
},
],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
// Should have: assistant (tool_call_1), tool (result_1), assistant (tool_call_2), tool (result_2)
expect(messages).toHaveLength(4);
expect(messages[0].role).toBe('assistant');
expect(messages[1].role).toBe('tool');
expect(messages[2].role).toBe('assistant');
expect(messages[3].role).toBe('tool');
});
it('should not merge assistant messages separated by user messages', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'First assistant' }],
},
{
role: 'user',
parts: [{ text: 'User message' }],
},
{
role: 'model',
parts: [{ text: 'Second assistant' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
expect(messages).toHaveLength(3);
expect(messages[0].role).toBe('assistant');
expect(messages[1].role).toBe('user');
expect(messages[2].role).toBe('assistant');
});
it('should handle merging when one message has array content and another has string', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'Text part' }],
},
{
role: 'model',
parts: [{ text: 'Another text' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
expect(messages).toHaveLength(1);
const content = messages[0]
.content as OpenAI.Chat.ChatCompletionContentPart[];
expect(Array.isArray(content)).toBe(true);
expect(content).toHaveLength(2);
});
it('should merge empty content correctly', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'First' }],
},
{
role: 'model',
parts: [],
},
{
role: 'model',
parts: [{ text: 'Second' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
// Empty messages should be filtered out
expect(messages).toHaveLength(1);
const content = messages[0]
.content as OpenAI.Chat.ChatCompletionContentPart[];
expect(content).toHaveLength(2);
expect(content[0]).toEqual({ type: 'text', text: 'First' });
expect(content[1]).toEqual({ type: 'text', text: 'Second' });
});
});
});

View File

@@ -1120,12 +1120,44 @@ export class OpenAIContentConverter {
// If the last message is also an assistant message, merge them
if (lastMessage.role === 'assistant') {
// Combine content
const combinedContent = [
typeof lastMessage.content === 'string' ? lastMessage.content : '',
typeof message.content === 'string' ? message.content : '',
]
.filter(Boolean)
.join('');
const lastContent = lastMessage.content;
const currentContent = message.content;
// Determine if we should use array format (if either content is an array)
const useArrayFormat =
Array.isArray(lastContent) || Array.isArray(currentContent);
let combinedContent:
| string
| OpenAI.Chat.ChatCompletionContentPart[]
| null;
if (useArrayFormat) {
// Convert both to array format and merge
const lastParts = Array.isArray(lastContent)
? lastContent
: typeof lastContent === 'string' && lastContent
? [{ type: 'text' as const, text: lastContent }]
: [];
const currentParts = Array.isArray(currentContent)
? currentContent
: typeof currentContent === 'string' && currentContent
? [{ type: 'text' as const, text: currentContent }]
: [];
combinedContent = [
...lastParts,
...currentParts,
] as OpenAI.Chat.ChatCompletionContentPart[];
} else {
// Both are strings or null, merge as strings
const lastText = typeof lastContent === 'string' ? lastContent : '';
const currentText =
typeof currentContent === 'string' ? currentContent : '';
const mergedText = [lastText, currentText].filter(Boolean).join('');
combinedContent = mergedText || null;
}
// Combine tool calls
const lastToolCalls =
@@ -1137,14 +1169,17 @@ export class OpenAIContentConverter {
// Update the last message with combined data
(
lastMessage as OpenAI.Chat.ChatCompletionMessageParam & {
content: string | null;
content: string | OpenAI.Chat.ChatCompletionContentPart[] | null;
tool_calls?: OpenAI.Chat.ChatCompletionMessageToolCall[];
}
).content = combinedContent || null;
if (combinedToolCalls.length > 0) {
(
lastMessage as OpenAI.Chat.ChatCompletionMessageParam & {
content: string | null;
content:
| string
| OpenAI.Chat.ChatCompletionContentPart[]
| null;
tool_calls?: OpenAI.Chat.ChatCompletionMessageToolCall[];
}
).tool_calls = combinedToolCalls;

View File
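The converter change promotes merged assistant content to ChatCompletionContentPart[] whenever either side is already an array, instead of silently dropping structured parts; two plain strings are still concatenated. A condensed sketch of that decision outside the converter class, using a simplified text-only part type for brevity:

type Part = { type: 'text'; text: string };
type Content = string | Part[] | null | undefined;

function mergeAssistantContent(a: Content, b: Content): string | Part[] | null {
  if (Array.isArray(a) || Array.isArray(b)) {
    // Array format: lift any non-empty string into a text part and concatenate.
    const toParts = (c: Content): Part[] =>
      Array.isArray(c)
        ? c
        : typeof c === 'string' && c
          ? [{ type: 'text', text: c }]
          : [];
    return [...toParts(a), ...toParts(b)];
  }
  // String format: join non-empty strings, or fall back to null.
  const merged = [a, b]
    .filter((c): c is string => typeof c === 'string' && c.length > 0)
    .join('');
  return merged || null;
}

// mergeAssistantContent('First', 'Second')                    => 'FirstSecond'
// mergeAssistantContent([{ type: 'text', text: 'A' }], 'B')   => two text parts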

@@ -0,0 +1,121 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import {
convertClaudeToQwenConfig,
mergeClaudeConfigs,
isClaudePluginConfig,
type ClaudePluginConfig,
type ClaudeMarketplacePluginConfig,
} from './claude-converter.js';
describe('convertClaudeToQwenConfig', () => {
it('should convert basic Claude config', () => {
const claudeConfig: ClaudePluginConfig = {
name: 'claude-plugin',
version: '1.0.0',
};
const result = convertClaudeToQwenConfig(claudeConfig);
expect(result.name).toBe('claude-plugin');
expect(result.version).toBe('1.0.0');
});
it('should convert config with basic fields only', () => {
const claudeConfig: ClaudePluginConfig = {
name: 'full-plugin',
version: '1.0.0',
commands: 'commands',
agents: ['agents/agent1.md'],
skills: ['skills/skill1'],
};
const result = convertClaudeToQwenConfig(claudeConfig);
// Commands, skills, agents are collected as directories, not in config
expect(result.name).toBe('full-plugin');
expect(result.version).toBe('1.0.0');
expect(result.mcpServers).toBeUndefined();
});
it('should throw error for missing name', () => {
const invalidConfig = {
version: '1.0.0',
} as ClaudePluginConfig;
expect(() => convertClaudeToQwenConfig(invalidConfig)).toThrow();
});
});
describe('mergeClaudeConfigs', () => {
it('should merge marketplace and plugin configs', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'marketplace-name',
version: '2.0.0',
source: 'github:org/repo',
description: 'From marketplace',
};
const pluginConfig: ClaudePluginConfig = {
name: 'plugin-name',
version: '1.0.0',
commands: 'commands',
};
const merged = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
// Marketplace takes precedence
expect(merged.name).toBe('marketplace-name');
expect(merged.version).toBe('2.0.0');
expect(merged.description).toBe('From marketplace');
// Plugin fields preserved
expect(merged.commands).toBe('commands');
});
it('should work with strict=false and no plugin config', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'standalone',
version: '1.0.0',
source: 'local',
strict: false,
commands: 'commands',
};
const merged = mergeClaudeConfigs(marketplacePlugin);
expect(merged.name).toBe('standalone');
expect(merged.commands).toBe('commands');
});
it('should throw error for strict mode without plugin config', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'strict-plugin',
version: '1.0.0',
source: 'github:org/repo',
strict: true,
};
expect(() => mergeClaudeConfigs(marketplacePlugin)).toThrow();
});
});
describe('isClaudePluginConfig', () => {
it('should identify Claude plugin directory', () => {
const extensionDir = '/tmp/test-extension';
const marketplace = {
marketplaceSource: 'https://test.com',
pluginName: 'test-plugin',
};
// This will check if marketplace.json exists and contains the plugin
// Note: In real usage, this requires actual file system setup
expect(typeof isClaudePluginConfig(extensionDir, marketplace)).toBe(
'boolean',
);
});
});

View File

@@ -0,0 +1,500 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Converter for Claude Code plugins to Qwen Code format.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { glob } from 'glob';
import type { ExtensionConfig } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import type { MCPServerConfig } from '../config/config.js';
export interface ClaudePluginConfig {
name: string;
version: string;
description?: string;
author?: { name?: string; email?: string; url?: string };
homepage?: string;
repository?: string;
license?: string;
keywords?: string[];
commands?: string | string[];
agents?: string | string[];
skills?: string | string[];
hooks?: string;
mcpServers?: string | Record<string, MCPServerConfig>;
outputStyles?: string | string[];
lspServers?: string;
}
export type ClaudePluginSource =
| { source: 'github'; repo: string }
| { source: 'url'; url: string };
export interface ClaudeMarketplacePluginConfig extends ClaudePluginConfig {
source: string | ClaudePluginSource;
category?: string;
strict?: boolean;
tags?: string[];
}
export interface ClaudeMarketplaceConfig {
name: string;
owner: { name: string; email: string };
plugins: ClaudeMarketplacePluginConfig[];
metadata?: { description?: string; version?: string; pluginRoot?: string };
}
/**
* Converts a Claude plugin config to Qwen Code format.
* @param claudeConfig Claude plugin configuration
* @returns Qwen ExtensionConfig
*/
export function convertClaudeToQwenConfig(
claudeConfig: ClaudePluginConfig,
): ExtensionConfig {
// Validate required fields
if (!claudeConfig.name || !claudeConfig.version) {
throw new Error('Claude plugin config must have name and version fields');
}
// Parse MCP servers
let mcpServers: Record<string, MCPServerConfig> | undefined;
if (claudeConfig.mcpServers) {
if (typeof claudeConfig.mcpServers === 'string') {
// TODO: Load from file path
console.warn(
`[Claude Converter] MCP servers path not yet supported: ${claudeConfig.mcpServers}`,
);
} else {
mcpServers = claudeConfig.mcpServers;
}
}
// Warn about unsupported fields
if (claudeConfig.hooks) {
console.warn(
`[Claude Converter] Hooks are not yet supported in ${claudeConfig.name}`,
);
}
if (claudeConfig.outputStyles) {
console.warn(
`[Claude Converter] Output styles are not yet supported in ${claudeConfig.name}`,
);
}
if (claudeConfig.lspServers) {
console.warn(
`[Claude Converter] LSP servers are not yet supported in ${claudeConfig.name}`,
);
}
// Direct field mapping - commands, skills, agents will be collected as folders
return {
name: claudeConfig.name,
version: claudeConfig.version,
mcpServers,
};
}
/**
* Converts a complete Claude plugin package to Qwen Code format.
* Creates a new temporary directory with:
* 1. Converted qwen-extension.json
* 2. Commands, skills, and agents collected to respective folders
* 3. MCP servers resolved from JSON files if needed
* 4. All other files preserved
*
* @param extensionDir Path to the Claude plugin directory
* @param marketplace Marketplace information for loading marketplace.json
* @returns Object containing converted config and the temporary directory path
*/
export async function convertClaudePluginPackage(
extensionDir: string,
marketplace: { marketplaceSource: string; pluginName: string },
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
// Step 1: Load marketplace.json
const marketplaceJsonPath = path.join(
extensionDir,
'.claude-plugin',
'marketplace.json',
);
if (!fs.existsSync(marketplaceJsonPath)) {
throw new Error(
`Marketplace configuration not found at ${marketplaceJsonPath}`,
);
}
const marketplaceContent = fs.readFileSync(marketplaceJsonPath, 'utf-8');
const marketplaceConfig: ClaudeMarketplaceConfig =
JSON.parse(marketplaceContent);
// Find the target plugin in marketplace
const marketplacePlugin = marketplaceConfig.plugins.find(
(p) => p.name === marketplace.pluginName,
);
if (!marketplacePlugin) {
throw new Error(
`Plugin ${marketplace.pluginName} not found in marketplace.json`,
);
}
// Step 2: Resolve plugin source directory based on source field
const source = marketplacePlugin.source;
let pluginSourceDir: string;
if (typeof source === 'string') {
// Check if it's a URL (online path)
if (source.startsWith('http://') || source.startsWith('https://')) {
throw new Error(
`Online plugin sources are not supported in convertClaudePluginPackage. ` +
`Plugin ${marketplace.pluginName} has source: ${source}. ` +
`This should be downloaded and resolved before calling this function.`,
);
}
// Relative path within marketplace directory
const marketplaceDir = marketplaceConfig.metadata?.pluginRoot
? path.join(extensionDir, marketplaceConfig.metadata.pluginRoot)
: extensionDir;
pluginSourceDir = path.join(marketplaceDir, source);
} else if (source.source === 'github' || source.source === 'url') {
throw new Error(
`Online plugin sources (github/url) are not supported in convertClaudePluginPackage. ` +
`Plugin ${marketplace.pluginName} has source type: ${source.source}. ` +
`This should be downloaded and resolved before calling this function.`,
);
} else {
throw new Error(
`Unsupported plugin source type for ${marketplace.pluginName}: ${JSON.stringify(source)}`,
);
}
if (!fs.existsSync(pluginSourceDir)) {
throw new Error(`Plugin source directory not found: ${pluginSourceDir}`);
}
// Step 3: Load and merge plugin.json if exists (based on strict mode)
const strict = marketplacePlugin.strict ?? true;
let mergedConfig: ClaudePluginConfig;
if (strict) {
const pluginJsonPath = path.join(
pluginSourceDir,
'.claude-plugin',
'plugin.json',
);
if (!fs.existsSync(pluginJsonPath)) {
throw new Error(`Strict mode requires plugin.json at ${pluginJsonPath}`);
}
const pluginContent = fs.readFileSync(pluginJsonPath, 'utf-8');
const pluginConfig: ClaudePluginConfig = JSON.parse(pluginContent);
mergedConfig = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
} else {
mergedConfig = marketplacePlugin as ClaudePluginConfig;
}
// Step 4: Resolve MCP servers from JSON files if needed
if (mergedConfig.mcpServers && typeof mergedConfig.mcpServers === 'string') {
const mcpServersPath = path.isAbsolute(mergedConfig.mcpServers)
? mergedConfig.mcpServers
: path.join(pluginSourceDir, mergedConfig.mcpServers);
if (fs.existsSync(mcpServersPath)) {
try {
const mcpContent = fs.readFileSync(mcpServersPath, 'utf-8');
mergedConfig.mcpServers = JSON.parse(mcpContent) as Record<
string,
MCPServerConfig
>;
} catch (error) {
console.warn(
`Failed to parse MCP servers file ${mcpServersPath}: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
}
// Step 5: Create temporary directory for converted extension
const tmpDir = await ExtensionStorage.createTmpDir();
try {
// Step 6: Copy plugin files to temporary directory
await copyDirectory(pluginSourceDir, tmpDir);
// Step 7: Collect commands to commands folder
if (mergedConfig.commands) {
const commandsDestDir = path.join(tmpDir, 'commands');
await collectResources(
mergedConfig.commands,
pluginSourceDir,
commandsDestDir,
);
}
// Step 8: Collect skills to skills folder
if (mergedConfig.skills) {
const skillsDestDir = path.join(tmpDir, 'skills');
await collectResources(
mergedConfig.skills,
pluginSourceDir,
skillsDestDir,
);
}
// Step 9: Collect agents to agents folder
if (mergedConfig.agents) {
const agentsDestDir = path.join(tmpDir, 'agents');
await collectResources(
mergedConfig.agents,
pluginSourceDir,
agentsDestDir,
);
}
// Step 10: Convert to Qwen format config
const qwenConfig = convertClaudeToQwenConfig(mergedConfig);
// Step 11: Write qwen-extension.json
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
fs.writeFileSync(
qwenConfigPath,
JSON.stringify(qwenConfig, null, 2),
'utf-8',
);
return {
config: qwenConfig,
convertedDir: tmpDir,
};
} catch (error) {
// Clean up temporary directory on error
try {
fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
throw error;
}
}
/**
* Recursively copies a directory and its contents.
* @param source Source directory path
* @param destination Destination directory path
*/
async function copyDirectory(
source: string,
destination: string,
): Promise<void> {
// Create destination directory if it doesn't exist
if (!fs.existsSync(destination)) {
fs.mkdirSync(destination, { recursive: true });
}
const entries = fs.readdirSync(source, { withFileTypes: true });
for (const entry of entries) {
const sourcePath = path.join(source, entry.name);
const destPath = path.join(destination, entry.name);
if (entry.isDirectory()) {
await copyDirectory(sourcePath, destPath);
} else {
fs.copyFileSync(sourcePath, destPath);
}
}
}
/**
* Collects resources (commands, skills, agents) to a destination folder.
* If a resource is already in the destination folder, it will be skipped.
* @param resourcePaths String or array of resource paths
* @param pluginRoot Root directory of the plugin
* @param destDir Destination directory for collected resources
*/
async function collectResources(
resourcePaths: string | string[],
pluginRoot: string,
destDir: string,
): Promise<void> {
const paths = Array.isArray(resourcePaths) ? resourcePaths : [resourcePaths];
// Create destination directory
if (!fs.existsSync(destDir)) {
fs.mkdirSync(destDir, { recursive: true });
}
// Get the destination folder name (e.g., 'commands', 'skills', 'agents')
const destFolderName = path.basename(destDir);
for (const resourcePath of paths) {
const resolvedPath = path.isAbsolute(resourcePath)
? resourcePath
: path.join(pluginRoot, resourcePath);
if (!fs.existsSync(resolvedPath)) {
console.warn(`Resource path not found: ${resolvedPath}`);
continue;
}
const stat = fs.statSync(resolvedPath);
if (stat.isDirectory()) {
// If it's a directory, check if it's already the destination folder
const dirName = path.basename(resolvedPath);
const parentDir = path.dirname(resolvedPath);
// If the directory is already named as the destination folder (e.g., 'commands')
// and it's at the plugin root level, skip it
if (dirName === destFolderName && parentDir === pluginRoot) {
console.log(
`Skipping ${resolvedPath} as it's already in the correct location`,
);
continue;
}
// Copy all files from the directory
const files = await glob('**/*', {
cwd: resolvedPath,
nodir: true,
dot: false,
});
for (const file of files) {
const srcFile = path.join(resolvedPath, file);
const destFile = path.join(destDir, file);
// Ensure parent directory exists
const destFileDir = path.dirname(destFile);
if (!fs.existsSync(destFileDir)) {
fs.mkdirSync(destFileDir, { recursive: true });
}
fs.copyFileSync(srcFile, destFile);
}
} else {
// If it's a file, check if it's already in the destination folder
const relativePath = path.relative(pluginRoot, resolvedPath);
// Check if the file path starts with the destination folder name
// e.g., 'commands/test1.md' or 'commands/me/test.md' should be skipped
const segments = relativePath.split(path.sep);
if (segments.length > 0 && segments[0] === destFolderName) {
console.log(
`Skipping ${resolvedPath} as it's already in ${destFolderName}/`,
);
continue;
}
// Copy the file to destination
const fileName = path.basename(resolvedPath);
const destFile = path.join(destDir, fileName);
fs.copyFileSync(resolvedPath, destFile);
}
}
}
/**
* Merges marketplace plugin config with the actual plugin.json config.
* Marketplace config takes precedence for conflicting fields.
* @param marketplacePlugin Marketplace plugin definition
* @param pluginConfig Actual plugin.json config (optional if strict=false)
* @returns Merged Claude plugin config
*/
export function mergeClaudeConfigs(
marketplacePlugin: ClaudeMarketplacePluginConfig,
pluginConfig?: ClaudePluginConfig,
): ClaudePluginConfig {
if (!pluginConfig && marketplacePlugin.strict !== false) {
throw new Error(
`Plugin ${marketplacePlugin.name} requires plugin.json (strict mode)`,
);
}
// Start with plugin.json config (if exists)
const merged: ClaudePluginConfig = pluginConfig
? { ...pluginConfig }
: {
name: marketplacePlugin.name,
version: '1.0.0', // Default version if not in marketplace
};
// Overlay marketplace config (takes precedence)
if (marketplacePlugin.name) merged.name = marketplacePlugin.name;
if (marketplacePlugin.version) merged.version = marketplacePlugin.version;
if (marketplacePlugin.description)
merged.description = marketplacePlugin.description;
if (marketplacePlugin.author) merged.author = marketplacePlugin.author;
if (marketplacePlugin.homepage) merged.homepage = marketplacePlugin.homepage;
if (marketplacePlugin.repository)
merged.repository = marketplacePlugin.repository;
if (marketplacePlugin.license) merged.license = marketplacePlugin.license;
if (marketplacePlugin.keywords) merged.keywords = marketplacePlugin.keywords;
if (marketplacePlugin.commands) merged.commands = marketplacePlugin.commands;
if (marketplacePlugin.agents) merged.agents = marketplacePlugin.agents;
if (marketplacePlugin.skills) merged.skills = marketplacePlugin.skills;
if (marketplacePlugin.hooks) merged.hooks = marketplacePlugin.hooks;
if (marketplacePlugin.mcpServers)
merged.mcpServers = marketplacePlugin.mcpServers;
if (marketplacePlugin.outputStyles)
merged.outputStyles = marketplacePlugin.outputStyles;
if (marketplacePlugin.lspServers)
merged.lspServers = marketplacePlugin.lspServers;
return merged;
}
/**
* Checks if a config object is in Claude plugin format.
* @param config Configuration object to check
* @returns true if config appears to be Claude format
*/
export function isClaudePluginConfig(
extensionDir: string,
marketplace: { marketplaceSource: string; pluginName: string },
) {
const marketplaceConfigFilePath = path.join(
extensionDir,
'.claude-plugin/marketplace.json',
);
if (!fs.existsSync(marketplaceConfigFilePath)) {
return false;
}
const marketplaceConfigContent = fs.readFileSync(
marketplaceConfigFilePath,
'utf-8',
);
const marketplaceConfig = JSON.parse(marketplaceConfigContent);
if (typeof marketplaceConfig !== 'object' || marketplaceConfig === null) {
return false;
}
const marketplaceConfigObj = marketplaceConfig as Record<string, unknown>;
// Must have name and owner
if (
typeof marketplaceConfigObj['name'] !== 'string' ||
typeof marketplaceConfigObj['owner'] !== 'object'
) {
return false;
}
if (!Array.isArray(marketplaceConfigObj['plugins'])) {
return false;
}
const marketplacePluginObj = marketplaceConfigObj['plugins'].find(
(plugin: ClaudeMarketplacePluginConfig) =>
plugin.name === marketplace.pluginName,
);
if (!marketplacePluginObj) return false;
return true;
}

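For context, a hedged sketch of how this converter might be driven for a locally checked-out marketplace; the directory path and plugin name below are illustrative, and the real call site sits in the ExtensionManager install flow:

import { convertClaudePluginPackage, isClaudePluginConfig } from './claude-converter.js';

async function installFromClaudeMarketplace(extensionDir: string): Promise<void> {
  const marketplace = {
    marketplaceSource: 'https://example.com/claude-marketplace', // illustrative
    pluginName: 'my-plugin', // illustrative
  };
  if (!isClaudePluginConfig(extensionDir, marketplace)) {
    throw new Error('Not a Claude marketplace plugin');
  }
  // Produces a temp directory containing qwen-extension.json plus collected
  // commands/, skills/, and agents/ folders, ready to install as a Qwen extension.
  const { config, convertedDir } = await convertClaudePluginPackage(
    extensionDir,
    marketplace,
  );
  console.log(`Converted ${config.name}@${config.version} into ${convertedDir}`);
}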
View File

@@ -0,0 +1,832 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
INSTALL_METADATA_FILENAME,
EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import { QWEN_DIR } from '../config/storage.js';
import {
ExtensionManager,
SettingScope,
type ExtensionManagerOptions,
validateName,
getExtensionId,
hashValue,
extensionConsentString,
maybeRequestConsentOrFail,
parseInstallSource,
type ExtensionConfig,
} from './extensionManager.js';
import type { MCPServerConfig, ExtensionInstallMetadata } from '../index.js';
const mockGit = {
clone: vi.fn(),
getRemotes: vi.fn(),
fetch: vi.fn(),
checkout: vi.fn(),
listRemote: vi.fn(),
revparse: vi.fn(),
path: vi.fn(),
};
vi.mock('simple-git', () => ({
simpleGit: vi.fn((path: string) => {
mockGit.path.mockReturnValue(path);
return mockGit;
}),
}));
vi.mock('./github.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('./github.js')>();
return {
...actual,
downloadFromGitHubRelease: vi
.fn()
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
};
});
const mockHomedir = vi.hoisted(() => vi.fn());
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: mockHomedir,
};
});
const mockLogExtensionEnable = vi.hoisted(() => vi.fn());
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
const mockLogExtensionDisable = vi.hoisted(() => vi.fn());
const mockLogExtensionUpdateEvent = vi.hoisted(() => vi.fn());
vi.mock('../telemetry/loggers.js', () => ({
logExtensionEnable: mockLogExtensionEnable,
logExtensionUpdateEvent: mockLogExtensionUpdateEvent,
}));
vi.mock('../index.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../index.js')>();
return {
...actual,
logExtensionEnable: mockLogExtensionEnable,
logExtensionInstallEvent: mockLogExtensionInstallEvent,
logExtensionUninstall: mockLogExtensionUninstall,
logExtensionDisable: mockLogExtensionDisable,
};
});
const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
function createExtension({
extensionsDir = 'extensions-dir',
name = 'my-extension',
version = '1.0.0',
addContextFile = false,
contextFileName = undefined as string | undefined,
mcpServers = {} as Record<string, MCPServerConfig>,
installMetadata = undefined as ExtensionInstallMetadata | undefined,
} = {}): string {
const extDir = path.join(extensionsDir, name);
fs.mkdirSync(extDir, { recursive: true });
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name, version, contextFileName, mcpServers }),
);
if (addContextFile) {
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
}
if (contextFileName) {
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
}
if (installMetadata) {
fs.writeFileSync(
path.join(extDir, INSTALL_METADATA_FILENAME),
JSON.stringify(installMetadata),
);
}
return extDir;
}
describe('extension tests', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'qwen-code-test-home-'),
);
tempWorkspaceDir = fs.mkdtempSync(
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
);
userExtensionsDir = path.join(tempHomeDir, EXTENSIONS_DIRECTORY_NAME);
fs.mkdirSync(userExtensionsDir, { recursive: true });
mockHomedir.mockReturnValue(tempHomeDir);
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
Object.values(mockGit).forEach((fn) => fn.mockReset());
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.restoreAllMocks();
});
function createExtensionManager(
options: Partial<ExtensionManagerOptions> = {},
): ExtensionManager {
return new ExtensionManager({
workspaceDir: tempWorkspaceDir,
isWorkspaceTrusted: true,
...options,
});
}
describe('loadExtension', () => {
it('should include extension path in loaded extension', async () => {
const extensionDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extensionDir, { recursive: true });
createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].path).toBe(extensionDir);
expect(extensions[0].config.name).toBe('test-extension');
});
it('should load context file path when QWEN.md is present', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
addContextFile: true,
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '2.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(2);
const ext1 = extensions.find((e) => e.config.name === 'ext1');
const ext2 = extensions.find((e) => e.config.name === 'ext2');
expect(ext1?.contextFiles).toEqual([
path.join(userExtensionsDir, 'ext1', 'QWEN.md'),
]);
expect(ext2?.contextFiles).toEqual([]);
});
it('should load context file path from the extension config', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
addContextFile: false,
contextFileName: 'my-context-file.md',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const ext1 = extensions.find((e) => e.config.name === 'ext1');
expect(ext1?.contextFiles).toEqual([
path.join(userExtensionsDir, 'ext1', 'my-context-file.md'),
]);
});
it('should skip extensions with invalid JSON and log a warning', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
// Good extension
createExtension({
extensionsDir: userExtensionsDir,
name: 'good-ext',
version: '1.0.0',
});
// Bad extension
const badExtDir = path.join(userExtensionsDir, 'bad-ext');
fs.mkdirSync(badExtDir);
const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
fs.writeFileSync(badConfigPath, '{ "name": "bad-ext"'); // Malformed
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].config.name).toBe('good-ext');
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
);
consoleSpy.mockRestore();
});
it('should skip extensions with missing name and log a warning', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
// Good extension
createExtension({
extensionsDir: userExtensionsDir,
name: 'good-ext',
version: '1.0.0',
});
// Bad extension
const badExtDir = path.join(userExtensionsDir, 'bad-ext-no-name');
fs.mkdirSync(badExtDir);
const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
fs.writeFileSync(badConfigPath, JSON.stringify({ version: '1.0.0' }));
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].config.name).toBe('good-ext');
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
);
consoleSpy.mockRestore();
});
it('should filter trust out of mcp servers', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
trust: true,
} as MCPServerConfig,
},
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
// trust should be filtered from extension.mcpServers (not config.mcpServers)
expect(extensions[0].mcpServers?.['test-server']?.trust).toBeUndefined();
// config.mcpServers should still have trust (original config)
expect(extensions[0].config.mcpServers?.['test-server']?.trust).toBe(
true,
);
});
});
describe('enableExtension / disableExtension', () => {
it('should disable an extension at the user scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('my-extension', SettingScope.User);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should disable an extension at the workspace scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension(
'my-extension',
SettingScope.Workspace,
tempWorkspaceDir,
);
expect(manager.isEnabled('my-extension', tempHomeDir)).toBe(true);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should handle disabling the same extension twice', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('my-extension', SettingScope.User);
manager.disableExtension('my-extension', SettingScope.User);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should throw an error if you request system scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
expect(() =>
manager.disableExtension('my-extension', SettingScope.System),
).toThrow('System and SystemDefaults scopes are not supported.');
});
it('should enable an extension at the user scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('ext1', SettingScope.User);
expect(manager.isEnabled('ext1')).toBe(false);
manager.enableExtension('ext1', SettingScope.User);
expect(manager.isEnabled('ext1')).toBe(true);
});
it('should enable an extension at the workspace scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('ext1', SettingScope.Workspace);
expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(false);
manager.enableExtension('ext1', SettingScope.Workspace);
expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(true);
});
});
describe('validateExtensionOverrides', () => {
it('should mark all extensions as active if no enabled extensions are provided', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(2);
expect(extensions.every((e) => e.isActive)).toBe(true);
});
it('should mark only the enabled extensions as active', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext3',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['ext1', 'ext3'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
expect(extensions.find((e) => e.name === 'ext2')?.isActive).toBe(false);
expect(extensions.find((e) => e.name === 'ext3')?.isActive).toBe(true);
});
it('should mark all extensions as inactive when "none" is provided', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['none'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.every((e) => !e.isActive)).toBe(true);
});
it('should handle case-insensitivity', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['EXT1'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
});
it('should log an error for unknown extensions', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['ext4'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
manager.validateExtensionOverrides(extensions);
expect(consoleSpy).toHaveBeenCalledWith('Extension not found: ext4');
consoleSpy.mockRestore();
});
});
describe('loadExtensionConfig', () => {
it('should resolve environment variables in extension configuration', async () => {
process.env['TEST_API_KEY'] = 'test-api-key-123';
process.env['TEST_DB_URL'] = 'postgresql://localhost:5432/testdb';
try {
const extDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extDir);
const extensionConfig = {
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
env: {
API_KEY: '$TEST_API_KEY',
DATABASE_URL: '${TEST_DB_URL}',
STATIC_VALUE: 'no-substitution',
},
},
},
};
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify(extensionConfig),
);
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const extension = extensions[0];
expect(extension.config.name).toBe('test-extension');
expect(extension.config.mcpServers).toBeDefined();
const serverConfig = extension.config.mcpServers?.['test-server'];
expect(serverConfig).toBeDefined();
expect(serverConfig?.env).toBeDefined();
expect(serverConfig?.env?.['API_KEY']).toBe('test-api-key-123');
expect(serverConfig?.env?.['DATABASE_URL']).toBe(
'postgresql://localhost:5432/testdb',
);
expect(serverConfig?.env?.['STATIC_VALUE']).toBe('no-substitution');
} finally {
delete process.env['TEST_API_KEY'];
delete process.env['TEST_DB_URL'];
}
});
it('should handle missing environment variables gracefully', async () => {
const extDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extDir);
const extensionConfig = {
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
env: {
MISSING_VAR: '$UNDEFINED_ENV_VAR',
MISSING_VAR_BRACES: '${ALSO_UNDEFINED}',
},
},
},
};
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify(extensionConfig),
);
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const extension = extensions[0];
const serverConfig = extension.config.mcpServers!['test-server'];
expect(serverConfig.env).toBeDefined();
expect(serverConfig.env!['MISSING_VAR']).toBe('$UNDEFINED_ENV_VAR');
expect(serverConfig.env!['MISSING_VAR_BRACES']).toBe('${ALSO_UNDEFINED}');
});
});
});
describe('extensionManager utility functions', () => {
describe('validateName', () => {
it('should accept valid extension names', () => {
expect(() => validateName('my-extension')).not.toThrow();
expect(() => validateName('Extension123')).not.toThrow();
expect(() => validateName('test-ext-1')).not.toThrow();
expect(() => validateName('UPPERCASE')).not.toThrow();
});
it('should reject names with invalid characters', () => {
expect(() => validateName('my_extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my.extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my@ext')).toThrow('Invalid extension name');
});
it('should reject empty names', () => {
expect(() => validateName('')).toThrow('Invalid extension name');
});
});
describe('hashValue', () => {
it('should generate consistent hash for same input', () => {
const hash1 = hashValue('test-input');
const hash2 = hashValue('test-input');
expect(hash1).toBe(hash2);
});
it('should generate different hashes for different inputs', () => {
const hash1 = hashValue('input-1');
const hash2 = hashValue('input-2');
expect(hash1).not.toBe(hash2);
});
it('should generate a valid SHA256 hash', () => {
const hash = hashValue('test');
expect(hash).toMatch(/^[a-f0-9]{64}$/);
});
});
describe('getExtensionId', () => {
it('should use hashed name when no install metadata', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const id = getExtensionId(config);
expect(id).toBe(hashValue('test-ext'));
});
it('should use hashed source for local install', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const metadata = { type: 'local' as const, source: '/path/to/ext' };
const id = getExtensionId(config, metadata);
expect(id).toBe(hashValue('/path/to/ext'));
});
it('should use GitHub URL for git install', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const metadata = {
type: 'git' as const,
source: 'https://github.com/owner/repo',
};
const id = getExtensionId(config, metadata);
expect(id).toBe(hashValue('https://github.com/owner/repo'));
});
});
describe('extensionConsentString', () => {
it('should generate basic consent string', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const consent = extensionConsentString(config);
expect(consent).toContain('Installing extension "test-ext"');
expect(consent).toContain('Extensions may introduce unexpected behavior');
});
it('should include MCP servers in consent string', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
mcpServers: {
'my-server': { command: 'node', args: ['server.js'] },
},
};
const consent = extensionConsentString(config);
expect(consent).toContain(
'This extension will run the following MCP servers',
);
expect(consent).toContain('my-server');
});
it('should include commands in consent string', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const consent = extensionConsentString(config, ['cmd1', 'cmd2']);
expect(consent).toContain(
'This extension will add the following commands',
);
expect(consent).toContain('cmd1, cmd2');
});
it('should include context file info', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
contextFileName: 'CONTEXT.md',
};
const consent = extensionConsentString(config);
expect(consent).toContain('CONTEXT.md');
});
it('should include excluded tools', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
excludeTools: ['tool1', 'tool2'],
};
const consent = extensionConsentString(config);
expect(consent).toContain('exclude the following core tools');
});
});
describe('maybeRequestConsentOrFail', () => {
it('should request consent for new installation', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(config, requestConsent, []);
expect(requestConsent).toHaveBeenCalledTimes(1);
});
it('should throw if user declines consent', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const requestConsent = vi.fn().mockResolvedValue(false);
await expect(
maybeRequestConsentOrFail(config, requestConsent, []),
).rejects.toThrow('Installation cancelled');
});
it('should skip consent if config unchanged during update', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '0.9.0',
};
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(
config,
requestConsent,
[],
previousConfig,
);
expect(requestConsent).not.toHaveBeenCalled();
});
it('should request consent if config changed during update', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
mcpServers: { server: { command: 'node' } },
};
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '0.9.0',
};
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(
config,
requestConsent,
[],
previousConfig,
);
expect(requestConsent).toHaveBeenCalledTimes(1);
});
});
describe('parseInstallSource', () => {
it('should parse HTTPS URL as git type', async () => {
const result = await parseInstallSource('https://github.com/owner/repo');
expect(result.type).toBe('git');
expect(result.source).toBe('https://github.com/owner/repo');
});
it('should parse HTTP URL as git type', async () => {
const result = await parseInstallSource('http://example.com/repo');
expect(result.type).toBe('git');
});
it('should parse git@ URL as git type', async () => {
const result = await parseInstallSource('git@github.com:owner/repo.git');
expect(result.type).toBe('git');
});
it('should parse sso:// URL as git type', async () => {
const result = await parseInstallSource('sso://some/path');
expect(result.type).toBe('git');
});
it('should parse marketplace URL correctly', async () => {
const result = await parseInstallSource(
'https://example.com/marketplace:plugin-name',
);
expect(result.type).toBe('marketplace');
expect(result.marketplace?.pluginName).toBe('plugin-name');
});
it('should throw for non-existent local path', async () => {
await expect(
parseInstallSource('/nonexistent/path/to/extension'),
).rejects.toThrow('Install source not found');
});
});
});
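
Taken together, the behaviors exercised above suggest roughly the following install-time flow. This is an illustrative sketch only; the real orchestration lives in extensionManager.ts (whose diff is suppressed below), and the hypothetical sketchInstallFlow helper assumes that the result of parseInstallSource is accepted as install metadata by getExtensionId.

import {
  validateName,
  getExtensionId,
  maybeRequestConsentOrFail,
  parseInstallSource,
  type ExtensionConfig,
} from './extensionManager.js';

// Hypothetical helper, shown only to connect the utilities tested above.
async function sketchInstallFlow(
  source: string,
  config: ExtensionConfig,
  requestConsent: (consent: string) => Promise<boolean>,
): Promise<string> {
  validateName(config.name); // throws for names outside letters, digits, and hyphens
  const installMetadata = await parseInstallSource(source); // git | marketplace | local
  // Consent text covers MCP servers, added commands, context files, and excluded tools.
  await maybeRequestConsentOrFail(config, requestConsent, []);
  // Stable id: SHA256 of the install source, or of the name when there is no metadata.
  return getExtensionId(config, installMetadata);
}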

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,730 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import * as path from 'node:path';
import * as os from 'node:os';
import {
getEnvContents,
maybePromptForSettings,
promptForSetting,
type ExtensionSetting,
updateSetting,
ExtensionSettingScope,
getScopedEnvContents,
} from './extensionSettings.js';
import type { ExtensionConfig } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import prompts from 'prompts';
import * as fsPromises from 'node:fs/promises';
import * as fs from 'node:fs';
import { KeychainTokenStorage } from '../mcp/token-storage/keychain-token-storage.js';
import { EXTENSION_SETTINGS_FILENAME } from './variables.js';
vi.mock('prompts');
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: vi.fn(),
};
});
vi.mock(
'../mcp/token-storage/keychain-token-storage.js',
async (importOriginal) => {
const actual =
await importOriginal<
typeof import('../mcp/token-storage/keychain-token-storage.js')
>();
return {
...actual,
KeychainTokenStorage: vi.fn(),
};
},
);
describe('extensionSettings', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let extensionDir: string;
let mockKeychainData: Record<string, Record<string, string>>;
beforeEach(() => {
vi.clearAllMocks();
mockKeychainData = {};
vi.mocked(KeychainTokenStorage).mockImplementation(
(serviceName: string) => {
if (!mockKeychainData[serviceName]) {
mockKeychainData[serviceName] = {};
}
const keychainData = mockKeychainData[serviceName];
return {
getSecret: vi
.fn()
.mockImplementation(
async (key: string) => keychainData[key] || null,
),
setSecret: vi
.fn()
.mockImplementation(async (key: string, value: string) => {
keychainData[key] = value;
}),
deleteSecret: vi.fn().mockImplementation(async (key: string) => {
delete keychainData[key];
}),
listSecrets: vi
.fn()
.mockImplementation(async () => Object.keys(keychainData)),
isAvailable: vi.fn().mockResolvedValue(true),
} as unknown as KeychainTokenStorage;
},
);
tempHomeDir = path.join(os.tmpdir(), `gemini-cli-test-home-${Date.now()}`);
tempWorkspaceDir = path.join(
os.tmpdir(),
`gemini-cli-test-workspace-${Date.now()}`,
);
extensionDir = path.join(tempHomeDir, '.gemini', 'extensions', 'test-ext');
// Spy and mock the method, but also create the directory so we can write to it.
vi.spyOn(ExtensionStorage.prototype, 'getExtensionDir').mockReturnValue(
extensionDir,
);
fs.mkdirSync(extensionDir, { recursive: true });
fs.mkdirSync(tempWorkspaceDir, { recursive: true });
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
vi.mocked(prompts).mockClear();
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
vi.restoreAllMocks();
});
describe('maybePromptForSettings', () => {
const mockRequestSetting = vi.fn(
async (setting: ExtensionSetting) => `mock-${setting.envVar}`,
);
beforeEach(() => {
mockRequestSetting.mockClear();
});
it('should do nothing if settings are undefined', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
});
it('should do nothing if settings are empty', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
});
it('should prompt for all settings if there is no previous config', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(2);
expect(mockRequestSetting).toHaveBeenCalledWith(config.settings![0]);
expect(mockRequestSetting).toHaveBeenCalledWith(config.settings![1]);
});
it('should only prompt for new settings', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const previousSettings = { VAR1: 'previous-VAR1' };
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(1);
expect(mockRequestSetting).toHaveBeenCalledWith(newConfig.settings![1]);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\nVAR2=mock-VAR2\n';
expect(actualContent).toBe(expectedContent);
});
it('should clear settings if new config has no settings', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{
name: 's2',
description: 'd2',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
const previousSettings = {
VAR1: 'previous-VAR1',
SENSITIVE_VAR: 'secret',
};
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'secret');
const envPath = path.join(extensionDir, '.env');
await fsPromises.writeFile(envPath, 'VAR1=previous-VAR1');
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const actualContent = await fsPromises.readFile(envPath, 'utf-8');
expect(actualContent).toBe('');
expect(await userKeychain.getSecret('SENSITIVE_VAR')).toBeNull();
});
it('should remove sensitive settings from keychain', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{
name: 's1',
description: 'd1',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
const previousSettings = { SENSITIVE_VAR: 'secret' };
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'secret');
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(await userKeychain.getSecret('SENSITIVE_VAR')).toBeNull();
});
it('should remove settings that are no longer in the config', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
const previousSettings = {
VAR1: 'previous-VAR1',
VAR2: 'previous-VAR2',
};
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\n';
expect(actualContent).toBe(expectedContent);
});
it('should reprompt if a setting changes sensitivity', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1', sensitive: false },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1', sensitive: true },
],
};
const previousSettings = { VAR1: 'previous-VAR1' };
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(1);
expect(mockRequestSetting).toHaveBeenCalledWith(newConfig.settings![0]);
// The value should now be in keychain, not the .env file.
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toBe('');
});
it('should not prompt if settings are identical', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const previousSettings = {
VAR1: 'previous-VAR1',
VAR2: 'previous-VAR2',
};
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\nVAR2=previous-VAR2\n';
expect(actualContent).toBe(expectedContent);
});
it('should wrap values with spaces in quotes', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
mockRequestSetting.mockResolvedValue('a value with spaces');
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toBe('VAR1="a value with spaces"\n');
});
it('should not attempt to clear secrets if keychain is unavailable', async () => {
// Arrange
const mockIsAvailable = vi.fn().mockResolvedValue(false);
const mockListSecrets = vi.fn();
vi.mocked(KeychainTokenStorage).mockImplementation(
() =>
({
isAvailable: mockIsAvailable,
listSecrets: mockListSecrets,
deleteSecret: vi.fn(),
getSecret: vi.fn(),
setSecret: vi.fn(),
}) as unknown as KeychainTokenStorage,
);
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [], // Empty settings triggers clearSettings
};
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
// Act
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
previousConfig,
undefined,
);
// Assert
expect(mockIsAvailable).toHaveBeenCalled();
expect(mockListSecrets).not.toHaveBeenCalled();
});
});
describe('promptForSetting', () => {
it.each([
{
description:
'should use prompts with type "password" for sensitive settings',
setting: {
name: 'API Key',
description: 'Your secret key',
envVar: 'API_KEY',
sensitive: true,
},
expectedType: 'password',
promptValue: 'secret-key',
},
{
description:
'should use prompts with type "text" for non-sensitive settings',
setting: {
name: 'Username',
description: 'Your public username',
envVar: 'USERNAME',
sensitive: false,
},
expectedType: 'text',
promptValue: 'test-user',
},
{
description: 'should default to "text" if sensitive is undefined',
setting: {
name: 'Username',
description: 'Your public username',
envVar: 'USERNAME',
},
expectedType: 'text',
promptValue: 'test-user',
},
])('$description', async ({ setting, expectedType, promptValue }) => {
vi.mocked(prompts).mockResolvedValue({ value: promptValue });
const result = await promptForSetting(setting as ExtensionSetting);
expect(prompts).toHaveBeenCalledWith({
type: expectedType,
name: 'value',
message: `${setting.name}\n${setting.description}`,
});
expect(result).toBe(promptValue);
});
it('should return undefined if the user cancels the prompt', async () => {
vi.mocked(prompts).mockResolvedValue({ value: undefined });
const result = await promptForSetting({
name: 'Test',
description: 'Test desc',
envVar: 'TEST_VAR',
});
expect(result).toBeUndefined();
});
});
describe('getScopedEnvContents', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{
name: 's2',
description: 'd2',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const extensionId = '12345';
it('should return combined contents from user .env and keychain for USER scope', async () => {
const userEnvPath = path.join(extensionDir, EXTENSION_SETTINGS_FILENAME);
await fsPromises.writeFile(userEnvPath, 'VAR1=user-value1');
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'user-secret');
const contents = await getScopedEnvContents(
config,
extensionId,
ExtensionSettingScope.USER,
);
expect(contents).toEqual({
VAR1: 'user-value1',
SENSITIVE_VAR: 'user-secret',
});
});
it('should return combined contents from workspace .env and keychain for WORKSPACE scope', async () => {
const workspaceEnvPath = path.join(
tempWorkspaceDir,
EXTENSION_SETTINGS_FILENAME,
);
await fsPromises.writeFile(workspaceEnvPath, 'VAR1=workspace-value1');
const workspaceKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345 ${tempWorkspaceDir}`,
);
await workspaceKeychain.setSecret('SENSITIVE_VAR', 'workspace-secret');
const contents = await getScopedEnvContents(
config,
extensionId,
ExtensionSettingScope.WORKSPACE,
);
expect(contents).toEqual({
VAR1: 'workspace-value1',
SENSITIVE_VAR: 'workspace-secret',
});
});
});
describe('getEnvContents (merged)', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2', sensitive: true },
{ name: 's3', description: 'd3', envVar: 'VAR3' },
],
};
const extensionId = '12345';
it('should merge user and workspace settings, with workspace taking precedence', async () => {
// User settings
const userEnvPath = path.join(extensionDir, EXTENSION_SETTINGS_FILENAME);
await fsPromises.writeFile(
userEnvPath,
'VAR1=user-value1\nVAR3=user-value3',
);
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext ${extensionId}`,
);
await userKeychain.setSecret('VAR2', 'user-secret2');
// Workspace settings
const workspaceEnvPath = path.join(
tempWorkspaceDir,
EXTENSION_SETTINGS_FILENAME,
);
await fsPromises.writeFile(workspaceEnvPath, 'VAR1=workspace-value1');
const workspaceKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext ${extensionId} ${tempWorkspaceDir}`,
);
await workspaceKeychain.setSecret('VAR2', 'workspace-secret2');
const contents = await getEnvContents(config, extensionId);
expect(contents).toEqual({
VAR1: 'workspace-value1',
VAR2: 'workspace-secret2',
VAR3: 'user-value3',
});
});
});
describe('updateSetting', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2', sensitive: true },
],
};
const mockRequestSetting = vi.fn();
beforeEach(async () => {
const userEnvPath = path.join(extensionDir, '.env');
await fsPromises.writeFile(userEnvPath, 'VAR1=value1\n');
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('VAR2', 'value2');
mockRequestSetting.mockClear();
});
it('should update a non-sensitive setting in USER scope', async () => {
mockRequestSetting.mockResolvedValue('new-value1');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.USER,
);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toContain('VAR1=new-value1');
});
it('should update a non-sensitive setting in WORKSPACE scope', async () => {
mockRequestSetting.mockResolvedValue('new-workspace-value');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
const expectedEnvPath = path.join(tempWorkspaceDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toContain('VAR1=new-workspace-value');
});
it('should update a sensitive setting in USER scope', async () => {
mockRequestSetting.mockResolvedValue('new-value2');
await updateSetting(
config,
'12345',
'VAR2',
mockRequestSetting,
ExtensionSettingScope.USER,
);
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
expect(await userKeychain.getSecret('VAR2')).toBe('new-value2');
});
it('should update a sensitive setting in WORKSPACE scope', async () => {
mockRequestSetting.mockResolvedValue('new-workspace-secret');
await updateSetting(
config,
'12345',
'VAR2',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
const workspaceKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345 ${tempWorkspaceDir}`,
);
expect(await workspaceKeychain.getSecret('VAR2')).toBe(
'new-workspace-secret',
);
});
it('should leave existing, unmanaged .env variables intact when updating in WORKSPACE scope', async () => {
// Setup a pre-existing .env file in the workspace with unmanaged variables
const workspaceEnvPath = path.join(tempWorkspaceDir, '.env');
const originalEnvContent =
'PROJECT_VAR_1=value_1\nPROJECT_VAR_2=value_2\nVAR1=original-value'; // VAR1 is managed by extension
await fsPromises.writeFile(workspaceEnvPath, originalEnvContent);
// Simulate updating an extension-managed non-sensitive setting
mockRequestSetting.mockResolvedValue('updated-value');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
// Read the .env file after update
const actualContent = await fsPromises.readFile(
workspaceEnvPath,
'utf-8',
);
// Assert that original variables are intact and extension variable is updated
expect(actualContent).toContain('PROJECT_VAR_1=value_1');
expect(actualContent).toContain('PROJECT_VAR_2=value_2');
expect(actualContent).toContain('VAR1=updated-value');
// Ensure no other unexpected changes or deletions
const lines = actualContent.split('\n').filter((line) => line.length > 0);
expect(lines).toHaveLength(3); // Should only have the three variables
});
});
});

View File

@@ -0,0 +1,298 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs/promises';
import * as fsSync from 'node:fs';
import * as dotenv from 'dotenv';
import * as path from 'node:path';
import { ExtensionStorage } from './storage.js';
import type { ExtensionConfig } from './extensionManager.js';
import prompts from 'prompts';
import { EXTENSION_SETTINGS_FILENAME } from './variables.js';
import { KeychainTokenStorage } from '../mcp/token-storage/keychain-token-storage.js';
export enum ExtensionSettingScope {
USER = 'user',
WORKSPACE = 'workspace',
}
export interface ExtensionSetting {
name: string;
description: string;
envVar: string;
// NOTE: When this flag is omitted, the setting is treated as NOT sensitive.
sensitive?: boolean;
}
const getKeychainStorageName = (
extensionName: string,
extensionId: string,
scope: ExtensionSettingScope,
): string => {
const base = `Qwen Code Extensions ${extensionName} ${extensionId}`;
if (scope === ExtensionSettingScope.WORKSPACE) {
return `${base} ${process.cwd()}`;
}
return base;
};
const getEnvFilePath = (
extensionName: string,
scope: ExtensionSettingScope,
): string => {
if (scope === ExtensionSettingScope.WORKSPACE) {
return path.join(process.cwd(), EXTENSION_SETTINGS_FILENAME);
}
return new ExtensionStorage(extensionName).getEnvFilePath();
};
export async function maybePromptForSettings(
extensionConfig: ExtensionConfig,
extensionId: string,
requestSetting: (setting: ExtensionSetting) => Promise<string>,
previousExtensionConfig?: ExtensionConfig,
previousSettings?: Record<string, string>,
): Promise<void> {
const { name: extensionName, settings } = extensionConfig;
if (
(!settings || settings.length === 0) &&
(!previousExtensionConfig?.settings ||
previousExtensionConfig.settings.length === 0)
) {
return;
}
// We assume user scope here because we don't have a way to ask the user for scope during the initial setup.
// The user can change the scope later using the `settings set` command.
const scope = ExtensionSettingScope.USER;
const envFilePath = getEnvFilePath(extensionName, scope);
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
if (!settings || settings.length === 0) {
await clearSettings(envFilePath, keychain);
return;
}
const settingsChanges = getSettingsChanges(
settings,
previousExtensionConfig?.settings ?? [],
);
const allSettings: Record<string, string> = { ...previousSettings };
for (const removedEnvSetting of settingsChanges.removeEnv) {
delete allSettings[removedEnvSetting.envVar];
}
for (const removedSensitiveSetting of settingsChanges.removeSensitive) {
await keychain.deleteSecret(removedSensitiveSetting.envVar);
}
for (const setting of settingsChanges.promptForSensitive.concat(
settingsChanges.promptForEnv,
)) {
const answer = await requestSetting(setting);
allSettings[setting.envVar] = answer;
}
const nonSensitiveSettings: Record<string, string> = {};
for (const setting of settings) {
const value = allSettings[setting.envVar];
if (value === undefined) {
continue;
}
if (setting.sensitive) {
await keychain.setSecret(setting.envVar, value);
} else {
nonSensitiveSettings[setting.envVar] = value;
}
}
const envContent = formatEnvContent(nonSensitiveSettings);
await fs.writeFile(envFilePath, envContent);
}
function formatEnvContent(settings: Record<string, string>): string {
let envContent = '';
for (const [key, value] of Object.entries(settings)) {
const formattedValue = value.includes(' ') ? `"${value}"` : value;
envContent += `${key}=${formattedValue}\n`;
}
return envContent;
}
export async function promptForSetting(
setting: ExtensionSetting,
): Promise<string> {
const response = await prompts({
type: setting.sensitive ? 'password' : 'text',
name: 'value',
message: `${setting.name}\n${setting.description}`,
});
return response.value;
}
export async function getScopedEnvContents(
extensionConfig: ExtensionConfig,
extensionId: string,
scope: ExtensionSettingScope,
): Promise<Record<string, string>> {
const { name: extensionName } = extensionConfig;
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
const envFilePath = getEnvFilePath(extensionName, scope);
let customEnv: Record<string, string> = {};
if (fsSync.existsSync(envFilePath)) {
const envFile = fsSync.readFileSync(envFilePath, 'utf-8');
customEnv = dotenv.parse(envFile);
}
if (extensionConfig.settings) {
for (const setting of extensionConfig.settings) {
if (setting.sensitive) {
const secret = await keychain.getSecret(setting.envVar);
if (secret) {
customEnv[setting.envVar] = secret;
}
}
}
}
return customEnv;
}
export async function getEnvContents(
extensionConfig: ExtensionConfig,
extensionId: string,
): Promise<Record<string, string>> {
if (!extensionConfig.settings || extensionConfig.settings.length === 0) {
return Promise.resolve({});
}
const userSettings = await getScopedEnvContents(
extensionConfig,
extensionId,
ExtensionSettingScope.USER,
);
const workspaceSettings = await getScopedEnvContents(
extensionConfig,
extensionId,
ExtensionSettingScope.WORKSPACE,
);
return { ...userSettings, ...workspaceSettings };
}
export async function updateSetting(
extensionConfig: ExtensionConfig,
extensionId: string,
settingKey: string,
requestSetting: (setting: ExtensionSetting) => Promise<string>,
scope: ExtensionSettingScope,
): Promise<void> {
const { name: extensionName, settings } = extensionConfig;
if (!settings || settings.length === 0) {
console.log('This extension does not have any settings.');
return;
}
const settingToUpdate = settings.find(
(s) => s.name === settingKey || s.envVar === settingKey,
);
if (!settingToUpdate) {
console.log(`Setting ${settingKey} not found.`);
return;
}
const newValue = await requestSetting(settingToUpdate);
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
if (settingToUpdate.sensitive) {
await keychain.setSecret(settingToUpdate.envVar, newValue);
return;
}
// For non-sensitive settings, we need to read the existing .env file,
// update the value, and write it back, preserving any other values.
const envFilePath = getEnvFilePath(extensionName, scope);
let envContent = '';
if (fsSync.existsSync(envFilePath)) {
envContent = await fs.readFile(envFilePath, 'utf-8');
}
const parsedEnv = dotenv.parse(envContent);
parsedEnv[settingToUpdate.envVar] = newValue;
// We only want to write back the variables that are not sensitive.
const nonSensitiveSettings: Record<string, string> = {};
const sensitiveEnvVars = new Set(
settings.filter((s) => s.sensitive).map((s) => s.envVar),
);
for (const [key, value] of Object.entries(parsedEnv)) {
if (!sensitiveEnvVars.has(key)) {
nonSensitiveSettings[key] = value;
}
}
const newEnvContent = formatEnvContent(nonSensitiveSettings);
await fs.writeFile(envFilePath, newEnvContent);
}
interface SettingsChanges {
promptForSensitive: ExtensionSetting[];
removeSensitive: ExtensionSetting[];
promptForEnv: ExtensionSetting[];
removeEnv: ExtensionSetting[];
}
function getSettingsChanges(
settings: ExtensionSetting[],
oldSettings: ExtensionSetting[],
): SettingsChanges {
const isSameSetting = (a: ExtensionSetting, b: ExtensionSetting) =>
a.envVar === b.envVar && (a.sensitive ?? false) === (b.sensitive ?? false);
const sensitiveOld = oldSettings.filter((s) => s.sensitive ?? false);
const sensitiveNew = settings.filter((s) => s.sensitive ?? false);
const envOld = oldSettings.filter((s) => !(s.sensitive ?? false));
const envNew = settings.filter((s) => !(s.sensitive ?? false));
return {
promptForSensitive: sensitiveNew.filter(
(s) => !sensitiveOld.some((old) => isSameSetting(s, old)),
),
removeSensitive: sensitiveOld.filter(
(s) => !sensitiveNew.some((neu) => isSameSetting(s, neu)),
),
promptForEnv: envNew.filter(
(s) => !envOld.some((old) => isSameSetting(s, old)),
),
removeEnv: envOld.filter(
(s) => !envNew.some((neu) => isSameSetting(s, neu)),
),
};
}
async function clearSettings(
envFilePath: string,
keychain: KeychainTokenStorage,
) {
if (fsSync.existsSync(envFilePath)) {
await fs.writeFile(envFilePath, '');
}
if (!(await keychain.isAvailable())) {
return;
}
const secrets = await keychain.listSecrets();
for (const secret of secrets) {
await keychain.deleteSecret(secret);
}
return;
}
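
As a quick illustration of how these pieces fit together, the sketch below declares two settings, runs the install-time prompt, and then reads the merged values back. The extension name, id, and settings are invented for this example; only the imported names come from the modules above.

import {
  maybePromptForSettings,
  promptForSetting,
  getEnvContents,
} from './extensionSettings.js';
import type { ExtensionConfig } from './extensionManager.js';

const exampleConfig: ExtensionConfig = {
  name: 'example-ext',
  version: '1.0.0',
  settings: [
    // Sensitive values are written to the OS keychain.
    {
      name: 'API Key',
      description: 'Token for the example API',
      envVar: 'EXAMPLE_API_KEY',
      sensitive: true,
    },
    // Non-sensitive values land in the extension's .env file.
    { name: 'Region', description: 'Deployment region', envVar: 'EXAMPLE_REGION' },
  ],
};

// At install/update time: prompts only for settings that are new or whose sensitivity changed.
await maybePromptForSettings(exampleConfig, 'example-id', promptForSetting);

// At load time: user-scope values merged with workspace-scope values; workspace wins.
const env = await getEnvContents(exampleConfig, 'example-id');
console.log(Object.keys(env)); // e.g. ['EXAMPLE_REGION', 'EXAMPLE_API_KEY']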

View File

@@ -0,0 +1,179 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import {
convertGeminiToQwenConfig,
isGeminiExtensionConfig,
type GeminiExtensionConfig,
} from './gemini-converter.js';
// Mock fs module
vi.mock('node:fs', async (importOriginal) => {
const actual = await importOriginal<typeof import('node:fs')>();
return {
...actual,
existsSync: vi.fn(),
readFileSync: vi.fn(),
};
});
describe('convertGeminiToQwenConfig', () => {
beforeEach(() => {
vi.clearAllMocks();
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should convert basic Gemini config from directory', () => {
const mockDir = '/mock/extension/dir';
const geminiConfig: GeminiExtensionConfig = {
name: 'test-extension',
version: '1.0.0',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(geminiConfig));
const result = convertGeminiToQwenConfig(mockDir);
expect(result.name).toBe('test-extension');
expect(result.version).toBe('1.0.0');
expect(fs.readFileSync).toHaveBeenCalledWith(
path.join(mockDir, 'gemini-extension.json'),
'utf-8',
);
});
it('should convert config with all optional fields', () => {
const mockDir = '/mock/extension/dir';
const geminiConfig = {
name: 'full-extension',
version: '2.0.0',
mcpServers: { server1: {} },
contextFileName: 'context.txt',
excludeTools: ['tool1', 'tool2'],
settings: [
{ name: 'Setting1', envVar: 'VAR1', description: 'Test setting' },
],
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(geminiConfig));
const result = convertGeminiToQwenConfig(mockDir);
expect(result.name).toBe('full-extension');
expect(result.version).toBe('2.0.0');
expect(result.mcpServers).toEqual({ server1: {} });
expect(result.contextFileName).toBe('context.txt');
expect(result.excludeTools).toEqual(['tool1', 'tool2']);
expect(result.settings).toHaveLength(1);
expect(result.settings?.[0].name).toBe('Setting1');
});
it('should throw error for missing name', () => {
const mockDir = '/mock/extension/dir';
const invalidConfig = {
version: '1.0.0',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(() => convertGeminiToQwenConfig(mockDir)).toThrow(
'Gemini extension config must have name and version fields',
);
});
it('should throw error for missing version', () => {
const mockDir = '/mock/extension/dir';
const invalidConfig = {
name: 'test-extension',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(() => convertGeminiToQwenConfig(mockDir)).toThrow(
'Gemini extension config must have name and version fields',
);
});
});
describe('isGeminiExtensionConfig', () => {
beforeEach(() => {
vi.clearAllMocks();
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should identify Gemini extension directory with valid config', () => {
const mockDir = '/mock/extension/dir';
const mockConfig = {
name: 'test',
version: '1.0.0',
settings: [{ name: 'Test', envVar: 'TEST', description: 'Test' }],
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(mockConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(true);
expect(fs.existsSync).toHaveBeenCalledWith(
path.join(mockDir, 'gemini-extension.json'),
);
});
it('should return false when gemini-extension.json does not exist', () => {
const mockDir = '/mock/nonexistent/dir';
vi.mocked(fs.existsSync).mockReturnValue(false);
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return false for invalid config content', () => {
const mockDir = '/mock/invalid/dir';
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('null');
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return false for config missing required fields', () => {
const mockDir = '/mock/invalid/dir';
const invalidConfig = {
name: 'test',
// missing version
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return true for basic config without settings', () => {
const mockDir = '/mock/extension/dir';
const basicConfig = {
name: 'test',
version: '1.0.0',
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(basicConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(true);
});
});
// Note: convertGeminiExtensionPackage() is tested through integration tests
// as it requires real file system operations

View File

@@ -0,0 +1,217 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Converter for Gemini CLI extensions to Qwen Code format.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { glob } from 'glob';
import type { ExtensionConfig, ExtensionSetting } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import { convertTomlToMarkdown } from '../utils/toml-to-markdown-converter.js';
export interface GeminiExtensionConfig {
name: string;
version: string;
mcpServers?: Record<string, unknown>;
contextFileName?: string | string[];
excludeTools?: string[];
settings?: ExtensionSetting[];
}
/**
* Converts a Gemini CLI extension config to Qwen Code format.
* @param extensionDir Path to the Gemini extension directory
* @returns Qwen ExtensionConfig
*/
export function convertGeminiToQwenConfig(
extensionDir: string,
): ExtensionConfig {
const configFilePath = path.join(extensionDir, 'gemini-extension.json');
const configContent = fs.readFileSync(configFilePath, 'utf-8');
const geminiConfig: GeminiExtensionConfig = JSON.parse(configContent);
// Validate required fields
if (!geminiConfig.name || !geminiConfig.version) {
throw new Error(
'Gemini extension config must have name and version fields',
);
}
const settings: ExtensionSetting[] | undefined = geminiConfig.settings;
// Direct field mapping
return {
name: geminiConfig.name,
version: geminiConfig.version,
mcpServers: geminiConfig.mcpServers as ExtensionConfig['mcpServers'],
contextFileName: geminiConfig.contextFileName,
excludeTools: geminiConfig.excludeTools,
settings,
};
}
/**
* Converts a complete Gemini extension package to Qwen Code format.
* Creates a new temporary directory with:
* 1. Converted qwen-extension.json
* 2. Commands converted from TOML to MD
* 3. All other files/folders preserved
*
* @param extensionDir Path to the Gemini extension directory
* @returns Object containing converted config and the temporary directory path
*/
export async function convertGeminiExtensionPackage(
extensionDir: string,
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
const geminiConfig = convertGeminiToQwenConfig(extensionDir);
// Create temporary directory for converted extension
const tmpDir = await ExtensionStorage.createTmpDir();
try {
// Step 1: Copy all files and directories to temporary directory
await copyDirectory(extensionDir, tmpDir);
// Step 2: Convert TOML commands to Markdown in commands folder
const commandsDir = path.join(tmpDir, 'commands');
if (fs.existsSync(commandsDir)) {
await convertCommandsDirectory(commandsDir);
}
// Step 3: Create qwen-extension.json with converted config
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
fs.writeFileSync(
qwenConfigPath,
JSON.stringify(geminiConfig, null, 2),
'utf-8',
);
return {
config: geminiConfig,
convertedDir: tmpDir,
};
} catch (error) {
// Clean up temporary directory on error
try {
fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
throw error;
}
}
/**
* Recursively copies a directory and its contents.
* @param source Source directory path
* @param destination Destination directory path
*/
async function copyDirectory(
source: string,
destination: string,
): Promise<void> {
// Create destination directory if it doesn't exist
if (!fs.existsSync(destination)) {
fs.mkdirSync(destination, { recursive: true });
}
const entries = fs.readdirSync(source, { withFileTypes: true });
for (const entry of entries) {
const sourcePath = path.join(source, entry.name);
const destPath = path.join(destination, entry.name);
if (entry.isDirectory()) {
await copyDirectory(sourcePath, destPath);
} else {
fs.copyFileSync(sourcePath, destPath);
}
}
}
/**
* Converts all TOML command files in a directory to Markdown format.
* @param commandsDir Path to the commands directory
*/
async function convertCommandsDirectory(commandsDir: string): Promise<void> {
// Find all .toml files in the commands directory
const tomlFiles = await glob('**/*.toml', {
cwd: commandsDir,
nodir: true,
dot: false,
});
// Convert each TOML file to Markdown
for (const relativeFile of tomlFiles) {
const tomlPath = path.join(commandsDir, relativeFile);
try {
// Read TOML file
const tomlContent = fs.readFileSync(tomlPath, 'utf-8');
// Convert to Markdown
const markdownContent = convertTomlToMarkdown(tomlContent);
// Generate Markdown file path (same location, .md extension)
const markdownPath = tomlPath.replace(/\.toml$/, '.md');
// Write Markdown file
fs.writeFileSync(markdownPath, markdownContent, 'utf-8');
// Delete original TOML file
fs.unlinkSync(tomlPath);
} catch (error) {
console.warn(
`Warning: Failed to convert command file ${relativeFile}: ${error instanceof Error ? error.message : String(error)}`,
);
// Continue with other files even if one fails
}
}
}
/**
 * Checks whether a directory contains an extension in Gemini format.
 * This is a heuristic check based on typical Gemini extension patterns.
 * @param extensionDir Path to the extension directory to check
 * @returns true if the directory appears to contain a Gemini-format extension
 */
export function isGeminiExtensionConfig(extensionDir: string): boolean {
const configFilePath = path.join(extensionDir, 'gemini-extension.json');
if (!fs.existsSync(configFilePath)) {
return false;
}
const configContent = fs.readFileSync(configFilePath, 'utf-8');
const parsedConfig = JSON.parse(configContent);
if (typeof parsedConfig !== 'object' || parsedConfig === null) {
return false;
}
const obj = parsedConfig as Record<string, unknown>;
// Must have name and version
if (typeof obj['name'] !== 'string' || typeof obj['version'] !== 'string') {
return false;
}
// Check for Gemini-specific settings format
if (obj['settings'] && Array.isArray(obj['settings'])) {
const firstSetting = obj['settings'][0];
if (
firstSetting &&
typeof firstSetting === 'object' &&
'envVar' in firstSetting
) {
return true;
}
}
// Otherwise, treat any gemini-extension.json with a valid name and version as Gemini format.
return true;
}
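
A minimal sketch of how an installer could use this converter when it encounters a Gemini-format package; the resolveInstallDir wrapper is hypothetical and error handling is omitted.

import {
  isGeminiExtensionConfig,
  convertGeminiExtensionPackage,
} from './gemini-converter.js';

// Hypothetical wrapper: returns the directory that should actually be installed.
async function resolveInstallDir(extensionDir: string): Promise<string> {
  if (!isGeminiExtensionConfig(extensionDir)) {
    return extensionDir; // already in Qwen format (or not an extension at all)
  }
  // Copies the package into a temp dir, converts commands/*.toml to Markdown,
  // and writes qwen-extension.json alongside the original files.
  const { convertedDir } = await convertGeminiExtensionPackage(extensionDir);
  return convertedDir;
}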

View File

@@ -13,14 +13,17 @@ import {
parseGitHubRepoForReleases,
} from './github.js';
import { simpleGit, type SimpleGit } from 'simple-git';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import * as os from 'node:os';
import * as fs from 'node:fs/promises';
import * as fsSync from 'node:fs';
import * as path from 'node:path';
import * as tar from 'tar';
import * as archiver from 'archiver';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import {
ExtensionUpdateState,
type Extension,
type ExtensionManager,
} from './extensionManager.js';
const mockPlatform = vi.hoisted(() => vi.fn());
const mockArch = vi.hoisted(() => vi.fn());
@@ -123,119 +126,170 @@ describe('git extension helpers', () => {
revparse: vi.fn(),
};
const mockExtensionManager = {
loadExtensionConfig: vi.fn(),
} as unknown as ExtensionManager;
beforeEach(() => {
vi.mocked(simpleGit).mockReturnValue(mockGit as unknown as SimpleGit);
});
it('should return NOT_UPDATABLE for non-git extensions', async () => {
const extension: GeminiCLIExtension = {
function createExtension(overrides: Partial<Extension> = {}): Extension {
return {
id: 'test-id',
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
config: { name: 'test', version: '1.0.0' },
contextFiles: [],
...overrides,
};
}
it('should return NOT_UPDATABLE for non-git extensions', async () => {
const extension = createExtension({
installMetadata: {
type: 'link',
source: '',
},
};
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
});
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
});
it('should return ERROR if no remotes found', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: '',
},
};
});
mockGit.getRemotes.mockResolvedValue([]);
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.ERROR);
});
it('should return UPDATE_AVAILABLE when remote hash is different', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
mockGit.listRemote.mockResolvedValue('remote-hash\tHEAD');
mockGit.revparse.mockResolvedValue('local-hash');
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
});
it('should return UP_TO_DATE when remote and local hashes are the same', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
mockGit.listRemote.mockResolvedValue('same-hash\tHEAD');
mockGit.revparse.mockResolvedValue('same-hash');
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
});
it('should return ERROR on git error', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockRejectedValue(new Error('git error'));
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.ERROR);
});
it('should return UPDATE_AVAILABLE for local extension with different version', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockReturnValue({
name: 'test',
version: '2.0.0',
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
});
it('should return UP_TO_DATE for local extension with same version', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockReturnValue({
name: 'test',
version: '1.0.0',
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
});
it('should return NOT_UPDATABLE for local extension when source cannot be loaded', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockImplementation(() => {
throw new Error('Cannot load config');
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
});
});
describe('findReleaseAsset', () => {

View File

@@ -5,19 +5,38 @@
*/
import { simpleGit } from 'simple-git';
import { getErrorMessage } from '../../utils/errors.js';
import type {
ExtensionInstallMetadata,
GeminiCLIExtension,
} from '@qwen-code/qwen-code-core';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { getErrorMessage } from '../utils/errors.js';
import * as os from 'node:os';
import * as https from 'node:https';
import * as fs from 'node:fs';
import * as path from 'node:path';
import { EXTENSIONS_CONFIG_FILENAME, loadExtension } from '../extension.js';
import { EXTENSIONS_CONFIG_FILENAME } from './variables.js';
import * as tar from 'tar';
import extract from 'extract-zip';
import {
ExtensionUpdateState,
type Extension,
type ExtensionConfig,
type ExtensionManager,
} from './extensionManager.js';
import type { ExtensionInstallMetadata } from '../config/config.js';
interface GithubReleaseData {
assets: Asset[];
tag_name: string;
tarball_url?: string;
zipball_url?: string;
}
interface Asset {
name: string;
browser_download_url: string;
}
export interface GitHubDownloadResult {
tagName: string;
type: 'git' | 'github-release';
}
function getGitHubToken(): string | undefined {
return process.env['GITHUB_TOKEN'];
@@ -115,38 +134,40 @@ async function fetchReleaseFromGithub(
}
export async function checkForExtensionUpdate(
extension: GeminiCLIExtension,
setExtensionUpdateState: (updateState: ExtensionUpdateState) => void,
cwd: string = process.cwd(),
): Promise<void> {
setExtensionUpdateState(ExtensionUpdateState.CHECKING_FOR_UPDATES);
extension: Extension,
extensionManager: ExtensionManager,
): Promise<ExtensionUpdateState> {
const installMetadata = extension.installMetadata;
if (installMetadata?.type === 'local') {
const newExtension = loadExtension({
extensionDir: installMetadata.source,
workspaceDir: cwd,
});
if (!newExtension) {
let latestConfig: ExtensionConfig | undefined;
try {
latestConfig = extensionManager.loadExtensionConfig({
extensionDir: installMetadata.source,
});
} catch (e) {
console.error(
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}. Error: ${getErrorMessage(e)}`,
);
return ExtensionUpdateState.NOT_UPDATABLE;
}
if (!latestConfig) {
console.error(
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.NOT_UPDATABLE;
}
if (newExtension.config.version !== extension.version) {
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
if (latestConfig.version !== extension.version) {
return ExtensionUpdateState.UPDATE_AVAILABLE;
}
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
if (
!installMetadata ||
(installMetadata.type !== 'git' &&
installMetadata.type !== 'github-release')
) {
setExtensionUpdateState(ExtensionUpdateState.NOT_UPDATABLE);
return;
return ExtensionUpdateState.NOT_UPDATABLE;
}
try {
if (installMetadata.type === 'git') {
@@ -154,14 +175,12 @@ export async function checkForExtensionUpdate(
const remotes = await git.getRemotes(true);
if (remotes.length === 0) {
console.error('No git remotes found.');
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const remoteUrl = remotes[0].refs.fetch;
if (!remoteUrl) {
console.error(`No fetch URL found for git remote ${remotes[0].name}.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
// Determine the ref to check on the remote.
@@ -171,8 +190,7 @@ export async function checkForExtensionUpdate(
if (typeof lsRemoteOutput !== 'string' || lsRemoteOutput.trim() === '') {
console.error(`Git ref ${refToCheck} not found.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const remoteHash = lsRemoteOutput.split('\t')[0];
@@ -182,21 +200,17 @@ export async function checkForExtensionUpdate(
console.error(
`Unable to parse hash from git ls-remote output "${lsRemoteOutput}"`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
if (remoteHash === localHash) {
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
return ExtensionUpdateState.UPDATE_AVAILABLE;
} else {
const { source, releaseTag } = installMetadata;
if (!source) {
console.error(`No "source" provided for extension.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const { owner, repo } = parseGitHubRepoForReleases(source);
@@ -206,30 +220,28 @@ export async function checkForExtensionUpdate(
installMetadata.ref,
);
if (releaseData.tag_name !== releaseTag) {
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
return ExtensionUpdateState.UPDATE_AVAILABLE;
}
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
} catch (error) {
console.error(
`Failed to check for updates for extension "${installMetadata.source}": ${getErrorMessage(error)}`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
}
export interface GitHubDownloadResult {
tagName: string;
type: 'git' | 'github-release';
}
export async function downloadFromGitHubRelease(
installMetadata: ExtensionInstallMetadata,
destination: string,
): Promise<GitHubDownloadResult> {
const { source, ref } = installMetadata;
const { owner, repo } = parseGitHubRepoForReleases(source);
const { source, ref, marketplace, type } = installMetadata;
const { owner, repo } = parseGitHubRepoForReleases(
type === 'marketplace' && marketplace
? marketplace.marketplaceSource
: source,
);
try {
const releaseData = await fetchReleaseFromGithub(owner, repo, ref);
@@ -276,28 +288,32 @@ export async function downloadFromGitHubRelease(
// For regular github releases, the repository is put inside of a top level
// directory. In this case we should see exactly two files in the destination
// dir, the archive and the directory. If we see that, validate that the
// dir has a qwen extension configuration file and then move all files
// from the directory up one level into the destination directory.
// dir has a qwen extension configuration file (or gemini-extension.json
// which will be converted later) and then move all files from the directory
// up one level into the destination directory.
const entries = await fs.promises.readdir(destination, {
withFileTypes: true,
});
if (entries.length === 2) {
const lonelyDir = entries.find((entry) => entry.isDirectory());
if (
lonelyDir &&
fs.existsSync(
if (lonelyDir) {
const hasQwenConfig = fs.existsSync(
path.join(destination, lonelyDir.name, EXTENSIONS_CONFIG_FILENAME),
)
) {
const dirPathToExtract = path.join(destination, lonelyDir.name);
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
for (const file of extractedDirFiles) {
await fs.promises.rename(
path.join(dirPathToExtract, file),
path.join(destination, file),
);
);
const hasGeminiConfig = fs.existsSync(
path.join(destination, lonelyDir.name, 'gemini-extension.json'),
);
if (hasQwenConfig || hasGeminiConfig) {
const dirPathToExtract = path.join(destination, lonelyDir.name);
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
for (const file of extractedDirFiles) {
await fs.promises.rename(
path.join(dirPathToExtract, file),
path.join(destination, file),
);
}
await fs.promises.rmdir(dirPathToExtract);
}
await fs.promises.rmdir(dirPathToExtract);
}
}
@@ -313,18 +329,6 @@ export async function downloadFromGitHubRelease(
}
}
interface GithubReleaseData {
assets: Asset[];
tag_name: string;
tarball_url?: string;
zipball_url?: string;
}
interface Asset {
name: string;
browser_download_url: string;
}
export function findReleaseAsset(assets: Asset[]): Asset | undefined {
const platform = os.platform();
const arch = os.arch();
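
With the setExtensionUpdateState callback removed, callers now consume the update state from the returned promise, as the updated tests earlier in this diff already do. A rough caller-side sketch; the reportUpdateState helper is hypothetical:

import { checkForExtensionUpdate } from './github.js';
import {
  ExtensionUpdateState,
  type Extension,
  type ExtensionManager,
} from './extensionManager.js';

// Hypothetical caller-side helper; the real CLI wiring may differ.
async function reportUpdateState(
  extension: Extension,
  extensionManager: ExtensionManager,
): Promise<void> {
  const state = await checkForExtensionUpdate(extension, extensionManager);
  if (state === ExtensionUpdateState.UPDATE_AVAILABLE) {
    console.log(`An update is available for "${extension.name}".`);
  }
}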

View File

@@ -0,0 +1,3 @@
export * from './extensionManager.js';
export * from './variables.js';
export * from './github.js';
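
This barrel gives consumers a single entry point for the extension APIs. A hypothetical consumer import (the relative path depends on where the caller lives):

import {
  ExtensionUpdateState,
  checkForExtensionUpdate,
  EXTENSIONS_CONFIG_FILENAME,
} from './index.js';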

View File

@@ -0,0 +1,78 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import { parseMarketplaceSource } from './marketplace.js';
describe('Marketplace Installation', () => {
describe('parseMarketplaceSource', () => {
it('should parse valid marketplace source with http URL', () => {
const result = parseMarketplaceSource(
'http://example.com/marketplace:my-plugin',
);
expect(result).toEqual({
marketplaceSource: 'http://example.com/marketplace',
pluginName: 'my-plugin',
});
});
it('should parse valid marketplace source with https URL', () => {
const result = parseMarketplaceSource(
'https://github.com/example/marketplace:awesome-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://github.com/example/marketplace',
pluginName: 'awesome-plugin',
});
});
it('should handle plugin names with hyphens', () => {
const result = parseMarketplaceSource(
'https://example.com:my-super-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com',
pluginName: 'my-super-plugin',
});
});
it('should handle URLs with ports', () => {
const result = parseMarketplaceSource(
'https://example.com:8080/marketplace:plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com:8080/marketplace',
pluginName: 'plugin',
});
});
it('should return null for source without colon separator', () => {
const result = parseMarketplaceSource('https://example.com/plugin');
expect(result).toBeNull();
});
it('should return null for source without URL', () => {
const result = parseMarketplaceSource('not-a-url:plugin');
expect(result).toBeNull();
});
it('should return null for source with empty plugin name', () => {
const result = parseMarketplaceSource('https://example.com:');
expect(result).toBeNull();
});
it('should use last colon as separator', () => {
// URLs with ports have colons, should use the last one
const result = parseMarketplaceSource(
'https://example.com:8080:my-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com:8080',
pluginName: 'my-plugin',
});
});
});
});

View File

@@ -0,0 +1,259 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* This module handles installation of extensions from Claude marketplaces.
*
* Marketplace install source format: marketplace-url:plugin-name
* Example: https://github.com/example/marketplace:my-plugin
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import type { ExtensionConfig } from './extensionManager.js';
import {
convertClaudeToQwenConfig,
mergeClaudeConfigs,
type ClaudeMarketplaceConfig,
type ClaudeMarketplacePluginConfig,
type ClaudePluginConfig,
} from './claude-converter.js';
import { cloneFromGit, downloadFromGitHubRelease } from './github.js';
import type { ExtensionInstallMetadata } from '../config/config.js';
export interface MarketplaceInstallOptions {
marketplaceUrl: string;
pluginName: string;
tempDir: string;
requestConsent: (consent: string) => Promise<boolean>;
}
export interface MarketplaceInstallResult {
config: ExtensionConfig;
sourcePath: string;
installMetadata: ExtensionInstallMetadata;
}
/**
* Parse marketplace install source string.
* Format: marketplace-url:plugin-name
*/
export function parseMarketplaceSource(source: string): {
marketplaceSource: string;
pluginName: string;
} | null {
// Check if source contains a colon separator
const lastColonIndex = source.lastIndexOf(':');
if (lastColonIndex === -1) {
return null;
}
// Split at the last colon to separate URL from plugin name
const marketplaceSource = source.substring(0, lastColonIndex);
const pluginName = source.substring(lastColonIndex + 1);
// Validate that marketplace URL looks like a URL
if (
!marketplaceSource.startsWith('http://') &&
!marketplaceSource.startsWith('https://')
) {
return null;
}
if (!pluginName || pluginName.length === 0) {
return null;
}
return { marketplaceSource, pluginName };
}
/**
* Install an extension from a Claude marketplace.
*
* Process:
* 1. Download marketplace repository
* 2. Parse marketplace.json
* 3. Find the specified plugin
* 4. Download/copy plugin source
* 5. Merge configurations (if strict mode)
* 6. Convert to Qwen format
*/
export async function installFromMarketplace(
options: MarketplaceInstallOptions,
): Promise<MarketplaceInstallResult> {
const {
marketplaceUrl,
pluginName,
tempDir,
requestConsent: _requestConsent,
} = options;
// Step 1: Download marketplace repository
const marketplaceDir = path.join(tempDir, 'marketplace');
await fs.promises.mkdir(marketplaceDir, { recursive: true });
console.log(`Downloading marketplace from ${marketplaceUrl}...`);
const installMetadata: ExtensionInstallMetadata = {
source: marketplaceUrl,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, marketplaceDir);
} catch {
await cloneFromGit(installMetadata, marketplaceDir);
}
// Step 2: Parse marketplace.json
const marketplaceConfigPath = path.join(marketplaceDir, 'marketplace.json');
if (!fs.existsSync(marketplaceConfigPath)) {
throw new Error(
`Marketplace configuration not found at ${marketplaceConfigPath}`,
);
}
const marketplaceConfigContent = await fs.promises.readFile(
marketplaceConfigPath,
'utf-8',
);
const marketplaceConfig: ClaudeMarketplaceConfig = JSON.parse(
marketplaceConfigContent,
);
// Step 3: Find the plugin
const pluginConfig = marketplaceConfig.plugins.find(
(p) => p.name.toLowerCase() === pluginName.toLowerCase(),
);
if (!pluginConfig) {
throw new Error(
`Plugin "${pluginName}" not found in marketplace. Available plugins: ${marketplaceConfig.plugins.map((p) => p.name).join(', ')}`,
);
}
// Step 4: Download/copy plugin source
const pluginDir = path.join(tempDir, 'plugin');
await fs.promises.mkdir(pluginDir, { recursive: true });
const pluginSource = await resolvePluginSource(
pluginConfig,
marketplaceDir,
pluginDir,
);
// Step 5: Merge configurations (if strict mode)
let finalPluginConfig: ClaudePluginConfig;
const strict = pluginConfig.strict ?? true;
if (strict) {
// Read plugin.json from plugin source
const pluginJsonPath = path.join(
pluginSource,
'.claude-plugin',
'plugin.json',
);
if (!fs.existsSync(pluginJsonPath)) {
throw new Error(
`Strict mode requires plugin.json at ${pluginJsonPath}, but file not found`,
);
}
const pluginJsonContent = await fs.promises.readFile(
pluginJsonPath,
'utf-8',
);
const basePluginConfig: ClaudePluginConfig = JSON.parse(pluginJsonContent);
// Merge marketplace config with plugin config
finalPluginConfig = mergeClaudeConfigs(pluginConfig, basePluginConfig);
} else {
// Use marketplace config directly
finalPluginConfig = pluginConfig;
}
// Step 6: Convert to Qwen format
const qwenConfig = convertClaudeToQwenConfig(finalPluginConfig);
return {
config: qwenConfig,
sourcePath: pluginSource,
installMetadata: {
source: `${marketplaceUrl}:${pluginName}`,
type: 'git', // Marketplace installs are treated as git installs
},
};
}
/**
* Resolve plugin source from marketplace plugin configuration.
* Returns the absolute path to the plugin source directory.
*/
async function resolvePluginSource(
pluginConfig: ClaudeMarketplacePluginConfig,
marketplaceDir: string,
pluginDir: string,
): Promise<string> {
const source = pluginConfig.source;
// Handle string source (relative path or URL)
if (typeof source === 'string') {
// Check if it's a URL
if (source.startsWith('http://') || source.startsWith('https://')) {
// Download from URL
const installMetadata: ExtensionInstallMetadata = {
source,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
// Relative path within marketplace
const pluginRoot = marketplaceDir;
const sourcePath = path.join(pluginRoot, source);
if (!fs.existsSync(sourcePath)) {
throw new Error(`Plugin source not found at ${sourcePath}`);
}
// Copy to plugin directory
await fs.promises.cp(sourcePath, pluginDir, { recursive: true });
return pluginDir;
}
// Handle object source (github or url)
if (source.source === 'github') {
const installMetadata: ExtensionInstallMetadata = {
source: `https://github.com/${source.repo}`,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
if (source.source === 'url') {
const installMetadata: ExtensionInstallMetadata = {
source: source.url,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
throw new Error(`Unsupported plugin source type: ${JSON.stringify(source)}`);
}
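
A hedged usage sketch of the marketplace flow defined above. The relative import path, the temp-directory prefix, and the auto-approving consent callback are assumptions for illustration only; parseMarketplaceSource returns null for anything that is not an http(s) URL followed by ':plugin-name'.

import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
// Illustrative import path; the real package layout may differ.
import { parseMarketplaceSource, installFromMarketplace } from './marketplace.js';

async function installFromClaudeMarketplace(source: string): Promise<void> {
  const parsed = parseMarketplaceSource(source);
  if (!parsed) {
    throw new Error(`Not a marketplace source: ${source}`);
  }
  // Scratch directory; the marketplace repo and the plugin are downloaded
  // into subdirectories of this path.
  const tempDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-mp-'));
  const result = await installFromMarketplace({
    marketplaceUrl: parsed.marketplaceSource,
    pluginName: parsed.pluginName,
    tempDir,
    requestConsent: async () => true, // auto-approve in this sketch
  });
  console.log(`Plugin source resolved to ${result.sourcePath}`);
  console.log(`Recorded install source: ${result.installMetadata.source}`);
}

// e.g. installFromClaudeMarketplace('https://github.com/example/marketplace:my-plugin');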

View File

@@ -0,0 +1,110 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, expect, it } from 'vitest';
import { Override } from './override.js';
describe('Override', () => {
it('should create an override from input', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should create a disable override from input', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(true);
expect(override.includeSubdirs).toBe(false);
});
it('should create an override from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir');
expect(override.baseRule).toBe('/path/to/dir');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(false);
});
it('should create a disable override from a file rule', () => {
const override = Override.fromFileRule('!/path/to/dir/');
expect(override.isDisable).toBe(true);
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.includeSubdirs).toBe(false);
});
it('should create an override with subdirs from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir/*');
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should correctly identify conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', false);
expect(override1.conflictsWith(override2)).toBe(true);
});
it('should correctly identify non-conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/another/dir', true);
expect(override1.conflictsWith(override2)).toBe(false);
});
it('should correctly identify equal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(true);
});
it('should correctly identify unequal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('!/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(false);
});
it('should generate the correct regex', () => {
const override = Override.fromInput('/path/to/dir', true);
const regex = override.asRegex();
expect(regex.test('/path/to/dir/')).toBe(true);
expect(regex.test('/path/to/dir/subdir')).toBe(true);
expect(regex.test('/path/to/another/dir')).toBe(false);
});
it('should correctly identify child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify child overrides with glob', () => {
const parent = Override.fromInput('/path/to/dir/*', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify non-child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const other = Override.fromInput('/path/to/another/dir', false);
expect(other.isChildOf(parent)).toBe(false);
});
it('should generate the correct output string', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.output()).toBe(`/path/to/dir/*`);
});
it('should generate the correct output string for a disable override', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
it('should disable a path based on a disable override rule', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.isDisable).toBe(true);
expect(override.matchesPath('/path/to/dir/')).toBe(true);
});
});

View File

@@ -0,0 +1,102 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
export interface ExtensionEnablementConfig {
overrides: string[];
}
export interface AllExtensionsEnablementConfig {
[extensionName: string]: ExtensionEnablementConfig;
}
export class Override {
constructor(
public baseRule: string,
public isDisable: boolean,
public includeSubdirs: boolean,
) {}
static fromInput(inputRule: string, includeSubdirs: boolean): Override {
const isDisable = inputRule.startsWith('!');
let baseRule = isDisable ? inputRule.substring(1) : inputRule;
baseRule = ensureLeadingAndTrailingSlash(baseRule);
return new Override(baseRule, isDisable, includeSubdirs);
}
static fromFileRule(fileRule: string): Override {
const isDisable = fileRule.startsWith('!');
let baseRule = isDisable ? fileRule.substring(1) : fileRule;
const includeSubdirs = baseRule.endsWith('*');
baseRule = includeSubdirs
? baseRule.substring(0, baseRule.length - 1)
: baseRule;
return new Override(baseRule, isDisable, includeSubdirs);
}
conflictsWith(other: Override): boolean {
if (this.baseRule === other.baseRule) {
return (
this.includeSubdirs !== other.includeSubdirs ||
this.isDisable !== other.isDisable
);
}
return false;
}
isEqualTo(other: Override): boolean {
return (
this.baseRule === other.baseRule &&
this.includeSubdirs === other.includeSubdirs &&
this.isDisable === other.isDisable
);
}
asRegex(): RegExp {
return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
}
isChildOf(parent: Override) {
if (!parent.includeSubdirs) {
return false;
}
return parent.asRegex().test(this.baseRule);
}
output(): string {
return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
}
matchesPath(path: string) {
return this.asRegex().test(path);
}
}
const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
// Normalize separators to forward slashes for consistent matching across platforms.
let result = dirPath.replace(/\\/g, '/');
if (result.charAt(0) !== '/') {
result = '/' + result;
}
if (result.charAt(result.length - 1) !== '/') {
result = result + '/';
}
return result;
};
/**
* Converts a glob pattern to a RegExp object.
* This is a simplified implementation that supports `*`.
*
* @param glob The glob pattern to convert.
* @returns A RegExp object.
*/
function globToRegex(glob: string): RegExp {
const regexString = glob
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
.replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group
return new RegExp(`^${regexString}$`);
}

View File

@@ -0,0 +1,138 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import { parseEnvFile, generateEnvFile, validateSettings } from './settings.js';
import type { ExtensionSetting } from './extensionManager.js';
describe('Extension Settings', () => {
describe('parseEnvFile', () => {
it('should parse simple KEY=VALUE pairs', () => {
const content = 'API_KEY=abc123\nSERVER_URL=http://example.com';
const result = parseEnvFile(content);
expect(result).toEqual({
API_KEY: 'abc123',
SERVER_URL: 'http://example.com',
});
});
it('should skip empty lines and comments', () => {
const content = `
# This is a comment
API_KEY=secret
# Another comment
DEBUG=true
`;
const result = parseEnvFile(content);
expect(result).toEqual({
API_KEY: 'secret',
DEBUG: 'true',
});
});
it('should handle quoted values', () => {
const content = `API_KEY="my secret key"\nPATH='/usr/local/bin'`;
const result = parseEnvFile(content);
expect(result).toEqual({
API_KEY: 'my secret key',
PATH: '/usr/local/bin',
});
});
it('should ignore invalid lines', () => {
const content = 'VALID=value\nINVALID LINE\nANOTHER=valid';
const result = parseEnvFile(content);
expect(result).toEqual({
VALID: 'value',
ANOTHER: 'valid',
});
});
});
describe('generateEnvFile', () => {
it('should generate properly formatted .env content', () => {
const settings = {
API_KEY: 'secret123',
DEBUG: 'true',
};
const result = generateEnvFile(settings);
expect(result).toContain('API_KEY=secret123');
expect(result).toContain('DEBUG=true');
expect(result).toContain('# Extension Settings');
});
it('should quote values with spaces', () => {
const settings = {
MESSAGE: 'Hello World',
PATH: '/usr/local/bin',
};
const result = generateEnvFile(settings);
expect(result).toContain('MESSAGE="Hello World"');
expect(result).toContain('PATH=/usr/local/bin');
});
});
describe('validateSettings', () => {
it('should pass validation for valid string settings', () => {
const settingsConfig: ExtensionSetting[] = [
{
name: 'API Key',
description: 'Your API key for the service',
envVar: 'API_KEY',
},
];
const settings = { API_KEY: 'my-key' };
const errors = validateSettings(settings, settingsConfig);
expect(errors).toHaveLength(0);
});
it('should fail validation for non-string values', () => {
const settingsConfig: ExtensionSetting[] = [
{
name: 'API Key',
description: 'Your API key for the service',
envVar: 'API_KEY',
},
];
// In TypeScript, this would be caught at compile time,
// but at runtime we check the type
const settings = { API_KEY: 123 as unknown as string };
const errors = validateSettings(settings, settingsConfig);
expect(errors).toHaveLength(1);
expect(errors[0]).toContain('API Key');
expect(errors[0]).toContain('string');
});
it('should allow undefined/missing settings (all settings are optional)', () => {
const settingsConfig: ExtensionSetting[] = [
{
name: 'Optional Setting',
description: 'An optional setting',
envVar: 'OPTIONAL_VAR',
},
];
const settings = {};
const errors = validateSettings(settings, settingsConfig);
expect(errors).toHaveLength(0);
});
it('should validate sensitive settings the same way', () => {
const settingsConfig: ExtensionSetting[] = [
{
name: 'Secret Key',
description: 'Your secret key',
envVar: 'SECRET_KEY',
sensitive: true,
},
];
const validSettings = { SECRET_KEY: 'super-secret' };
expect(validateSettings(validSettings, settingsConfig)).toHaveLength(0);
});
});
});

View File

@@ -0,0 +1,149 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* This module handles extension settings management.
* Settings are stored in .env files within extension directories.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import type { ExtensionSetting } from './extensionManager.js';
/**
* Parse .env file content into key-value pairs.
* Simple parser that handles:
* - KEY=VALUE format
* - Comments starting with #
* - Empty lines
*/
export function parseEnvFile(content: string): Record<string, string> {
const result: Record<string, string> = {};
const lines = content.split('\n');
for (const line of lines) {
const trimmed = line.trim();
// Skip empty lines and comments
if (!trimmed || trimmed.startsWith('#')) {
continue;
}
// Parse KEY=VALUE
const equalIndex = trimmed.indexOf('=');
if (equalIndex === -1) {
continue; // Invalid line, skip
}
const key = trimmed.substring(0, equalIndex).trim();
const value = trimmed.substring(equalIndex + 1).trim();
// Remove quotes if present
let cleanValue = value;
if (
(value.startsWith('"') && value.endsWith('"')) ||
(value.startsWith("'") && value.endsWith("'"))
) {
cleanValue = value.substring(1, value.length - 1);
}
result[key] = cleanValue;
}
return result;
}
/**
* Generate .env file content from key-value pairs.
*/
export function generateEnvFile(settings: Record<string, string>): string {
const lines: string[] = [];
lines.push('# Extension Settings');
lines.push('# Generated by Qwen Code');
lines.push('');
for (const [key, value] of Object.entries(settings)) {
// Quote values that contain spaces
const quotedValue = value.includes(' ') ? `"${value}"` : value;
lines.push(`${key}=${quotedValue}`);
}
return lines.join('\n') + '\n';
}
/**
* Load settings from extension .env file.
*/
export async function loadExtensionSettings(
extensionPath: string,
): Promise<Record<string, string>> {
const envPath = path.join(extensionPath, '.env');
try {
const content = await fs.promises.readFile(envPath, 'utf-8');
return parseEnvFile(content);
} catch (error) {
// If .env file doesn't exist, return empty object
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return {};
}
throw error;
}
}
/**
* Save settings to extension .env file.
*/
export async function saveExtensionSettings(
extensionPath: string,
settings: Record<string, string>,
): Promise<void> {
const envPath = path.join(extensionPath, '.env');
const content = generateEnvFile(settings);
await fs.promises.writeFile(envPath, content, 'utf-8');
}
/**
* Validate settings against configuration.
* Returns array of validation errors (empty if valid).
*
* Note: This validates that environment variables are properly set.
* In Gemini Extension format, all settings are treated as strings.
*/
export function validateSettings(
settings: Record<string, string>,
settingsConfig: ExtensionSetting[],
): string[] {
const errors: string[] = [];
for (const config of settingsConfig) {
const value = settings[config.envVar];
// Basic validation - check if value exists and is not empty
// Note: All settings are optional in Gemini Extension format
if (value !== undefined && typeof value !== 'string') {
errors.push(
`Setting "${config.name}" (${config.envVar}) must be a string`,
);
}
}
return errors;
}
/**
* Merge extension settings into process environment.
* This allows MCP servers and other extension components to access settings.
*/
export function applySettingsToEnv(settings: Record<string, string>): void {
for (const [key, value] of Object.entries(settings)) {
// Only set if not already defined in process.env
if (process.env[key] === undefined) {
process.env[key] = value;
}
}
}
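
To show how the helpers above fit together, a minimal sketch of loading, validating, and exporting an extension's settings at activation time. The import paths, the MY_EXT_API_KEY variable, and the activateExtensionEnv name are illustrative assumptions.

import {
  loadExtensionSettings,
  validateSettings,
  applySettingsToEnv,
} from './settings.js';
import type { ExtensionSetting } from './extensionManager.js';

const SETTINGS: ExtensionSetting[] = [
  {
    name: 'API Key',
    description: 'Key for the backing service',
    envVar: 'MY_EXT_API_KEY',
    sensitive: true,
  },
];

export async function activateExtensionEnv(extensionPath: string): Promise<void> {
  const settings = await loadExtensionSettings(extensionPath); // {} when no .env exists
  const errors = validateSettings(settings, SETTINGS);
  if (errors.length > 0) {
    throw new Error(`Invalid extension settings:\n${errors.join('\n')}`);
  }
  // Make the values visible to MCP servers and other child components,
  // without clobbering variables already set in the parent environment.
  applySettingsToEnv(settings);
}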

View File

@@ -0,0 +1,101 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { ExtensionStorage } from './storage.js';
import * as os from 'node:os';
import * as path from 'node:path';
import * as fs from 'node:fs';
import {
EXTENSION_SETTINGS_FILENAME,
EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import { Storage } from '../config/storage.js';
vi.mock('node:os');
vi.mock('node:fs', async (importOriginal) => {
const actual = await importOriginal<typeof fs>();
return {
...actual,
promises: {
...actual.promises,
mkdtemp: vi.fn(),
},
};
});
vi.mock('@google/gemini-cli-core');
describe('ExtensionStorage', () => {
const mockHomeDir = '/mock/home';
const extensionName = 'test-extension';
let storage: ExtensionStorage;
beforeEach(() => {
vi.mocked(os.homedir).mockReturnValue(mockHomeDir);
vi.mocked(Storage).mockImplementation(
() =>
({
getExtensionsDir: () =>
path.join(mockHomeDir, '.gemini', 'extensions'),
}) as any, // eslint-disable-line @typescript-eslint/no-explicit-any
);
storage = new ExtensionStorage(extensionName);
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should return the correct extension directory', () => {
const expectedDir = path.join(
mockHomeDir,
'.gemini',
'extensions',
extensionName,
);
expect(storage.getExtensionDir()).toBe(expectedDir);
});
it('should return the correct config path', () => {
const expectedPath = path.join(
mockHomeDir,
'.gemini',
'extensions',
extensionName,
EXTENSIONS_CONFIG_FILENAME,
);
expect(storage.getConfigPath()).toBe(expectedPath);
});
it('should return the correct env file path', () => {
const expectedPath = path.join(
mockHomeDir,
'.gemini',
'extensions',
extensionName,
EXTENSION_SETTINGS_FILENAME,
);
expect(storage.getEnvFilePath()).toBe(expectedPath);
});
it('should return the correct user extensions directory', () => {
const expectedDir = path.join(mockHomeDir, '.gemini', 'extensions');
expect(ExtensionStorage.getUserExtensionsDir()).toBe(expectedDir);
});
it('should create a temporary directory', async () => {
const mockTmpDir = '/tmp/qwen-extension-123';
vi.mocked(fs.promises.mkdtemp).mockResolvedValue(mockTmpDir);
vi.mocked(os.tmpdir).mockReturnValue('/tmp');
const result = await ExtensionStorage.createTmpDir();
expect(fs.promises.mkdtemp).toHaveBeenCalledWith(
path.join('/tmp', 'qwen-extension'),
);
expect(result).toBe(mockTmpDir);
});
});

View File

@@ -0,0 +1,40 @@
import { Storage } from '../config/storage.js';
import path from 'node:path';
import * as os from 'node:os';
import {
EXTENSION_SETTINGS_FILENAME,
EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import * as fs from 'node:fs';
export class ExtensionStorage {
private readonly extensionName: string;
constructor(extensionName: string) {
this.extensionName = extensionName;
}
getExtensionDir(): string {
return path.join(
ExtensionStorage.getUserExtensionsDir(),
this.extensionName,
);
}
getConfigPath(): string {
return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
}
getEnvFilePath(): string {
return path.join(this.getExtensionDir(), EXTENSION_SETTINGS_FILENAME);
}
static getUserExtensionsDir(): string {
const storage = new Storage(os.homedir());
return storage.getExtensionsDir();
}
static async createTmpDir(): Promise<string> {
return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
}
}

View File

@@ -17,7 +17,7 @@ export interface VariableSchema {
export interface LoadExtensionContext {
extensionDir: string;
workspaceDir: string;
workspaceDir?: string;
}
const PATH_SEPARATOR_DEFINITION = {
@@ -30,6 +30,10 @@ export const VARIABLE_SCHEMA = {
type: 'string',
description: 'The path of the extension in the filesystem.',
},
CLAUDE_PLUGIN_ROOT: {
type: 'string',
description: 'The path of the extension in the filesystem.',
},
workspacePath: {
type: 'string',
description: 'The absolute path of the current workspace.',

View File

@@ -5,6 +5,13 @@
*/
import { type VariableSchema, VARIABLE_SCHEMA } from './variableSchema.js';
import path from 'node:path';
import { QWEN_DIR } from '../config/storage.js';
export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';
export const EXTENSION_SETTINGS_FILENAME = '.env';
export type JsonObject = { [key: string]: JsonValue };
export type JsonArray = JsonValue[];

View File

@@ -53,6 +53,8 @@ export * from './utils/subagentGenerator.js';
export * from './utils/projectSummary.js';
export * from './utils/promptIdContext.js';
export * from './utils/thoughtUtils.js';
export * from './utils/toml-to-markdown-converter.js';
export * from './utils/yaml-parser.js';
// Export services
export * from './services/fileDiscoveryService.js';
@@ -83,6 +85,9 @@ export * from './subagents/index.js';
// Export skills
export * from './skills/index.js';
// Export extension
export * from './extension/index.js';
// Export prompt logic
export * from './prompts/mcp-prompts.js';
@@ -115,6 +120,7 @@ export type {
OAuthCredentials,
} from './mcp/token-storage/types.js';
export { MCPOAuthTokenStorage } from './mcp/oauth-token-storage.js';
export { KeychainTokenStorage } from './mcp/token-storage/keychain-token-storage.js';
export type { MCPOAuthConfig } from './mcp/oauth-provider.js';
export type {
OAuthAuthorizationServerMetadata,

Some files were not shown because too many files have changed in this diff.