Mirror of https://github.com/QwenLM/qwen-code.git (synced 2026-01-15 21:39:13 +00:00)

Compare commits: feature/re… → feat/exten… (54 commits)
Commits (SHA1):
f00f76456c, 4c7605d900, b37ede07e8, 0a88dd7861, 70991e474f, 551e546974, 117af05122, 557e6397bb, f762a62a2e, ca12772a28,
cec4b831b6, 74bf72877d, b60ae42d10, 54fd4c22a9, f3b7c63cd1, e4dee3a2b2, a8e3b9ebe7, 5cfc9f4686, 85473210e5, c0c94bd4fc,
8111511a89, a8eb858f99, c845049d26, 299b7de030, adb53a6dc6, 6917031128, b33525183f, ec8cccafd7, b0e561ca73, 563d68ad5b,
82c524f87d, df75aa06b6, 8ea9871d23, 2d1934bf2f, 1b7418f91f, b7828ac765, 0bd17a2406, 59be5163fd, 95efe89ac0, d86903ced5,
a47bdc0b06, 0e769e100b, b5bcc07223, 9653dc90d5, 74013bd8b2, 052337861b, f8aecb2631, 18713ef2b0, 50dac93c80, 22504b0a5b,
361492247e, 824ca056a4, 4f664d00ac, 7fdebe8fe6
@@ -25,7 +25,7 @@ Qwen Code is an open-source AI agent for the terminal, optimized for [Qwen3-Code
 - **OpenAI-compatible, OAuth free tier**: use an OpenAI-compatible API, or sign in with Qwen OAuth to get 2,000 free requests/day.
 - **Open-source, co-evolving**: both the framework and the Qwen3-Coder model are open-source—and they ship and evolve together.
 - **Agentic workflow, feature-rich**: rich built-in tools (Skills, SubAgents, Plan Mode) for a full agentic workflow and a Claude Code-like experience.
-- **Terminal-first, IDE-friendly**: built for developers who live in the command line, with optional integration for VS Code and Zed.
+- **Terminal-first, IDE-friendly**: built for developers who live in the command line, with optional integration for VS Code, Zed, and JetBrains IDEs.

 ## Installation

@@ -137,10 +137,11 @@ Use `-p` to run Qwen Code without the interactive UI—ideal for scripts, automa
 #### IDE integration

-Use Qwen Code inside your editor (VS Code and Zed):
+Use Qwen Code inside your editor (VS Code, Zed, and JetBrains IDEs):

 - [Use in VS Code](https://qwenlm.github.io/qwen-code-docs/en/users/integration-vscode/)
 - [Use in Zed](https://qwenlm.github.io/qwen-code-docs/en/users/integration-zed/)
+- [Use in JetBrains IDEs](https://qwenlm.github.io/qwen-code-docs/en/users/integration-jetbrains/)

 #### TypeScript SDK

@@ -10,4 +10,5 @@ export default {
   'web-search': 'Web Search',
   memory: 'Memory',
   'mcp-server': 'MCP Servers',
+  sandbox: 'Sandboxing',
 };

docs/developers/tools/sandbox.md (new file, 90 lines)
@@ -0,0 +1,90 @@
## Customizing the sandbox environment (Docker/Podman)

### BUILD_SANDBOX is not supported when Qwen Code is installed from the npm package

1. Building a custom sandbox requires the build scripts (scripts/build_sandbox.js) from the source repository.
2. These build scripts are not included in the packages published to npm.
3. The code contains hard-coded path checks that explicitly reject build requests from outside a source checkout.

If you need extra tools inside the container (e.g., `git`, `python`, `rg`), create a custom Dockerfile as follows.

#### 1. Clone the qwen-code repository: https://github.com/QwenLM/qwen-code.git

#### 2. Run the following steps from the source repository directory

```bash
# 1. Install the project dependencies
npm install

# 2. Build the Qwen Code project
npm run build

# 3. Verify that the dist directory has been generated
ls -la packages/cli/dist/

# 4. Create a global link from the CLI package directory
cd packages/cli
npm link

# 5. Verify the link (it should now point to the source checkout)
which qwen
# Expected output: /xxx/xxx/.nvm/versions/node/v24.11.1/bin/qwen
# or a similar path; it should resolve through a symbolic link

# 6. Inspect the symlink to see the underlying source path
ls -la $(dirname $(which qwen))/../lib/node_modules/@qwen-code/qwen-code
# It should be a symbolic link pointing to your source directory

# 7. Check the qwen version
qwen -v
# npm link overrides the globally installed qwen. If both report the same version number, uninstall the global CLI first so you can tell them apart.
```

#### 3. Create your sandbox Dockerfile in the root directory of your own project

- Path: `.qwen/sandbox.Dockerfile`

- Official image registry: https://github.com/QwenLM/qwen-code/pkgs/container/qwen-code

```dockerfile
# Based on the official Qwen sandbox image (pinning an explicit version is recommended)
FROM ghcr.io/qwenlm/qwen-code:sha-570ec43
# Add your extra tools here
RUN apt-get update && apt-get install -y \
    git \
    python3 \
    ripgrep
```

#### 4. Build the first sandbox image from the root directory of your project

```bash
GEMINI_SANDBOX=docker BUILD_SANDBOX=1 qwen -s
# Check that the sandbox version reported at startup matches your custom image; if they match, the build succeeded.
```

This builds a project-specific image based on the default sandbox image.
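An optional verification step (an assumption on my part, not from the doc above): list local Docker images to confirm that a project-specific sandbox image was built; the exact image name depends on your project.

```bash
# List locally built images and look for the project-specific sandbox image
docker images | grep -i qwen
```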

#### Remove the npm link

- To restore the official qwen CLI, remove the npm link:

```bash
# Method 1: Unlink globally
npm unlink -g @qwen-code/qwen-code

# Method 2: Unlink from the packages/cli directory
cd packages/cli
npm unlink

# Verify the link has been removed
which qwen
# It should report "qwen not found"

# Reinstall the global version if necessary
npm install -g @qwen-code/qwen-code

# Verify the reinstall
which qwen
qwen --version
```

@@ -12,6 +12,7 @@ export default {
   },
   'integration-vscode': 'Visual Studio Code',
   'integration-zed': 'Zed IDE',
+  'integration-jetbrains': 'JetBrains IDEs',
   'integration-github-action': 'Github Actions',
   'Code with Qwen Code': {
     type: 'separator',

@@ -104,7 +104,7 @@ Settings are organized into categories. All settings should be placed within the
 | `model.name` | string | The Qwen model to use for conversations. | `undefined` |
 | `model.maxSessionTurns` | number | Maximum number of user/model/tool turns to keep in a session. -1 means unlimited. | `-1` |
 | `model.summarizeToolOutput` | object | Enables or disables the summarization of tool output. You can specify the token budget for the summarization using the `tokenBudget` setting. Note: Currently only the `run_shell_command` tool is supported. For example `{"run_shell_command": {"tokenBudget": 2000}}` | `undefined` |
-| `model.generationConfig` | object | Advanced overrides passed to the underlying content generator. Supports request controls such as `timeout`, `maxRetries`, and `disableCacheControl`, along with fine-tuning knobs under `samplingParams` (for example `temperature`, `top_p`, `max_tokens`). Leave unset to rely on provider defaults. | `undefined` |
+| `model.generationConfig` | object | Advanced overrides passed to the underlying content generator. Supports request controls such as `timeout`, `maxRetries`, `disableCacheControl`, and `customHeaders` (custom HTTP headers for API requests), along with fine-tuning knobs under `samplingParams` (for example `temperature`, `top_p`, `max_tokens`). Leave unset to rely on provider defaults. | `undefined` |
 | `model.chatCompression.contextPercentageThreshold` | number | Sets the threshold for chat history compression as a percentage of the model's total token limit. This is a value between 0 and 1 that applies to both automatic compression and the manual `/compress` command. For example, a value of `0.6` will trigger compression when the chat history exceeds 60% of the token limit. Use `0` to disable compression entirely. | `0.7` |
 | `model.skipNextSpeakerCheck` | boolean | Skip the next speaker check. | `false` |
 | `model.skipLoopDetection` | boolean | Disables loop detection checks. Loop detection prevents infinite loops in AI responses but can generate false positives that interrupt legitimate workflows. Enable this option if you experience frequent false positive loop detection interruptions. | `false` |

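For orientation only, an illustrative `settings.json` sketch (not part of the diff above) placing the compression threshold from the table, using the `0.6` example value from its description:

```json
{
  "model": {
    "chatCompression": {
      "contextPercentageThreshold": 0.6
    }
  }
}
```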
@@ -114,12 +114,16 @@ Settings are organized into categories. All settings should be placed within the

 **Example model.generationConfig:**

-```
+```json
 {
   "model": {
     "generationConfig": {
       "timeout": 60000,
       "disableCacheControl": false,
+      "customHeaders": {
+        "X-Request-ID": "req-123",
+        "X-User-ID": "user-456"
+      },
       "samplingParams": {
         "temperature": 0.2,
         "top_p": 0.8,
@@ -130,6 +134,8 @@ Settings are organized into categories. All settings should be placed within the
   }
 }
 ```

+The `customHeaders` field allows you to add custom HTTP headers to all API requests. This is useful for request tracing, monitoring, API gateway routing, or when different models require different headers. If `customHeaders` is defined in `modelProviders[].generationConfig.customHeaders`, it will be used directly; otherwise, headers from `model.generationConfig.customHeaders` will be used. No merging occurs between the two levels.
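To make the no-merge rule concrete, an abbreviated sketch (the `modelProviders` wrapper below is simplified and only indicative; see the provider example further down for the real nesting): when both levels are set as shown, requests for that provider carry only `X-Request-Priority`, and the global `X-Request-ID` header is dropped rather than merged.

```json
{
  "model": {
    "generationConfig": {
      "customHeaders": { "X-Request-ID": "req-123" }
    }
  },
  "modelProviders": {
    "...": {
      "generationConfig": {
        "customHeaders": { "X-Request-Priority": "high" }
      }
    }
  }
}
```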

 **model.openAILoggingDir examples:**

 - `"~/qwen-logs"` - Logs to `~/qwen-logs` directory
@@ -154,6 +160,10 @@ Use `modelProviders` to declare curated model lists per auth type that the `/mod
       "generationConfig": {
         "timeout": 60000,
         "maxRetries": 3,
+        "customHeaders": {
+          "X-Model-Version": "v1.0",
+          "X-Request-Priority": "high"
+        },
         "samplingParams": { "temperature": 0.2 }
       }
     }
@@ -215,7 +225,7 @@ Per-field precedence for `generationConfig`:
 3. `settings.model.generationConfig`
 4. Content-generator defaults (`getDefaultGenerationConfig` for OpenAI, `getParameterValue` for Gemini, etc.)

-`samplingParams` is treated atomically; provider values replace the entire object. Defaults from the content generator apply last so each provider retains its tuned baseline.
+`samplingParams` and `customHeaders` are both treated atomically; provider values replace the entire object. If `modelProviders[].generationConfig` defines these fields, they are used directly; otherwise, values from `model.generationConfig` are used. No merging occurs between provider and global configuration levels. Defaults from the content generator apply last so each provider retains its tuned baseline.

 ##### Selection persistence and recommendations

@@ -59,6 +59,7 @@ Commands for managing AI tools and models.
 | ---------------- | --------------------------------------------- | --------------------------------------------- |
 | `/mcp` | List configured MCP servers and tools | `/mcp`, `/mcp desc` |
 | `/tools` | Display currently available tool list | `/tools`, `/tools desc` |
+| `/skills` | List and run available skills (experimental) | `/skills`, `/skills <name>` |
 | `/approval-mode` | Change approval mode for tool usage | `/approval-mode <mode (auto-edit)> --project` |
 | →`plan` | Analysis only, no execution | Secure review |
 | →`default` | Require approval for edits | Daily use |

@@ -166,15 +166,6 @@ export SANDBOX_SET_UID_GID=true # Force host UID/GID
 export SANDBOX_SET_UID_GID=false # Disable UID/GID mapping
 ```

-## Customizing the sandbox environment (Docker/Podman)
-
-If you need extra tools inside the container (e.g., `git`, `python`, `rg`), create a custom Dockerfile:
-
-- Path: `.qwen/sandbox.Dockerfile`
-- Then run with: `BUILD_SANDBOX=1 qwen -s ...`
-
-This builds a project-specific image based on the default sandbox image.
-
 ## Troubleshooting

 ### Common issues

@@ -27,6 +27,14 @@ Agent Skills package expertise into discoverable capabilities. Each Skill consis

 Skills are **model-invoked** — the model autonomously decides when to use them based on your request and the Skill’s description. This is different from slash commands, which are **user-invoked** (you explicitly type `/command`).

+If you want to invoke a Skill explicitly, use the `/skills` slash command:
+
+```bash
+/skills <skill-name>
+```
+
+The `/skills` command is only available when you run with `--experimental-skills`. Use autocomplete to browse available Skills and descriptions.
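For orientation, a minimal session sketch (the skill name `code-review` is hypothetical; substitute one of your installed Skills):

```bash
# Start Qwen Code with experimental Skills support enabled
qwen --experimental-skills

# Then, inside the interactive session, invoke a specific skill by name
/skills code-review
```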

 ### Benefits

 - Extend Qwen Code for your workflows

docs/users/images/jetbrains-acp.png (new binary file, not shown; 36 KiB)
docs/users/integration-jetbrains.md (new file, 57 lines)
@@ -0,0 +1,57 @@
# JetBrains IDEs

> JetBrains IDEs provide native support for AI coding assistants through the Agent Control Protocol (ACP). This integration allows you to use Qwen Code directly within your JetBrains IDE with real-time code suggestions.

## Features

- **Native agent experience**: Integrated AI assistant panel within your JetBrains IDE
- **Agent Control Protocol**: Full support for ACP enabling advanced IDE interactions
- **Symbol management**: #-mention files to add them to the conversation context
- **Conversation history**: Access to past conversations within the IDE

## Requirements

- JetBrains IDE with ACP support (IntelliJ IDEA, WebStorm, PyCharm, etc.)
- Qwen Code CLI installed

## Installation

1. Install the Qwen Code CLI:

```bash
npm install -g @qwen-code/qwen-code
```

2. Open your JetBrains IDE and navigate to the AI Chat tool window.

3. Click the 3-dot menu in the upper-right corner, select **Configure ACP Agent**, and configure Qwen Code with the following settings:

```json
{
  "agent_servers": {
    "qwen": {
      "command": "/path/to/qwen",
      "args": ["--acp"],
      "env": {}
    }
  }
}
```
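The `"command": "/path/to/qwen"` value above is a placeholder. As a hedged tip (assuming a standard global npm install), you can print the absolute path of the installed CLI and paste it into the config:

```bash
# Print the absolute path of the qwen executable for the "command" field above
which qwen
```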

4. The Qwen Code agent should now be available in the AI Assistant panel.



## Troubleshooting

### Agent not appearing

- Run `qwen --version` in a terminal to verify the installation
- Ensure your JetBrains IDE version supports ACP
- Restart your JetBrains IDE

### Qwen Code not responding

- Check your internet connection
- Verify the CLI works by running `qwen` in a terminal
- [File an issue on GitHub](https://github.com/qwenlm/qwen-code/issues) if the problem persists

@@ -159,7 +159,7 @@ Qwen Code will:

 ### Test out other common workflows

-There are a number of ways to work with Claude:
+There are a number of ways to work with Qwen Code:

 **Refactor code**

package-lock.json (generated, 48 lines changed)
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/prompts": {
|
||||
"version": "2.4.9",
|
||||
"resolved": "https://registry.npmjs.org/@types/prompts/-/prompts-2.4.9.tgz",
|
||||
"integrity": "sha512-qTxFi6Buiu8+50/+3DGIWLHM6QuWsEKugJnnP6iv2Mc4ncxE4A/OJkjuVOA+5X0X1S/nq5VJRa8Lu+nwcvbrKA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"kleur": "^3.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/prop-types": {
|
||||
"version": "15.7.15",
|
||||
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
|
||||
@@ -6216,10 +6227,7 @@
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz",
|
||||
"integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"readdirp": "^4.0.1"
|
||||
},
|
||||
@@ -10984,6 +10992,15 @@
|
||||
"json-buffer": "3.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/kleur": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz",
|
||||
"integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/ky": {
|
||||
"version": "1.8.1",
|
||||
"resolved": "https://registry.npmjs.org/ky/-/ky-1.8.1.tgz",
|
||||
@@ -13393,6 +13410,19 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/prompts": {
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz",
|
||||
"integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"kleur": "^3.0.3",
|
||||
"sisteransi": "^1.0.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/prop-types": {
|
||||
"version": "15.8.1",
|
||||
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz",
|
||||
@@ -13882,10 +13912,7 @@
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz",
|
||||
"integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">= 14.18.0"
|
||||
},
|
||||
@@ -14753,6 +14780,12 @@
|
||||
"url": "https://github.com/steveukx/git-js?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/sisteransi": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
|
||||
"integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/slash": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
|
||||
@@ -17338,6 +17371,7 @@
|
||||
"ink-spinner": "^5.0.0",
|
||||
"lowlight": "^3.3.0",
|
||||
"open": "^10.1.2",
|
||||
"prompts": "^2.4.2",
|
||||
"qrcode-terminal": "^0.12.0",
|
||||
"react": "^19.1.0",
|
||||
"read-package-up": "^11.0.0",
|
||||
@@ -17366,6 +17400,7 @@
|
||||
"@types/diff": "^7.0.2",
|
||||
"@types/dotenv": "^6.1.1",
|
||||
"@types/node": "^20.11.24",
|
||||
"@types/prompts": "^2.4.9",
|
||||
"@types/react": "^19.1.8",
|
||||
"@types/react-dom": "^19.1.6",
|
||||
"@types/semver": "^7.7.0",
|
||||
@@ -17974,6 +18009,7 @@
|
||||
"ajv-formats": "^3.0.0",
|
||||
"async-mutex": "^0.5.0",
|
||||
"chardet": "^2.1.0",
|
||||
"chokidar": "^4.0.3",
|
||||
"diff": "^7.0.0",
|
||||
"dotenv": "^17.1.0",
|
||||
"fast-levenshtein": "^2.0.6",
|
||||
|
||||
@@ -23,8 +23,6 @@
|
||||
"build-and-start": "npm run build && npm run start",
|
||||
"build:vscode": "node scripts/build_vscode_companion.js",
|
||||
"build:all": "npm run build && npm run build:sandbox && npm run build:vscode",
|
||||
"build:native": "node scripts/build_native.js",
|
||||
"build:native:all": "node scripts/build_native.js --all",
|
||||
"build:packages": "npm run build --workspaces",
|
||||
"build:sandbox": "node scripts/build_sandbox.js",
|
||||
"bundle": "npm run generate && node esbuild.config.js && node scripts/copy_bundle_assets.js",
|
||||
|
||||
@@ -46,6 +46,7 @@
|
||||
"comment-json": "^4.2.5",
|
||||
"diff": "^7.0.0",
|
||||
"dotenv": "^17.1.0",
|
||||
"prompts": "^2.4.2",
|
||||
"fzf": "^0.5.2",
|
||||
"glob": "^10.5.0",
|
||||
"highlight.js": "^11.11.1",
|
||||
@@ -79,6 +80,7 @@
|
||||
"@types/command-exists": "^1.2.3",
|
||||
"@types/diff": "^7.0.2",
|
||||
"@types/dotenv": "^6.1.1",
|
||||
"@types/prompts": "^2.4.9",
|
||||
"@types/node": "^20.11.24",
|
||||
"@types/react": "^19.1.8",
|
||||
"@types/react-dom": "^19.1.6",
|
||||
|
||||
@@ -27,10 +27,8 @@ import { Readable, Writable } from 'node:stream';
|
||||
import type { LoadedSettings } from '../config/settings.js';
|
||||
import { SettingScope } from '../config/settings.js';
|
||||
import { z } from 'zod';
|
||||
import { ExtensionStorage, type Extension } from '../config/extension.js';
|
||||
import type { CliArgs } from '../config/config.js';
|
||||
import { loadCliConfig } from '../config/config.js';
|
||||
import { ExtensionEnablementManager } from '../config/extensions/extensionEnablement.js';
|
||||
|
||||
// Import the modular Session class
|
||||
import { Session } from './session/Session.js';
|
||||
@@ -38,7 +36,6 @@ import { Session } from './session/Session.js';
|
||||
export async function runAcpAgent(
|
||||
config: Config,
|
||||
settings: LoadedSettings,
|
||||
extensions: Extension[],
|
||||
argv: CliArgs,
|
||||
) {
|
||||
const stdout = Writable.toWeb(process.stdout) as WritableStream;
|
||||
@@ -51,8 +48,7 @@ export async function runAcpAgent(
|
||||
console.debug = console.error;
|
||||
|
||||
new acp.AgentSideConnection(
|
||||
(client: acp.Client) =>
|
||||
new GeminiAgent(config, settings, extensions, argv, client),
|
||||
(client: acp.Client) => new GeminiAgent(config, settings, argv, client),
|
||||
stdout,
|
||||
stdin,
|
||||
);
|
||||
@@ -65,7 +61,6 @@ class GeminiAgent {
|
||||
constructor(
|
||||
private config: Config,
|
||||
private settings: LoadedSettings,
|
||||
private extensions: Extension[],
|
||||
private argv: CliArgs,
|
||||
private client: acp.Client,
|
||||
) {}
|
||||
@@ -215,16 +210,7 @@ class GeminiAgent {
|
||||
continue: false,
|
||||
};
|
||||
|
||||
const config = await loadCliConfig(
|
||||
settings,
|
||||
this.extensions,
|
||||
new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
this.argv.extensions,
|
||||
),
|
||||
argvForSession,
|
||||
cwd,
|
||||
);
|
||||
const config = await loadCliConfig(settings, argvForSession, cwd);
|
||||
|
||||
await config.initialize();
|
||||
return config;
|
||||
|
||||
packages/cli/src/commands/extensions/consent.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import type { ConfirmationRequest } from '../../ui/types.js';

/**
 * Requests consent from the user to perform an action, by reading a Y/n
 * character from stdin.
 *
 * This should not be called from interactive mode as it will break the CLI.
 *
 * @param consentDescription The description of the thing they will be consenting to.
 * @returns boolean, whether they consented or not.
 */
export async function requestConsentNonInteractive(
  consentDescription: string,
): Promise<boolean> {
  console.info(consentDescription);
  const result = await promptForConsentNonInteractive(
    'Do you want to continue? [Y/n]: ',
  );
  return result;
}

/**
 * Requests consent from the user to perform an action, in interactive mode.
 *
 * This should not be called from non-interactive mode as it will not work.
 *
 * @param consentDescription The description of the thing they will be consenting to.
 * @param addExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
 * @returns boolean, whether they consented or not.
 */
export async function requestConsentInteractive(
  consentDescription: string,
  addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
  return promptForConsentInteractive(
    consentDescription + '\n\nDo you want to continue?',
    addExtensionUpdateConfirmationRequest,
  );
}

/**
 * Asks users a prompt and awaits a y/n response on stdin.
 *
 * This should not be called from interactive mode as it will break the CLI.
 *
 * @param prompt A yes/no prompt to ask the user
 * @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
 */
async function promptForConsentNonInteractive(
  prompt: string,
): Promise<boolean> {
  const readline = await import('node:readline');
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  return new Promise((resolve) => {
    rl.question(prompt, (answer) => {
      rl.close();
      resolve(['y', ''].includes(answer.trim().toLowerCase()));
    });
  });
}

/**
 * Asks users an interactive yes/no prompt.
 *
 * This should not be called from non-interactive mode as it will break the CLI.
 *
 * @param prompt A markdown prompt to ask the user
 * @param addExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
 * @returns Whether or not the user answers yes.
 */
async function promptForConsentInteractive(
  prompt: string,
  addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
  return new Promise<boolean>((resolve) => {
    addExtensionUpdateConfirmationRequest({
      prompt,
      onConfirm: (resolvedConfirmed) => {
        resolve(resolvedConfirmed);
      },
    });
  });
}

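For orientation, a hypothetical usage sketch of the non-interactive helper defined in consent.ts above (the surrounding command logic is illustrative, not from the repository):

```typescript
import { requestConsentNonInteractive } from './consent.js';

// Illustrative wrapper: ask for consent on stdin before a destructive step.
async function confirmThenRun(description: string, action: () => Promise<void>) {
  // Defaults to "yes" when the user just presses Enter.
  const consented = await requestConsentNonInteractive(description);
  if (!consented) {
    console.log('Aborted.');
    return;
  }
  await action();
}
```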
@@ -5,21 +5,22 @@
|
||||
*/
|
||||
|
||||
import { type CommandModule } from 'yargs';
|
||||
import { disableExtension } from '../../config/extension.js';
|
||||
import { SettingScope } from '../../config/settings.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
interface DisableArgs {
|
||||
name: string;
|
||||
scope?: string;
|
||||
}
|
||||
|
||||
export function handleDisable(args: DisableArgs) {
|
||||
export async function handleDisable(args: DisableArgs) {
|
||||
const extensionManager = await getExtensionManager();
|
||||
try {
|
||||
if (args.scope?.toLowerCase() === 'workspace') {
|
||||
disableExtension(args.name, SettingScope.Workspace);
|
||||
extensionManager.disableExtension(args.name, SettingScope.Workspace);
|
||||
} else {
|
||||
disableExtension(args.name, SettingScope.User);
|
||||
extensionManager.disableExtension(args.name, SettingScope.User);
|
||||
}
|
||||
console.log(
|
||||
`Extension "${args.name}" successfully disabled for scope "${args.scope}".`,
|
||||
@@ -61,8 +62,8 @@ export const disableCommand: CommandModule = {
|
||||
}
|
||||
return true;
|
||||
}),
|
||||
handler: (argv) => {
|
||||
handleDisable({
|
||||
handler: async (argv) => {
|
||||
await handleDisable({
|
||||
name: argv['name'] as string,
|
||||
scope: argv['scope'] as string,
|
||||
});
|
||||
|
||||
@@ -6,20 +6,22 @@
|
||||
|
||||
import { type CommandModule } from 'yargs';
|
||||
import { FatalConfigError, getErrorMessage } from '@qwen-code/qwen-code-core';
|
||||
import { enableExtension } from '../../config/extension.js';
|
||||
import { SettingScope } from '../../config/settings.js';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
interface EnableArgs {
|
||||
name: string;
|
||||
scope?: string;
|
||||
}
|
||||
|
||||
export function handleEnable(args: EnableArgs) {
|
||||
export async function handleEnable(args: EnableArgs) {
|
||||
const extensionManager = await getExtensionManager();
|
||||
|
||||
try {
|
||||
if (args.scope?.toLowerCase() === 'workspace') {
|
||||
enableExtension(args.name, SettingScope.Workspace);
|
||||
extensionManager.enableExtension(args.name, SettingScope.Workspace);
|
||||
} else {
|
||||
enableExtension(args.name, SettingScope.User);
|
||||
extensionManager.enableExtension(args.name, SettingScope.User);
|
||||
}
|
||||
if (args.scope) {
|
||||
console.log(
|
||||
@@ -66,8 +68,8 @@ export const enableCommand: CommandModule = {
|
||||
}
|
||||
return true;
|
||||
}),
|
||||
handler: (argv) => {
|
||||
handleEnable({
|
||||
handler: async (argv) => {
|
||||
await handleEnable({
|
||||
name: argv['name'] as string,
|
||||
scope: argv['scope'] as string,
|
||||
});
|
||||
|
||||
@@ -5,58 +5,64 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
|
||||
import {
|
||||
installExtension,
|
||||
requestConsentNonInteractive,
|
||||
} from '../../config/extension.js';
|
||||
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
|
||||
ExtensionManager,
|
||||
parseInstallSource,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { stat } from 'node:fs/promises';
|
||||
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
import { requestConsentNonInteractive } from './consent.js';
|
||||
|
||||
interface InstallArgs {
|
||||
source: string;
|
||||
ref?: string;
|
||||
autoUpdate?: boolean;
|
||||
allowPreRelease?: boolean;
|
||||
consent?: boolean;
|
||||
}
|
||||
|
||||
export async function handleInstall(args: InstallArgs) {
|
||||
try {
|
||||
let installMetadata: ExtensionInstallMetadata;
|
||||
const { source } = args;
|
||||
const installMetadata = await parseInstallSource(args.source);
|
||||
|
||||
if (
|
||||
source.startsWith('http://') ||
|
||||
source.startsWith('https://') ||
|
||||
source.startsWith('git@') ||
|
||||
source.startsWith('sso://')
|
||||
installMetadata.type !== 'git' &&
|
||||
installMetadata.type !== 'github-release'
|
||||
) {
|
||||
installMetadata = {
|
||||
source,
|
||||
type: 'git',
|
||||
ref: args.ref,
|
||||
autoUpdate: args.autoUpdate,
|
||||
};
|
||||
} else {
|
||||
if (args.ref || args.autoUpdate) {
|
||||
throw new Error(
|
||||
'--ref and --auto-update are not applicable for local extensions.',
|
||||
'--ref and --auto-update are not applicable for marketplace extensions.',
|
||||
);
|
||||
}
|
||||
try {
|
||||
await stat(source);
|
||||
installMetadata = {
|
||||
source,
|
||||
type: 'local',
|
||||
};
|
||||
} catch {
|
||||
throw new Error('Install source not found.');
|
||||
}
|
||||
}
|
||||
|
||||
const name = await installExtension(
|
||||
installMetadata,
|
||||
requestConsentNonInteractive,
|
||||
const requestConsent = args.consent
|
||||
? () => Promise.resolve(true)
|
||||
: requestConsentNonInteractive;
|
||||
const workspaceDir = process.cwd();
|
||||
const extensionManager = new ExtensionManager({
|
||||
workspaceDir,
|
||||
isWorkspaceTrusted: !!isWorkspaceTrusted(
|
||||
loadSettings(workspaceDir).merged,
|
||||
),
|
||||
requestConsent,
|
||||
});
|
||||
await extensionManager.refreshCache();
|
||||
|
||||
const extension = await extensionManager.installExtension(
|
||||
{
|
||||
...installMetadata,
|
||||
ref: args.ref,
|
||||
autoUpdate: args.autoUpdate,
|
||||
allowPreRelease: args.allowPreRelease,
|
||||
},
|
||||
requestConsent,
|
||||
);
|
||||
console.log(
|
||||
`Extension "${extension.name}" installed successfully and enabled.`,
|
||||
);
|
||||
console.log(`Extension "${name}" installed successfully and enabled.`);
|
||||
} catch (error) {
|
||||
console.error(getErrorMessage(error));
|
||||
process.exit(1);
|
||||
@@ -65,11 +71,13 @@ export async function handleInstall(args: InstallArgs) {
|
||||
|
||||
export const installCommand: CommandModule = {
|
||||
command: 'install <source>',
|
||||
describe: 'Installs an extension from a git repository URL or a local path.',
|
||||
describe:
|
||||
'Installs an extension from a git repository URL, local path, or claude marketplace (marketplace-url:plugin-name).',
|
||||
builder: (yargs) =>
|
||||
yargs
|
||||
.positional('source', {
|
||||
describe: 'The github URL or local path of the extension to install.',
|
||||
describe:
|
||||
'The github URL, local path, or marketplace source (marketplace-url:plugin-name) of the extension to install.',
|
||||
type: 'string',
|
||||
demandOption: true,
|
||||
})
|
||||
@@ -81,6 +89,16 @@ export const installCommand: CommandModule = {
|
||||
describe: 'Enable auto-update for this extension.',
|
||||
type: 'boolean',
|
||||
})
|
||||
.option('pre-release', {
|
||||
describe: 'Enable pre-release versions for this extension.',
|
||||
type: 'boolean',
|
||||
})
|
||||
.option('consent', {
|
||||
describe:
|
||||
'Acknowledge the security risks of installing an extension and skip the confirmation prompt.',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
})
|
||||
.check((argv) => {
|
||||
if (!argv.source) {
|
||||
throw new Error('The source argument must be provided.');
|
||||
@@ -92,6 +110,8 @@ export const installCommand: CommandModule = {
|
||||
source: argv['source'] as string,
|
||||
ref: argv['ref'] as string | undefined,
|
||||
autoUpdate: argv['auto-update'] as boolean | undefined,
|
||||
allowPreRelease: argv['pre-release'] as boolean | undefined,
|
||||
consent: argv['consent'] as boolean | undefined,
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
@@ -5,13 +5,10 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
import {
|
||||
installExtension,
|
||||
requestConsentNonInteractive,
|
||||
} from '../../config/extension.js';
|
||||
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
|
||||
|
||||
import { type ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { requestConsentNonInteractive } from './consent.js';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
interface InstallArgs {
|
||||
path: string;
|
||||
@@ -23,12 +20,14 @@ export async function handleLink(args: InstallArgs) {
|
||||
source: args.path,
|
||||
type: 'link',
|
||||
};
|
||||
const extensionName = await installExtension(
|
||||
const extensionManager = await getExtensionManager();
|
||||
|
||||
const extension = await extensionManager.installExtension(
|
||||
installMetadata,
|
||||
requestConsentNonInteractive,
|
||||
);
|
||||
console.log(
|
||||
`Extension "${extensionName}" linked successfully and enabled.`,
|
||||
`Extension "${extension.name}" linked successfully and enabled.`,
|
||||
);
|
||||
} catch (error) {
|
||||
console.error(getErrorMessage(error));
|
||||
|
||||
@@ -5,19 +5,23 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
import { loadUserExtensions, toOutputString } from '../../config/extension.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
export async function handleList() {
|
||||
try {
|
||||
const extensions = loadUserExtensions();
|
||||
const extensionManager = await getExtensionManager();
|
||||
const extensions = extensionManager.getLoadedExtensions();
|
||||
|
||||
if (extensions.length === 0) {
|
||||
console.log('No extensions installed.');
|
||||
return;
|
||||
}
|
||||
console.log(
|
||||
extensions
|
||||
.map((extension, _): string => toOutputString(extension, process.cwd()))
|
||||
.map((extension, _): string =>
|
||||
extensionManager.toOutputString(extension, process.cwd()),
|
||||
)
|
||||
.join('\n\n'),
|
||||
);
|
||||
} catch (error) {
|
||||
|
||||
@@ -5,8 +5,11 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
import { uninstallExtension } from '../../config/extension.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { ExtensionManager } from '@qwen-code/qwen-code-core';
|
||||
import { requestConsentNonInteractive } from './consent.js';
|
||||
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
|
||||
interface UninstallArgs {
|
||||
name: string; // can be extension name or source URL.
|
||||
@@ -14,7 +17,16 @@ interface UninstallArgs {
|
||||
|
||||
export async function handleUninstall(args: UninstallArgs) {
|
||||
try {
|
||||
await uninstallExtension(args.name);
|
||||
const workspaceDir = process.cwd();
|
||||
const extensionManager = new ExtensionManager({
|
||||
workspaceDir,
|
||||
requestConsent: requestConsentNonInteractive,
|
||||
isWorkspaceTrusted: !!isWorkspaceTrusted(
|
||||
loadSettings(workspaceDir).merged,
|
||||
),
|
||||
});
|
||||
await extensionManager.refreshCache();
|
||||
await extensionManager.uninstallExtension(args.name, false);
|
||||
console.log(`Extension "${args.name}" successfully uninstalled.`);
|
||||
} catch (error) {
|
||||
console.error(getErrorMessage(error));
|
||||
|
||||
@@ -5,22 +5,13 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
import {
|
||||
loadExtensions,
|
||||
annotateActiveExtensions,
|
||||
ExtensionStorage,
|
||||
requestConsentNonInteractive,
|
||||
} from '../../config/extension.js';
|
||||
import {
|
||||
updateAllUpdatableExtensions,
|
||||
type ExtensionUpdateInfo,
|
||||
checkForAllExtensionUpdates,
|
||||
updateExtension,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { checkForExtensionUpdate } from '../../config/extensions/github.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
|
||||
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
|
||||
import {
|
||||
checkForExtensionUpdate,
|
||||
type ExtensionUpdateInfo,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
interface UpdateArgs {
|
||||
name?: string;
|
||||
@@ -31,19 +22,9 @@ const updateOutput = (info: ExtensionUpdateInfo) =>
|
||||
`Extension "${info.name}" successfully updated: ${info.originalVersion} → ${info.updatedVersion}.`;
|
||||
|
||||
export async function handleUpdate(args: UpdateArgs) {
|
||||
const workingDir = process.cwd();
|
||||
const extensionEnablementManager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
// Force enable named extensions, otherwise we will only update the enabled
|
||||
// ones.
|
||||
args.name ? [args.name] : [],
|
||||
);
|
||||
const allExtensions = loadExtensions(extensionEnablementManager);
|
||||
const extensions = annotateActiveExtensions(
|
||||
allExtensions,
|
||||
workingDir,
|
||||
extensionEnablementManager,
|
||||
);
|
||||
const extensionManager = await getExtensionManager();
|
||||
const extensions = extensionManager.getLoadedExtensions();
|
||||
|
||||
if (args.name) {
|
||||
try {
|
||||
const extension = extensions.find(
|
||||
@@ -53,25 +34,23 @@ export async function handleUpdate(args: UpdateArgs) {
|
||||
console.log(`Extension "${args.name}" not found.`);
|
||||
return;
|
||||
}
|
||||
let updateState: ExtensionUpdateState | undefined;
|
||||
if (!extension.installMetadata) {
|
||||
console.log(
|
||||
`Unable to install extension "${args.name}" due to missing install metadata`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
await checkForExtensionUpdate(extension, (newState) => {
|
||||
updateState = newState;
|
||||
});
|
||||
const updateState = await checkForExtensionUpdate(
|
||||
extension,
|
||||
extensionManager,
|
||||
);
|
||||
if (updateState !== ExtensionUpdateState.UPDATE_AVAILABLE) {
|
||||
console.log(`Extension "${args.name}" is already up to date.`);
|
||||
return;
|
||||
}
|
||||
// TODO(chrstnb): we should list extensions if the requested extension is not installed.
|
||||
const updatedExtensionInfo = (await updateExtension(
|
||||
const updatedExtensionInfo = (await extensionManager.updateExtension(
|
||||
extension,
|
||||
workingDir,
|
||||
requestConsentNonInteractive,
|
||||
updateState,
|
||||
() => {},
|
||||
))!;
|
||||
@@ -92,18 +71,15 @@ export async function handleUpdate(args: UpdateArgs) {
|
||||
if (args.all) {
|
||||
try {
|
||||
const extensionState = new Map();
|
||||
await checkForAllExtensionUpdates(extensions, (action) => {
|
||||
if (action.type === 'SET_STATE') {
|
||||
extensionState.set(action.payload.name, {
|
||||
status: action.payload.state,
|
||||
await extensionManager.checkForAllExtensionUpdates(
|
||||
(extensionName, state) => {
|
||||
extensionState.set(extensionName, {
|
||||
status: state,
|
||||
processed: true, // No need to process as we will force the update.
|
||||
});
|
||||
}
|
||||
});
|
||||
let updateInfos = await updateAllUpdatableExtensions(
|
||||
workingDir,
|
||||
requestConsentNonInteractive,
|
||||
extensions,
|
||||
},
|
||||
);
|
||||
let updateInfos = await extensionManager.updateAllUpdatableExtensions(
|
||||
extensionState,
|
||||
() => {},
|
||||
);
|
||||
|
||||
packages/cli/src/commands/extensions/utils.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { loadSettings } from '../../config/settings.js';
import { requestConsentNonInteractive } from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';

export async function getExtensionManager(): Promise<ExtensionManager> {
  const workspaceDir = process.cwd();
  const extensionManager = new ExtensionManager({
    workspaceDir,
    requestConsent: requestConsentNonInteractive,
    isWorkspaceTrusted: !!isWorkspaceTrusted(loadSettings(workspaceDir).merged),
  });
  await extensionManager.refreshCache();
  return extensionManager;
}

@@ -7,7 +7,8 @@
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { listMcpServers } from './list.js';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
|
||||
import { loadExtensions } from '../../config/extension.js';
|
||||
import { ExtensionStorage } from '../../config/extensions/storage.js';
|
||||
import { createTransport } from '@qwen-code/qwen-code-core';
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
||||
|
||||
|
||||
@@ -8,10 +8,13 @@
|
||||
import type { CommandModule } from 'yargs';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
import type { MCPServerConfig } from '@qwen-code/qwen-code-core';
|
||||
import { MCPServerStatus, createTransport } from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
MCPServerStatus,
|
||||
createTransport,
|
||||
ExtensionManager,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
||||
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
|
||||
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
|
||||
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
|
||||
|
||||
const COLOR_GREEN = '\u001b[32m';
|
||||
const COLOR_YELLOW = '\u001b[33m';
|
||||
@@ -22,22 +25,27 @@ async function getMcpServersFromConfig(): Promise<
|
||||
Record<string, MCPServerConfig>
|
||||
> {
|
||||
const settings = loadSettings();
|
||||
const extensions = loadExtensions(
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
);
|
||||
const extensionManager = new ExtensionManager({
|
||||
isWorkspaceTrusted: !!isWorkspaceTrusted(settings.merged),
|
||||
telemetrySettings: settings.merged.telemetry,
|
||||
});
|
||||
await extensionManager.refreshCache();
|
||||
const extensions = extensionManager.getLoadedExtensions();
|
||||
const mcpServers = { ...(settings.merged.mcpServers || {}) };
|
||||
for (const extension of extensions) {
|
||||
Object.entries(extension.config.mcpServers || {}).forEach(
|
||||
([key, server]) => {
|
||||
if (mcpServers[key]) {
|
||||
return;
|
||||
}
|
||||
mcpServers[key] = {
|
||||
...server,
|
||||
extensionName: extension.config.name,
|
||||
};
|
||||
},
|
||||
);
|
||||
if (extension.isActive) {
|
||||
Object.entries(extension.config.mcpServers || {}).forEach(
|
||||
([key, server]) => {
|
||||
if (mcpServers[key]) {
|
||||
return;
|
||||
}
|
||||
mcpServers[key] = {
|
||||
...server,
|
||||
extensionName: extension.config.name,
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
return mcpServers;
|
||||
}
|
||||
|
||||
@@ -16,7 +16,8 @@ import {
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { loadCliConfig, parseArguments, type CliArgs } from './config.js';
|
||||
import type { Settings } from './settings.js';
|
||||
import { ExtensionStorage, type Extension } from './extension.js';
|
||||
import type { Extension } from './extension.js';
|
||||
import { ExtensionStorage } from './extensions/storage.js';
|
||||
import * as ServerConfig from '@qwen-code/qwen-code-core';
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
|
||||
|
||||
@@ -9,7 +9,6 @@ import {
|
||||
AuthType,
|
||||
Config,
|
||||
DEFAULT_QWEN_EMBEDDING_MODEL,
|
||||
DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
|
||||
FileDiscoveryService,
|
||||
getCurrentGeminiMdFilename,
|
||||
loadServerHierarchicalMemory,
|
||||
@@ -23,7 +22,6 @@ import {
|
||||
SessionService,
|
||||
type ResumedSessionData,
|
||||
type FileFilteringOptions,
|
||||
type MCPServerConfig,
|
||||
type ToolName,
|
||||
EditTool,
|
||||
ShellTool,
|
||||
@@ -43,14 +41,11 @@ import { homedir } from 'node:os';
|
||||
|
||||
import { resolvePath } from '../utils/resolvePath.js';
|
||||
import { getCliVersion } from '../utils/version.js';
|
||||
import type { Extension } from './extension.js';
|
||||
import { annotateActiveExtensions } from './extension.js';
|
||||
import { loadSandboxConfig } from './sandboxConfig.js';
|
||||
import { appEvents } from '../utils/events.js';
|
||||
import { mcpCommand } from '../commands/mcp.js';
|
||||
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import type { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
|
||||
import { buildWebSearchConfig } from './webSearch.js';
|
||||
|
||||
// Simple console logger for now - replace with actual logger if available
|
||||
@@ -169,8 +164,18 @@ function normalizeOutputFormat(
|
||||
return OutputFormat.TEXT;
|
||||
}
|
||||
|
||||
export async function parseArguments(settings: Settings): Promise<CliArgs> {
|
||||
const rawArgv = hideBin(process.argv);
|
||||
export async function parseArguments(): Promise<CliArgs> {
|
||||
let rawArgv = hideBin(process.argv);
|
||||
|
||||
// hack: if the first argument is the CLI entry point, remove it
|
||||
if (
|
||||
rawArgv.length > 0 &&
|
||||
(rawArgv[0].endsWith('/dist/qwen-cli/cli.js') ||
|
||||
rawArgv[0].endsWith('/dist/cli.js'))
|
||||
) {
|
||||
rawArgv = rawArgv.slice(1);
|
||||
}
|
||||
|
||||
const yargsInstance = yargs(rawArgv)
|
||||
.locale('en')
|
||||
.scriptName('qwen')
|
||||
@@ -550,11 +555,9 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
|
||||
}),
|
||||
)
|
||||
// Register MCP subcommands
|
||||
.command(mcpCommand);
|
||||
|
||||
if (settings?.experimental?.extensionManagement ?? true) {
|
||||
yargsInstance.command(extensionsCommand);
|
||||
}
|
||||
.command(mcpCommand)
|
||||
// Register Extension subcommands
|
||||
.command(extensionsCommand);
|
||||
|
||||
yargsInstance
|
||||
.version(await getCliVersion()) // This will enable the --version flag based on package.json
|
||||
@@ -629,11 +632,11 @@ export async function loadHierarchicalGeminiMemory(
|
||||
includeDirectoriesToReadGemini: readonly string[] = [],
|
||||
debugMode: boolean,
|
||||
fileService: FileDiscoveryService,
|
||||
settings: Settings,
|
||||
extensionContextFilePaths: string[] = [],
|
||||
folderTrust: boolean,
|
||||
memoryImportFormat: 'flat' | 'tree' = 'tree',
|
||||
fileFilteringOptions?: FileFilteringOptions,
|
||||
maxDirs: number = 200,
|
||||
): Promise<{ memoryContent: string; fileCount: number }> {
|
||||
// FIX: Use real, canonical paths for a reliable comparison to handle symlinks.
|
||||
const realCwd = fs.realpathSync(path.resolve(currentWorkingDirectory));
|
||||
@@ -660,7 +663,7 @@ export async function loadHierarchicalGeminiMemory(
|
||||
folderTrust,
|
||||
memoryImportFormat,
|
||||
fileFilteringOptions,
|
||||
settings.context?.discoveryMaxDirs,
|
||||
maxDirs,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -675,30 +678,17 @@ export function isDebugMode(argv: CliArgs): boolean {
|
||||
|
||||
export async function loadCliConfig(
|
||||
settings: Settings,
|
||||
extensions: Extension[],
|
||||
extensionEnablementManager: ExtensionEnablementManager,
|
||||
argv: CliArgs,
|
||||
cwd: string = process.cwd(),
|
||||
overrideExtensions?: string[],
|
||||
): Promise<Config> {
|
||||
const debugMode = isDebugMode(argv);
|
||||
|
||||
const memoryImportFormat = settings.context?.importFormat || 'tree';
|
||||
|
||||
const ideMode = settings.ide?.enabled ?? false;
|
||||
|
||||
const folderTrust = settings.security?.folderTrust?.enabled ?? false;
|
||||
const trustedFolder = isWorkspaceTrusted(settings)?.isTrusted ?? true;
|
||||
|
||||
const allExtensions = annotateActiveExtensions(
|
||||
extensions,
|
||||
cwd,
|
||||
extensionEnablementManager,
|
||||
);
|
||||
|
||||
const activeExtensions = extensions.filter(
|
||||
(_, i) => allExtensions[i].isActive,
|
||||
);
|
||||
|
||||
// Set the context filename in the server's memoryTool module BEFORE loading memory
|
||||
// TODO(b/343434939): This is a bit of a hack. The contextFileName should ideally be passed
|
||||
// directly to the Config constructor in core, and have core handle setGeminiMdFilename.
|
||||
@@ -710,51 +700,27 @@ export async function loadCliConfig(
|
||||
setServerGeminiMdFilename(getCurrentGeminiMdFilename());
|
||||
}
|
||||
|
||||
const extensionContextFilePaths = activeExtensions.flatMap(
|
||||
(e) => e.contextFiles,
|
||||
);
|
||||
|
||||
// Automatically load output-language.md if it exists
|
||||
const outputLanguageFilePath = path.join(
|
||||
let outputLanguageFilePath: string | undefined = path.join(
|
||||
Storage.getGlobalQwenDir(),
|
||||
'output-language.md',
|
||||
);
|
||||
if (fs.existsSync(outputLanguageFilePath)) {
|
||||
extensionContextFilePaths.push(outputLanguageFilePath);
|
||||
if (debugMode) {
|
||||
logger.debug(
|
||||
`Found output-language.md, adding to context files: ${outputLanguageFilePath}`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
outputLanguageFilePath = undefined;
|
||||
}
|
||||
|
||||
const fileService = new FileDiscoveryService(cwd);
|
||||
|
||||
const fileFiltering = {
|
||||
...DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
|
||||
...settings.context?.fileFiltering,
|
||||
};
|
||||
|
||||
const includeDirectories = (settings.context?.includeDirectories || [])
|
||||
.map(resolvePath)
|
||||
.concat((argv.includeDirectories || []).map(resolvePath));
|
||||
|
||||
// Call the (now wrapper) loadHierarchicalGeminiMemory which calls the server's version
|
||||
const { memoryContent, fileCount } = await loadHierarchicalGeminiMemory(
|
||||
cwd,
|
||||
settings.context?.loadMemoryFromIncludeDirectories
|
||||
? includeDirectories
|
||||
: [],
|
||||
debugMode,
|
||||
fileService,
|
||||
settings,
|
||||
extensionContextFilePaths,
|
||||
trustedFolder,
|
||||
memoryImportFormat,
|
||||
fileFiltering,
|
||||
);
|
||||
|
||||
let mcpServers = mergeMcpServers(settings, activeExtensions);
|
||||
const question = argv.promptInteractive || argv.prompt || '';
|
||||
const inputFormat: InputFormat =
|
||||
(argv.inputFormat as InputFormat | undefined) ?? InputFormat.TEXT;
|
||||
@@ -893,38 +859,18 @@ export async function loadCliConfig(
|
||||
|
||||
  const excludeTools = mergeExcludeTools(
    settings,
    activeExtensions,
    extraExcludes.length > 0 ? extraExcludes : undefined,
    argv.excludeTools,
  );
  const blockedMcpServers: Array<{ name: string; extensionName: string }> = [];

  if (!argv.allowedMcpServerNames) {
    if (settings.mcp?.allowed) {
      mcpServers = allowedMcpServers(
        mcpServers,
        settings.mcp.allowed,
        blockedMcpServers,
      );
    }

    if (settings.mcp?.excluded) {
      const excludedNames = new Set(settings.mcp.excluded.filter(Boolean));
      if (excludedNames.size > 0) {
        mcpServers = Object.fromEntries(
          Object.entries(mcpServers).filter(([key]) => !excludedNames.has(key)),
        );
      }
    }
  }

  if (argv.allowedMcpServerNames) {
    mcpServers = allowedMcpServers(
      mcpServers,
      argv.allowedMcpServerNames,
      blockedMcpServers,
    );
  }
  const allowedMcpServers = argv.allowedMcpServerNames
    ? new Set(argv.allowedMcpServerNames.filter(Boolean))
    : settings.mcp?.allowed
      ? new Set(settings.mcp.allowed.filter(Boolean))
      : undefined;
  const excludedMcpServers = settings.mcp?.excluded
    ? new Set(settings.mcp.excluded.filter(Boolean))
    : undefined;

  const selectedAuthType =
    (argv.authType as AuthType | undefined) ||
@@ -991,6 +937,8 @@ export async function loadCliConfig(
    includeDirectories,
    loadMemoryFromIncludeDirectories:
      settings.context?.loadMemoryFromIncludeDirectories || false,
    importFormat: settings.context?.importFormat || 'tree',
    discoveryMaxDirs: settings.context?.discoveryMaxDirs || 200,
    debugMode,
    question,
    fullContext: argv.allFiles || false,
@@ -1000,9 +948,13 @@ export async function loadCliConfig(
    toolDiscoveryCommand: settings.tools?.discoveryCommand,
    toolCallCommand: settings.tools?.callCommand,
    mcpServerCommand: settings.mcp?.serverCommand,
    mcpServers,
    userMemory: memoryContent,
    geminiMdFileCount: fileCount,
    mcpServers: settings.mcpServers || {},
    allowedMcpServers: allowedMcpServers
      ? Array.from(allowedMcpServers)
      : undefined,
    excludedMcpServers: excludedMcpServers
      ? Array.from(excludedMcpServers)
      : undefined,
    approvalMode,
    showMemoryUsage:
      argv.showMemoryUsage || settings.ui?.showMemoryUsage || false,
@@ -1025,15 +977,14 @@ export async function loadCliConfig(
    fileDiscoveryService: fileService,
    bugCommand: settings.advanced?.bugCommand,
    model: resolvedModel,
    extensionContextFilePaths,
    outputLanguageFilePath,
    sessionTokenLimit: settings.model?.sessionTokenLimit ?? -1,
    maxSessionTurns:
      argv.maxSessionTurns ?? settings.model?.maxSessionTurns ?? -1,
    experimentalZedIntegration: argv.acp || argv.experimentalAcp || false,
    experimentalSkills: argv.experimentalSkills || false,
    listExtensions: argv.listExtensions || false,
    extensions: allExtensions,
    blockedMcpServers,
    overrideExtensions: overrideExtensions || argv.extensions,
    noBrowser: !!process.env['NO_BROWSER'],
    authType: selectedAuthType,
    inputFormat,
@@ -1075,61 +1026,8 @@ export async function loadCliConfig(
  });
}

function allowedMcpServers(
  mcpServers: { [x: string]: MCPServerConfig },
  allowMCPServers: string[],
  blockedMcpServers: Array<{ name: string; extensionName: string }>,
) {
  const allowedNames = new Set(allowMCPServers.filter(Boolean));
  if (allowedNames.size > 0) {
    mcpServers = Object.fromEntries(
      Object.entries(mcpServers).filter(([key, server]) => {
        const isAllowed = allowedNames.has(key);
        if (!isAllowed) {
          blockedMcpServers.push({
            name: key,
            extensionName: server.extensionName || '',
          });
        }
        return isAllowed;
      }),
    );
  } else {
    blockedMcpServers.push(
      ...Object.entries(mcpServers).map(([key, server]) => ({
        name: key,
        extensionName: server.extensionName || '',
      })),
    );
    mcpServers = {};
  }
  return mcpServers;
}
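
// Illustrative sketch (not part of the diff above): how the allow-list filter
// behaves. Server names absent from the allow list are reported as blocked,
// and an empty allow list blocks everything. The simplified `ServerLike` type
// and `filterByAllowList` helper below are made up for the example.
type ServerLike = { extensionName?: string };

function filterByAllowList(
  servers: Record<string, ServerLike>,
  allowed: string[],
): { kept: Record<string, ServerLike>; blocked: string[] } {
  const allowedNames = new Set(allowed.filter(Boolean));
  const blocked: string[] = [];
  const kept = Object.fromEntries(
    Object.entries(servers).filter(([name]) => {
      const isAllowed = allowedNames.size > 0 && allowedNames.has(name);
      if (!isAllowed) blocked.push(name);
      return isAllowed;
    }),
  );
  return { kept, blocked };
}

// filterByAllowList({ a: {}, b: {} }, ['a']) -> kept: { a: {} }, blocked: ['b']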

function mergeMcpServers(settings: Settings, extensions: Extension[]) {
  const mcpServers = { ...(settings.mcpServers || {}) };
  for (const extension of extensions) {
    Object.entries(extension.config.mcpServers || {}).forEach(
      ([key, server]) => {
        if (mcpServers[key]) {
          logger.warn(
            `Skipping extension MCP config for server with key "${key}" as it already exists.`,
          );
          return;
        }
        mcpServers[key] = {
          ...server,
          extensionName: extension.config.name,
        };
      },
    );
  }
  return mcpServers;
}

function mergeExcludeTools(
  settings: Settings,
  extensions: Extension[],
  extraExcludes?: string[] | undefined,
  cliExcludeTools?: string[] | undefined,
): string[] {
@@ -1138,10 +1036,5 @@ function mergeExcludeTools(
    ...(settings.tools?.exclude || []),
    ...(extraExcludes || []),
  ]);
  for (const extension of extensions) {
    for (const tool of extension.config.excludeTools || []) {
      allExcludeTools.add(tool);
    }
  }
  return [...allExcludeTools];
}
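
// Illustrative sketch (not part of the diff above): the exclude-tools merge is
// a set union, so a tool listed in settings, by an extension, and on the CLI is
// only excluded once. The values below are made up for the example.
const mergedExcludes = new Set<string>([
  ...['run_shell_command'],            // settings.tools.exclude
  ...['web_fetch'],                    // extra excludes
  ...['run_shell_command', 'memory'],  // from an extension
]);
// [...mergedExcludes] -> ['run_shell_command', 'web_fetch', 'memory']
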
File diff suppressed because it is too large
@@ -1,786 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import type {
  MCPServerConfig,
  GeminiCLIExtension,
  ExtensionInstallMetadata,
} from '@qwen-code/qwen-code-core';
import {
  QWEN_DIR,
  Storage,
  Config,
  ExtensionInstallEvent,
  ExtensionUninstallEvent,
  ExtensionDisableEvent,
  ExtensionEnableEvent,
  logExtensionEnable,
  logExtensionInstallEvent,
  logExtensionUninstall,
  logExtensionDisable,
} from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { SettingScope, loadSettings } from '../config/settings.js';
import { getErrorMessage } from '../utils/errors.js';
import { recursivelyHydrateStrings } from './extensions/variables.js';
import { isWorkspaceTrusted } from './trustedFolders.js';
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
import {
  cloneFromGit,
  downloadFromGitHubRelease,
} from './extensions/github.js';
import type { LoadExtensionContext } from './extensions/variableSchema.js';
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
import chalk from 'chalk';
import type { ConfirmationRequest } from '../ui/types.js';

export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');

export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';

export interface Extension {
  path: string;
  config: ExtensionConfig;
  contextFiles: string[];
  installMetadata?: ExtensionInstallMetadata | undefined;
}

export interface ExtensionConfig {
  name: string;
  version: string;
  mcpServers?: Record<string, MCPServerConfig>;
  contextFileName?: string | string[];
  excludeTools?: string[];
}
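
// Illustrative sketch (not part of the diff above): a minimal
// qwen-extension.json matching the ExtensionConfig shape. Only "name" and
// "version" are required; the MCP server entry below is made up.
//
// {
//   "name": "my-extension",
//   "version": "1.0.0",
//   "contextFileName": "QWEN.md",
//   "mcpServers": {
//     "example-server": { "command": "node", "args": ["server.js"] }
//   },
//   "excludeTools": ["run_shell_command"]
// }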

export interface ExtensionUpdateInfo {
  name: string;
  originalVersion: string;
  updatedVersion: string;
}

export class ExtensionStorage {
  private readonly extensionName: string;

  constructor(extensionName: string) {
    this.extensionName = extensionName;
  }

  getExtensionDir(): string {
    return path.join(
      ExtensionStorage.getUserExtensionsDir(),
      this.extensionName,
    );
  }

  getConfigPath(): string {
    return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
  }

  static getUserExtensionsDir(): string {
    const storage = new Storage(os.homedir());
    return storage.getExtensionsDir();
  }

  static async createTmpDir(): Promise<string> {
    return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
  }
}
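
// Illustrative sketch (not part of the diff above), assuming QWEN_DIR resolves
// to '.qwen' under the home directory:
const exampleStorage = new ExtensionStorage('my-ext');
exampleStorage.getExtensionDir(); // ~/.qwen/extensions/my-ext
exampleStorage.getConfigPath();   // ~/.qwen/extensions/my-ext/qwen-extension.json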

export function getWorkspaceExtensions(workspaceDir: string): Extension[] {
  // If the workspace dir is the user extensions dir, there are no workspace extensions.
  if (path.resolve(workspaceDir) === path.resolve(os.homedir())) {
    return [];
  }
  return loadExtensionsFromDir(workspaceDir);
}

export async function copyExtension(
  source: string,
  destination: string,
): Promise<void> {
  await fs.promises.cp(source, destination, { recursive: true });
}

export async function performWorkspaceExtensionMigration(
  extensions: Extension[],
  requestConsent: (consent: string) => Promise<boolean>,
): Promise<string[]> {
  const failedInstallNames: string[] = [];

  for (const extension of extensions) {
    try {
      const installMetadata: ExtensionInstallMetadata = {
        source: extension.path,
        type: 'local',
      };
      await installExtension(installMetadata, requestConsent);
    } catch (_) {
      failedInstallNames.push(extension.config.name);
    }
  }
  return failedInstallNames;
}
|
||||
function getTelemetryConfig(cwd: string) {
|
||||
const settings = loadSettings(cwd);
|
||||
const config = new Config({
|
||||
telemetry: settings.merged.telemetry,
|
||||
interactive: false,
|
||||
targetDir: cwd,
|
||||
cwd,
|
||||
model: '',
|
||||
debugMode: false,
|
||||
});
|
||||
return config;
|
||||
}
|
||||
|
||||
export function loadExtensions(
|
||||
extensionEnablementManager: ExtensionEnablementManager,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): Extension[] {
|
||||
const settings = loadSettings(workspaceDir).merged;
|
||||
const allExtensions = [...loadUserExtensions()];
|
||||
|
||||
if (
|
||||
(isWorkspaceTrusted(settings) ?? true) &&
|
||||
// Default management setting to true
|
||||
!(settings.experimental?.extensionManagement ?? true)
|
||||
) {
|
||||
allExtensions.push(...getWorkspaceExtensions(workspaceDir));
|
||||
}
|
||||
|
||||
const uniqueExtensions = new Map<string, Extension>();
|
||||
|
||||
for (const extension of allExtensions) {
|
||||
if (
|
||||
!uniqueExtensions.has(extension.config.name) &&
|
||||
extensionEnablementManager.isEnabled(extension.config.name, workspaceDir)
|
||||
) {
|
||||
uniqueExtensions.set(extension.config.name, extension);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(uniqueExtensions.values());
|
||||
}
|
||||
|
||||
export function loadUserExtensions(): Extension[] {
|
||||
const userExtensions = loadExtensionsFromDir(os.homedir());
|
||||
|
||||
const uniqueExtensions = new Map<string, Extension>();
|
||||
for (const extension of userExtensions) {
|
||||
if (!uniqueExtensions.has(extension.config.name)) {
|
||||
uniqueExtensions.set(extension.config.name, extension);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(uniqueExtensions.values());
|
||||
}
|
||||
|
||||
export function loadExtensionsFromDir(dir: string): Extension[] {
|
||||
const storage = new Storage(dir);
|
||||
const extensionsDir = storage.getExtensionsDir();
|
||||
if (!fs.existsSync(extensionsDir)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const extensions: Extension[] = [];
|
||||
for (const subdir of fs.readdirSync(extensionsDir)) {
|
||||
const extensionDir = path.join(extensionsDir, subdir);
|
||||
|
||||
const extension = loadExtension({ extensionDir, workspaceDir: dir });
|
||||
if (extension != null) {
|
||||
extensions.push(extension);
|
||||
}
|
||||
}
|
||||
return extensions;
|
||||
}
|
||||
|
||||
export function loadExtension(context: LoadExtensionContext): Extension | null {
|
||||
const { extensionDir, workspaceDir } = context;
|
||||
if (!fs.statSync(extensionDir).isDirectory()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const installMetadata = loadInstallMetadata(extensionDir);
|
||||
let effectiveExtensionPath = extensionDir;
|
||||
|
||||
if (installMetadata?.type === 'link') {
|
||||
effectiveExtensionPath = installMetadata.source;
|
||||
}
|
||||
|
||||
try {
|
||||
let config = loadExtensionConfig({
|
||||
extensionDir: effectiveExtensionPath,
|
||||
workspaceDir,
|
||||
});
|
||||
|
||||
config = resolveEnvVarsInObject(config);
|
||||
|
||||
if (config.mcpServers) {
|
||||
config.mcpServers = Object.fromEntries(
|
||||
Object.entries(config.mcpServers).map(([key, value]) => [
|
||||
key,
|
||||
filterMcpConfig(value),
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
const contextFiles = getContextFileNames(config)
|
||||
.map((contextFileName) =>
|
||||
path.join(effectiveExtensionPath, contextFileName),
|
||||
)
|
||||
.filter((contextFilePath) => fs.existsSync(contextFilePath));
|
||||
|
||||
return {
|
||||
path: effectiveExtensionPath,
|
||||
config,
|
||||
contextFiles,
|
||||
installMetadata,
|
||||
};
|
||||
} catch (e) {
|
||||
console.error(
|
||||
`Warning: Skipping extension in ${effectiveExtensionPath}: ${getErrorMessage(
|
||||
e,
|
||||
)}`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function loadExtensionByName(
|
||||
name: string,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): Extension | null {
|
||||
const userExtensionsDir = ExtensionStorage.getUserExtensionsDir();
|
||||
if (!fs.existsSync(userExtensionsDir)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
for (const subdir of fs.readdirSync(userExtensionsDir)) {
|
||||
const extensionDir = path.join(userExtensionsDir, subdir);
|
||||
if (!fs.statSync(extensionDir).isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
const extension = loadExtension({ extensionDir, workspaceDir });
|
||||
if (
|
||||
extension &&
|
||||
extension.config.name.toLowerCase() === name.toLowerCase()
|
||||
) {
|
||||
return extension;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function filterMcpConfig(original: MCPServerConfig): MCPServerConfig {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { trust, ...rest } = original;
|
||||
return Object.freeze(rest);
|
||||
}
|
||||
|
||||
export function loadInstallMetadata(
|
||||
extensionDir: string,
|
||||
): ExtensionInstallMetadata | undefined {
|
||||
const metadataFilePath = path.join(extensionDir, INSTALL_METADATA_FILENAME);
|
||||
try {
|
||||
const configContent = fs.readFileSync(metadataFilePath, 'utf-8');
|
||||
const metadata = JSON.parse(configContent) as ExtensionInstallMetadata;
|
||||
return metadata;
|
||||
} catch (_e) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function getContextFileNames(config: ExtensionConfig): string[] {
|
||||
if (!config.contextFileName) {
|
||||
return ['QWEN.md'];
|
||||
} else if (!Array.isArray(config.contextFileName)) {
|
||||
return [config.contextFileName];
|
||||
}
|
||||
return config.contextFileName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an annotated list of extensions. If an extension is listed in enabledExtensionNames, it will be active.
|
||||
* If enabledExtensionNames is empty, an extension is active unless it is disabled.
|
||||
* @param extensions The base list of extensions.
|
||||
* @param enabledExtensionNames The names of explicitly enabled extensions.
|
||||
* @param workspaceDir The current workspace directory.
|
||||
*/
|
||||
export function annotateActiveExtensions(
|
||||
extensions: Extension[],
|
||||
workspaceDir: string,
|
||||
manager: ExtensionEnablementManager,
|
||||
): GeminiCLIExtension[] {
|
||||
manager.validateExtensionOverrides(extensions);
|
||||
return extensions.map((extension) => ({
|
||||
name: extension.config.name,
|
||||
version: extension.config.version,
|
||||
isActive: manager.isEnabled(extension.config.name, workspaceDir),
|
||||
path: extension.path,
|
||||
installMetadata: extension.installMetadata,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to perform an action, by reading a Y/n
|
||||
* character from stdin.
|
||||
*
|
||||
* This should not be called from interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param consentDescription The description of the thing they will be consenting to.
|
||||
* @returns boolean, whether they consented or not.
|
||||
*/
|
||||
export async function requestConsentNonInteractive(
|
||||
consentDescription: string,
|
||||
): Promise<boolean> {
|
||||
console.info(consentDescription);
|
||||
const result = await promptForConsentNonInteractive(
|
||||
'Do you want to continue? [Y/n]: ',
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to perform an action, in interactive mode.
|
||||
*
|
||||
* This should not be called from non-interactive mode as it will not work.
|
||||
*
|
||||
* @param consentDescription The description of the thing they will be consenting to.
|
||||
* @param setExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
|
||||
* @returns boolean, whether they consented or not.
|
||||
*/
|
||||
export async function requestConsentInteractive(
|
||||
consentDescription: string,
|
||||
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
|
||||
): Promise<boolean> {
|
||||
return await promptForConsentInteractive(
|
||||
consentDescription + '\n\nDo you want to continue?',
|
||||
addExtensionUpdateConfirmationRequest,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks users a prompt and awaits for a y/n response on stdin.
|
||||
*
|
||||
* This should not be called from interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param prompt A yes/no prompt to ask the user
|
||||
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
|
||||
*/
|
||||
async function promptForConsentNonInteractive(
|
||||
prompt: string,
|
||||
): Promise<boolean> {
|
||||
const readline = await import('node:readline');
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
});
|
||||
|
||||
return new Promise((resolve) => {
|
||||
rl.question(prompt, (answer) => {
|
||||
rl.close();
|
||||
resolve(['y', ''].includes(answer.trim().toLowerCase()));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks users an interactive yes/no prompt.
|
||||
*
|
||||
* This should not be called from non-interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param prompt A markdown prompt to ask the user
|
||||
* @param setExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
|
||||
* @returns Whether or not the user answers yes.
|
||||
*/
|
||||
async function promptForConsentInteractive(
|
||||
prompt: string,
|
||||
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
|
||||
): Promise<boolean> {
|
||||
return await new Promise<boolean>((resolve) => {
|
||||
addExtensionUpdateConfirmationRequest({
|
||||
prompt,
|
||||
onConfirm: (resolvedConfirmed) => {
|
||||
resolve(resolvedConfirmed);
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function installExtension(
|
||||
installMetadata: ExtensionInstallMetadata,
|
||||
requestConsent: (consent: string) => Promise<boolean>,
|
||||
cwd: string = process.cwd(),
|
||||
previousExtensionConfig?: ExtensionConfig,
|
||||
): Promise<string> {
|
||||
const telemetryConfig = getTelemetryConfig(cwd);
|
||||
let newExtensionConfig: ExtensionConfig | null = null;
|
||||
let localSourcePath: string | undefined;
|
||||
|
||||
try {
|
||||
const settings = loadSettings(cwd).merged;
|
||||
if (!isWorkspaceTrusted(settings)) {
|
||||
throw new Error(
|
||||
`Could not install extension from untrusted folder at ${installMetadata.source}`,
|
||||
);
|
||||
}
|
||||
|
||||
const extensionsDir = ExtensionStorage.getUserExtensionsDir();
|
||||
await fs.promises.mkdir(extensionsDir, { recursive: true });
|
||||
|
||||
if (
|
||||
!path.isAbsolute(installMetadata.source) &&
|
||||
(installMetadata.type === 'local' || installMetadata.type === 'link')
|
||||
) {
|
||||
installMetadata.source = path.resolve(cwd, installMetadata.source);
|
||||
}
|
||||
|
||||
let tempDir: string | undefined;
|
||||
|
||||
if (
|
||||
installMetadata.type === 'git' ||
|
||||
installMetadata.type === 'github-release'
|
||||
) {
|
||||
tempDir = await ExtensionStorage.createTmpDir();
|
||||
try {
|
||||
const result = await downloadFromGitHubRelease(
|
||||
installMetadata,
|
||||
tempDir,
|
||||
);
|
||||
installMetadata.type = result.type;
|
||||
installMetadata.releaseTag = result.tagName;
|
||||
} catch (_error) {
|
||||
await cloneFromGit(installMetadata, tempDir);
|
||||
installMetadata.type = 'git';
|
||||
}
|
||||
localSourcePath = tempDir;
|
||||
} else if (
|
||||
installMetadata.type === 'local' ||
|
||||
installMetadata.type === 'link'
|
||||
) {
|
||||
localSourcePath = installMetadata.source;
|
||||
} else {
|
||||
throw new Error(`Unsupported install type: ${installMetadata.type}`);
|
||||
}
|
||||
|
||||
try {
|
||||
newExtensionConfig = loadExtensionConfig({
|
||||
extensionDir: localSourcePath,
|
||||
workspaceDir: cwd,
|
||||
});
|
||||
|
||||
const newExtensionName = newExtensionConfig.name;
|
||||
const extensionStorage = new ExtensionStorage(newExtensionName);
|
||||
const destinationPath = extensionStorage.getExtensionDir();
|
||||
|
||||
const installedExtensions = loadUserExtensions();
|
||||
if (
|
||||
installedExtensions.some(
|
||||
(installed) => installed.config.name === newExtensionName,
|
||||
)
|
||||
) {
|
||||
throw new Error(
|
||||
`Extension "${newExtensionName}" is already installed. Please uninstall it first.`,
|
||||
);
|
||||
}
|
||||
await maybeRequestConsentOrFail(
|
||||
newExtensionConfig,
|
||||
requestConsent,
|
||||
previousExtensionConfig,
|
||||
);
|
||||
await fs.promises.mkdir(destinationPath, { recursive: true });
|
||||
|
||||
if (
|
||||
installMetadata.type === 'local' ||
|
||||
installMetadata.type === 'git' ||
|
||||
installMetadata.type === 'github-release'
|
||||
) {
|
||||
await copyExtension(localSourcePath, destinationPath);
|
||||
}
|
||||
|
||||
const metadataString = JSON.stringify(installMetadata, null, 2);
|
||||
const metadataPath = path.join(
|
||||
destinationPath,
|
||||
INSTALL_METADATA_FILENAME,
|
||||
);
|
||||
await fs.promises.writeFile(metadataPath, metadataString);
|
||||
} finally {
|
||||
if (tempDir) {
|
||||
await fs.promises.rm(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
logExtensionInstallEvent(
|
||||
telemetryConfig,
|
||||
new ExtensionInstallEvent(
|
||||
newExtensionConfig!.name,
|
||||
newExtensionConfig!.version,
|
||||
installMetadata.source,
|
||||
'success',
|
||||
),
|
||||
);
|
||||
|
||||
enableExtension(newExtensionConfig!.name, SettingScope.User);
|
||||
return newExtensionConfig!.name;
|
||||
} catch (error) {
|
||||
// Attempt to load config from the source path even if installation fails
|
||||
// to get the name and version for logging.
|
||||
if (!newExtensionConfig && localSourcePath) {
|
||||
try {
|
||||
newExtensionConfig = loadExtensionConfig({
|
||||
extensionDir: localSourcePath,
|
||||
workspaceDir: cwd,
|
||||
});
|
||||
} catch {
|
||||
// Ignore error, this is just for logging.
|
||||
}
|
||||
}
|
||||
logExtensionInstallEvent(
|
||||
telemetryConfig,
|
||||
new ExtensionInstallEvent(
|
||||
newExtensionConfig?.name ?? '',
|
||||
newExtensionConfig?.version ?? '',
|
||||
installMetadata.source,
|
||||
'error',
|
||||
),
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a consent string for installing an extension based on its
|
||||
* extensionConfig.
|
||||
*/
|
||||
function extensionConsentString(extensionConfig: ExtensionConfig): string {
|
||||
const output: string[] = [];
|
||||
const mcpServerEntries = Object.entries(extensionConfig.mcpServers || {});
|
||||
output.push(`Installing extension "${extensionConfig.name}".`);
|
||||
output.push(
|
||||
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**',
|
||||
);
|
||||
|
||||
if (mcpServerEntries.length) {
|
||||
output.push('This extension will run the following MCP servers:');
|
||||
for (const [key, mcpServer] of mcpServerEntries) {
|
||||
const isLocal = !!mcpServer.command;
|
||||
const source =
|
||||
mcpServer.httpUrl ??
|
||||
`${mcpServer.command || ''}${mcpServer.args ? ' ' + mcpServer.args.join(' ') : ''}`;
|
||||
output.push(` * ${key} (${isLocal ? 'local' : 'remote'}): ${source}`);
|
||||
}
|
||||
}
|
||||
if (extensionConfig.contextFileName) {
|
||||
output.push(
|
||||
`This extension will append info to your QWEN.md context using ${extensionConfig.contextFileName}`,
|
||||
);
|
||||
}
|
||||
if (extensionConfig.excludeTools) {
|
||||
output.push(
|
||||
`This extension will exclude the following core tools: ${extensionConfig.excludeTools}`,
|
||||
);
|
||||
}
|
||||
return output.join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to install an extension (extensionConfig), if
|
||||
* there is any difference between the consent string for `extensionConfig` and
|
||||
* `previousExtensionConfig`.
|
||||
*
|
||||
* Always requests consent if previousExtensionConfig is null.
|
||||
*
|
||||
* Throws if the user does not consent.
|
||||
*/
|
||||
async function maybeRequestConsentOrFail(
|
||||
extensionConfig: ExtensionConfig,
|
||||
requestConsent: (consent: string) => Promise<boolean>,
|
||||
previousExtensionConfig?: ExtensionConfig,
|
||||
) {
|
||||
const extensionConsent = extensionConsentString(extensionConfig);
|
||||
if (previousExtensionConfig) {
|
||||
const previousExtensionConsent = extensionConsentString(
|
||||
previousExtensionConfig,
|
||||
);
|
||||
if (previousExtensionConsent === extensionConsent) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (!(await requestConsent(extensionConsent))) {
|
||||
throw new Error(`Installation cancelled for "${extensionConfig.name}".`);
|
||||
}
|
||||
}
|
||||
|
||||
export function validateName(name: string) {
|
||||
if (!/^[a-zA-Z0-9-]+$/.test(name)) {
|
||||
throw new Error(
|
||||
`Invalid extension name: "${name}". Only letters (a-z, A-Z), numbers (0-9), and dashes (-) are allowed.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
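
// Illustrative sketch (not part of the diff above): validateName only accepts
// letters, digits, and dashes, so spaced or otherwise punctuated names are rejected.
validateName('my-extension-2'); // ok
// validateName('my extension'); // would throw: only a-z, A-Z, 0-9 and '-' are allowed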
|
||||
export function loadExtensionConfig(
|
||||
context: LoadExtensionContext,
|
||||
): ExtensionConfig {
|
||||
const { extensionDir, workspaceDir } = context;
|
||||
const configFilePath = path.join(extensionDir, EXTENSIONS_CONFIG_FILENAME);
|
||||
if (!fs.existsSync(configFilePath)) {
|
||||
throw new Error(`Configuration file not found at ${configFilePath}`);
|
||||
}
|
||||
try {
|
||||
const configContent = fs.readFileSync(configFilePath, 'utf-8');
|
||||
const config = recursivelyHydrateStrings(JSON.parse(configContent), {
|
||||
extensionPath: extensionDir,
|
||||
workspacePath: workspaceDir,
|
||||
'/': path.sep,
|
||||
pathSeparator: path.sep,
|
||||
}) as unknown as ExtensionConfig;
|
||||
if (!config.name || !config.version) {
|
||||
throw new Error(
|
||||
`Invalid configuration in ${configFilePath}: missing ${!config.name ? '"name"' : '"version"'}`,
|
||||
);
|
||||
}
|
||||
validateName(config.name);
|
||||
return config;
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Failed to load extension config from ${configFilePath}: ${getErrorMessage(
|
||||
e,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function uninstallExtension(
|
||||
extensionIdentifier: string,
|
||||
cwd: string = process.cwd(),
|
||||
): Promise<void> {
|
||||
const telemetryConfig = getTelemetryConfig(cwd);
|
||||
const installedExtensions = loadUserExtensions();
|
||||
const extensionName = installedExtensions.find(
|
||||
(installed) =>
|
||||
installed.config.name.toLowerCase() ===
|
||||
extensionIdentifier.toLowerCase() ||
|
||||
installed.installMetadata?.source.toLowerCase() ===
|
||||
extensionIdentifier.toLowerCase(),
|
||||
)?.config.name;
|
||||
if (!extensionName) {
|
||||
throw new Error(`Extension not found.`);
|
||||
}
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
[extensionName],
|
||||
);
|
||||
manager.remove(extensionName);
|
||||
const storage = new ExtensionStorage(extensionName);
|
||||
|
||||
await fs.promises.rm(storage.getExtensionDir(), {
|
||||
recursive: true,
|
||||
force: true,
|
||||
});
|
||||
logExtensionUninstall(
|
||||
telemetryConfig,
|
||||
new ExtensionUninstallEvent(extensionName, 'success'),
|
||||
);
|
||||
}
|
||||
|
||||
export function toOutputString(
|
||||
extension: Extension,
|
||||
workspaceDir: string,
|
||||
): string {
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
);
|
||||
const userEnabled = manager.isEnabled(extension.config.name, os.homedir());
|
||||
const workspaceEnabled = manager.isEnabled(
|
||||
extension.config.name,
|
||||
workspaceDir,
|
||||
);
|
||||
|
||||
const status = workspaceEnabled ? chalk.green('✓') : chalk.red('✗');
|
||||
let output = `${status} ${extension.config.name} (${extension.config.version})`;
|
||||
output += `\n Path: ${extension.path}`;
|
||||
if (extension.installMetadata) {
|
||||
output += `\n Source: ${extension.installMetadata.source} (Type: ${extension.installMetadata.type})`;
|
||||
if (extension.installMetadata.ref) {
|
||||
output += `\n Ref: ${extension.installMetadata.ref}`;
|
||||
}
|
||||
if (extension.installMetadata.releaseTag) {
|
||||
output += `\n Release tag: ${extension.installMetadata.releaseTag}`;
|
||||
}
|
||||
}
|
||||
output += `\n Enabled (User): ${userEnabled}`;
|
||||
output += `\n Enabled (Workspace): ${workspaceEnabled}`;
|
||||
if (extension.contextFiles.length > 0) {
|
||||
output += `\n Context files:`;
|
||||
extension.contextFiles.forEach((contextFile) => {
|
||||
output += `\n ${contextFile}`;
|
||||
});
|
||||
}
|
||||
if (extension.config.mcpServers) {
|
||||
output += `\n MCP servers:`;
|
||||
Object.keys(extension.config.mcpServers).forEach((key) => {
|
||||
output += `\n ${key}`;
|
||||
});
|
||||
}
|
||||
if (extension.config.excludeTools) {
|
||||
output += `\n Excluded tools:`;
|
||||
extension.config.excludeTools.forEach((tool) => {
|
||||
output += `\n ${tool}`;
|
||||
});
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
export function disableExtension(
|
||||
name: string,
|
||||
scope: SettingScope,
|
||||
cwd: string = process.cwd(),
|
||||
) {
|
||||
const config = getTelemetryConfig(cwd);
|
||||
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
|
||||
throw new Error('System and SystemDefaults scopes are not supported.');
|
||||
}
|
||||
const extension = loadExtensionByName(name, cwd);
|
||||
if (!extension) {
|
||||
throw new Error(`Extension with name ${name} does not exist.`);
|
||||
}
|
||||
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
[name],
|
||||
);
|
||||
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
|
||||
manager.disable(name, true, scopePath);
|
||||
logExtensionDisable(config, new ExtensionDisableEvent(name, scope));
|
||||
}
|
||||
|
||||
export function enableExtension(
|
||||
name: string,
|
||||
scope: SettingScope,
|
||||
cwd: string = process.cwd(),
|
||||
) {
|
||||
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
|
||||
throw new Error('System and SystemDefaults scopes are not supported.');
|
||||
}
|
||||
const extension = loadExtensionByName(name, cwd);
|
||||
if (!extension) {
|
||||
throw new Error(`Extension with name ${name} does not exist.`);
|
||||
}
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
);
|
||||
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
|
||||
manager.enable(name, true, scopePath);
|
||||
const config = getTelemetryConfig(cwd);
|
||||
logExtensionEnable(config, new ExtensionEnableEvent(name, scope));
|
||||
}
|
||||
@@ -1,424 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as path from 'node:path';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
import { ExtensionEnablementManager, Override } from './extensionEnablement.js';
|
||||
import type { Extension } from '../extension.js';
|
||||
|
||||
// Helper to create a temporary directory for testing
|
||||
function createTestDir() {
|
||||
const dirPath = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-test-'));
|
||||
return {
|
||||
path: dirPath,
|
||||
cleanup: () => fs.rmSync(dirPath, { recursive: true, force: true }),
|
||||
};
|
||||
}
|
||||
|
||||
let testDir: { path: string; cleanup: () => void };
|
||||
let configDir: string;
|
||||
let manager: ExtensionEnablementManager;
|
||||
|
||||
describe('ExtensionEnablementManager', () => {
|
||||
beforeEach(() => {
|
||||
testDir = createTestDir();
|
||||
configDir = path.join(testDir.path, '.gemini');
|
||||
manager = new ExtensionEnablementManager(configDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
testDir.cleanup();
|
||||
// Reset the singleton instance for test isolation
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(ExtensionEnablementManager as any).instance = undefined;
|
||||
});
|
||||
|
||||
describe('isEnabled', () => {
|
||||
it('should return true if extension is not configured', () => {
|
||||
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true if no overrides match', () => {
|
||||
manager.disable('ext-test', false, '/another/path');
|
||||
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
|
||||
});
|
||||
|
||||
it('should enable a path based on an override rule', () => {
|
||||
manager.disable('ext-test', true, '/');
|
||||
manager.enable('ext-test', true, '/home/user/projects/');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it('should disable a path based on a disable override rule', () => {
|
||||
manager.enable('ext-test', true, '/');
|
||||
manager.disable('ext-test', true, '/home/user/projects/');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
it('should respect the last matching rule (enable wins)', () => {
|
||||
manager.disable('ext-test', true, '/home/user/projects/');
|
||||
manager.enable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it('should respect the last matching rule (disable wins)', () => {
|
||||
manager.enable('ext-test', true, '/home/user/projects/');
|
||||
manager.disable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle', () => {
|
||||
manager.enable('ext-test', true, '/home/user/projects');
|
||||
manager.disable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
expect(
|
||||
manager.isEnabled('ext-test', '/home/user/projects/something-else'),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('includeSubdirs', () => {
|
||||
it('should add a glob when enabling with includeSubdirs', () => {
|
||||
manager.enable('ext-test', true, '/path/to/dir');
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should not add a glob when enabling without includeSubdirs', () => {
|
||||
manager.enable('ext-test', false, '/path/to/dir');
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
|
||||
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should add a glob when disabling with includeSubdirs', () => {
|
||||
manager.disable('ext-test', true, '/path/to/dir');
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('!/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should remove conflicting glob rule when enabling without subdirs', () => {
|
||||
manager.enable('ext-test', true, '/path/to/dir'); // Adds /path/to/dir*
|
||||
manager.enable('ext-test', false, '/path/to/dir'); // Should remove the glob
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
|
||||
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should remove conflicting non-glob rule when enabling with subdirs', () => {
|
||||
manager.enable('ext-test', false, '/path/to/dir'); // Adds /path/to/dir
|
||||
manager.enable('ext-test', true, '/path/to/dir'); // Should remove the non-glob
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
|
||||
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/');
|
||||
});
|
||||
|
||||
it('should remove conflicting rules when disabling', () => {
|
||||
manager.enable('ext-test', true, '/path/to/dir'); // enabled with glob
|
||||
manager.disable('ext-test', false, '/path/to/dir'); // disabled without
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('!/path/to/dir/');
|
||||
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should correctly evaluate isEnabled with subdirs', () => {
|
||||
manager.disable('ext-test', true, '/');
|
||||
manager.enable('ext-test', true, '/path/to/dir');
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir/')).toBe(true);
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir/sub/')).toBe(true);
|
||||
expect(manager.isEnabled('ext-test', '/path/to/another/')).toBe(false);
|
||||
});
|
||||
|
||||
it('should correctly evaluate isEnabled without subdirs', () => {
|
||||
manager.disable('ext-test', true, '/*');
|
||||
manager.enable('ext-test', false, '/path/to/dir');
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(true);
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir/sub')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('pruning child rules', () => {
|
||||
it('should remove child rules when enabling a parent with subdirs', () => {
|
||||
// Pre-existing rules for children
|
||||
manager.enable('ext-test', false, '/path/to/dir/subdir1');
|
||||
manager.disable('ext-test', true, '/path/to/dir/subdir2');
|
||||
manager.enable('ext-test', false, '/path/to/another/dir');
|
||||
|
||||
// Enable the parent directory
|
||||
manager.enable('ext-test', true, '/path/to/dir');
|
||||
|
||||
const config = manager.readConfig();
|
||||
const overrides = config['ext-test'].overrides;
|
||||
|
||||
// The new parent rule should be present
|
||||
expect(overrides).toContain(`/path/to/dir/*`);
|
||||
|
||||
// Child rules should be removed
|
||||
expect(overrides).not.toContain('/path/to/dir/subdir1/');
|
||||
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
|
||||
|
||||
// Unrelated rules should remain
|
||||
expect(overrides).toContain('/path/to/another/dir/');
|
||||
});
|
||||
|
||||
it('should remove child rules when disabling a parent with subdirs', () => {
|
||||
// Pre-existing rules for children
|
||||
manager.enable('ext-test', false, '/path/to/dir/subdir1');
|
||||
manager.disable('ext-test', true, '/path/to/dir/subdir2');
|
||||
manager.enable('ext-test', false, '/path/to/another/dir');
|
||||
|
||||
// Disable the parent directory
|
||||
manager.disable('ext-test', true, '/path/to/dir');
|
||||
|
||||
const config = manager.readConfig();
|
||||
const overrides = config['ext-test'].overrides;
|
||||
|
||||
// The new parent rule should be present
|
||||
expect(overrides).toContain(`!/path/to/dir/*`);
|
||||
|
||||
// Child rules should be removed
|
||||
expect(overrides).not.toContain('/path/to/dir/subdir1/');
|
||||
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
|
||||
|
||||
// Unrelated rules should remain
|
||||
expect(overrides).toContain('/path/to/another/dir/');
|
||||
});
|
||||
|
||||
it('should not remove child rules if includeSubdirs is false', () => {
|
||||
manager.enable('ext-test', false, '/path/to/dir/subdir1');
|
||||
manager.enable('ext-test', false, '/path/to/dir'); // Not including subdirs
|
||||
|
||||
const config = manager.readConfig();
|
||||
const overrides = config['ext-test'].overrides;
|
||||
|
||||
expect(overrides).toContain('/path/to/dir/subdir1/');
|
||||
expect(overrides).toContain('/path/to/dir/');
|
||||
});
|
||||
});
|
||||
|
||||
it('should enable a path based on an enable override', () => {
|
||||
manager.disable('ext-test', true, '/Users/chrstn');
|
||||
manager.enable('ext-test', true, '/Users/chrstn/gemini-cli');
|
||||
|
||||
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it('should ignore subdirs', () => {
|
||||
manager.disable('ext-test', false, '/Users/chrstn');
|
||||
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
describe('extension overrides (-e <name>)', () => {
|
||||
beforeEach(() => {
|
||||
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
|
||||
});
|
||||
|
||||
it('can enable extensions, case-insensitive', () => {
|
||||
manager.disable('ext-test', true, '/');
|
||||
expect(manager.isEnabled('ext-test', '/')).toBe(true);
|
||||
expect(manager.isEnabled('Ext-Test', '/')).toBe(true);
|
||||
// Double check that it would have been disabled otherwise
|
||||
expect(
|
||||
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it('disable all other extensions', () => {
|
||||
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
|
||||
manager.enable('ext-test-2', true, '/');
|
||||
expect(manager.isEnabled('ext-test-2', '/')).toBe(false);
|
||||
// Double check that it would have been enabled otherwise
|
||||
expect(
|
||||
new ExtensionEnablementManager(configDir).isEnabled('ext-test-2', '/'),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('none disables all extensions', () => {
|
||||
manager = new ExtensionEnablementManager(configDir, ['none']);
|
||||
manager.enable('ext-test', true, '/');
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(false);
|
||||
// Double check that it would have been enabled otherwise
|
||||
expect(
|
||||
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateExtensionOverrides', () => {
|
||||
let consoleErrorSpy: ReturnType<typeof vi.spyOn>;
|
||||
|
||||
beforeEach(() => {
|
||||
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
consoleErrorSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should not log an error if enabledExtensionNamesOverride is empty', () => {
|
||||
const manager = new ExtensionEnablementManager(configDir, []);
|
||||
manager.validateExtensionOverrides([]);
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not log an error if all enabledExtensionNamesOverride are valid', () => {
|
||||
const manager = new ExtensionEnablementManager(configDir, [
|
||||
'ext-one',
|
||||
'ext-two',
|
||||
]);
|
||||
const extensions = [
|
||||
{ config: { name: 'ext-one' } },
|
||||
{ config: { name: 'ext-two' } },
|
||||
] as Extension[];
|
||||
manager.validateExtensionOverrides(extensions);
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should log an error for each invalid extension name in enabledExtensionNamesOverride', () => {
|
||||
const manager = new ExtensionEnablementManager(configDir, [
|
||||
'ext-one',
|
||||
'ext-invalid',
|
||||
'ext-another-invalid',
|
||||
]);
|
||||
const extensions = [
|
||||
{ config: { name: 'ext-one' } },
|
||||
{ config: { name: 'ext-two' } },
|
||||
] as Extension[];
|
||||
manager.validateExtensionOverrides(extensions);
|
||||
expect(consoleErrorSpy).toHaveBeenCalledTimes(2);
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'Extension not found: ext-invalid',
|
||||
);
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'Extension not found: ext-another-invalid',
|
||||
);
|
||||
});
|
||||
|
||||
it('should not log an error if "none" is in enabledExtensionNamesOverride', () => {
|
||||
const manager = new ExtensionEnablementManager(configDir, ['none']);
|
||||
manager.validateExtensionOverrides([]);
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Override', () => {
|
||||
it('should create an override from input', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
expect(override.baseRule).toBe(`/path/to/dir/`);
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(true);
|
||||
});
|
||||
|
||||
it('should create a disable override from input', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.baseRule).toBe(`/path/to/dir/`);
|
||||
expect(override.isDisable).toBe(true);
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create an override from a file rule', () => {
|
||||
const override = Override.fromFileRule('/path/to/dir');
|
||||
expect(override.baseRule).toBe('/path/to/dir');
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create a disable override from a file rule', () => {
|
||||
const override = Override.fromFileRule('!/path/to/dir/');
|
||||
expect(override.isDisable).toBe(true);
|
||||
expect(override.baseRule).toBe('/path/to/dir/');
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create an override with subdirs from a file rule', () => {
|
||||
const override = Override.fromFileRule('/path/to/dir/*');
|
||||
expect(override.baseRule).toBe('/path/to/dir/');
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify conflicting overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/dir', false);
|
||||
expect(override1.conflictsWith(override2)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify non-conflicting overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/another/dir', true);
|
||||
expect(override1.conflictsWith(override2)).toBe(false);
|
||||
});
|
||||
|
||||
it('should correctly identify equal overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/dir', true);
|
||||
expect(override1.isEqualTo(override2)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify unequal overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('!/path/to/dir', true);
|
||||
expect(override1.isEqualTo(override2)).toBe(false);
|
||||
});
|
||||
|
||||
it('should generate the correct regex', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
const regex = override.asRegex();
|
||||
expect(regex.test('/path/to/dir/')).toBe(true);
|
||||
expect(regex.test('/path/to/dir/subdir')).toBe(true);
|
||||
expect(regex.test('/path/to/another/dir')).toBe(false);
|
||||
});
|
||||
|
||||
it('should correctly identify child overrides', () => {
|
||||
const parent = Override.fromInput('/path/to/dir', true);
|
||||
const child = Override.fromInput('/path/to/dir/subdir', false);
|
||||
expect(child.isChildOf(parent)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify child overrides with glob', () => {
|
||||
const parent = Override.fromInput('/path/to/dir/*', true);
|
||||
const child = Override.fromInput('/path/to/dir/subdir', false);
|
||||
expect(child.isChildOf(parent)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify non-child overrides', () => {
|
||||
const parent = Override.fromInput('/path/to/dir', true);
|
||||
const other = Override.fromInput('/path/to/another/dir', false);
|
||||
expect(other.isChildOf(parent)).toBe(false);
|
||||
});
|
||||
|
||||
it('should generate the correct output string', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
expect(override.output()).toBe(`/path/to/dir/*`);
|
||||
});
|
||||
|
||||
it('should generate the correct output string for a disable override', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.output()).toBe(`!/path/to/dir/`);
|
||||
});
|
||||
|
||||
it('should disable a path based on a disable override rule', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.output()).toBe(`!/path/to/dir/`);
|
||||
});
|
||||
});
|
||||
@@ -1,239 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import fs from 'node:fs';
import path from 'node:path';
import { type Extension } from '../extension.js';

export interface ExtensionEnablementConfig {
  overrides: string[];
}

export interface AllExtensionsEnablementConfig {
  [extensionName: string]: ExtensionEnablementConfig;
}

export class Override {
  constructor(
    public baseRule: string,
    public isDisable: boolean,
    public includeSubdirs: boolean,
  ) {}

  static fromInput(inputRule: string, includeSubdirs: boolean): Override {
    const isDisable = inputRule.startsWith('!');
    let baseRule = isDisable ? inputRule.substring(1) : inputRule;
    baseRule = ensureLeadingAndTrailingSlash(baseRule);
    return new Override(baseRule, isDisable, includeSubdirs);
  }

  static fromFileRule(fileRule: string): Override {
    const isDisable = fileRule.startsWith('!');
    let baseRule = isDisable ? fileRule.substring(1) : fileRule;
    const includeSubdirs = baseRule.endsWith('*');
    baseRule = includeSubdirs
      ? baseRule.substring(0, baseRule.length - 1)
      : baseRule;
    return new Override(baseRule, isDisable, includeSubdirs);
  }

  conflictsWith(other: Override): boolean {
    if (this.baseRule === other.baseRule) {
      return (
        this.includeSubdirs !== other.includeSubdirs ||
        this.isDisable !== other.isDisable
      );
    }
    return false;
  }

  isEqualTo(other: Override): boolean {
    return (
      this.baseRule === other.baseRule &&
      this.includeSubdirs === other.includeSubdirs &&
      this.isDisable === other.isDisable
    );
  }

  asRegex(): RegExp {
    return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
  }

  isChildOf(parent: Override) {
    if (!parent.includeSubdirs) {
      return false;
    }
    return parent.asRegex().test(this.baseRule);
  }

  output(): string {
    return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
  }

  matchesPath(path: string) {
    return this.asRegex().test(path);
  }
}
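
// Illustrative sketch (not part of the diff above): how the rule encoding
// round-trips. A leading '!' marks a disable rule and a trailing '*' marks
// "include subdirectories"; paths are normalized to have leading and trailing
// slashes before matching.
const enableWithSubdirs = Override.fromInput('/home/user/projects', true);
enableWithSubdirs.output(); // '/home/user/projects/*'
const disableRule = Override.fromFileRule('!/home/user/projects/legacy/');
disableRule.isDisable; // true
enableWithSubdirs.matchesPath('/home/user/projects/my-app/'); // true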

const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
  // Normalize separators to forward slashes for consistent matching across platforms.
  let result = dirPath.replace(/\\/g, '/');
  if (result.charAt(0) !== '/') {
    result = '/' + result;
  }
  if (result.charAt(result.length - 1) !== '/') {
    result = result + '/';
  }
  return result;
};

/**
 * Converts a glob pattern to a RegExp object.
 * This is a simplified implementation that supports `*`.
 *
 * @param glob The glob pattern to convert.
 * @returns A RegExp object.
 */
function globToRegex(glob: string): RegExp {
  const regexString = glob
    .replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
    .replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group

  return new RegExp(`^${regexString}$`);
}
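
// Illustrative sketch (not part of the diff above): what the simplified glob
// conversion produces for an "include subdirectories" rule.
const subtreeRule = globToRegex('/path/to/dir/*'); // ^/path/to/dir(/.*)?$
subtreeRule.test('/path/to/dir/');     // true
subtreeRule.test('/path/to/dir/sub/'); // true
subtreeRule.test('/path/to/another/'); // false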

export class ExtensionEnablementManager {
  private configFilePath: string;
  private configDir: string;
  // If non-empty, this overrides all other extension configuration and enables
  // only the ones in this list.
  private enabledExtensionNamesOverride: string[];

  constructor(configDir: string, enabledExtensionNames?: string[]) {
    this.configDir = configDir;
    this.configFilePath = path.join(configDir, 'extension-enablement.json');
    this.enabledExtensionNamesOverride =
      enabledExtensionNames?.map((name) => name.toLowerCase()) ?? [];
  }

  validateExtensionOverrides(extensions: Extension[]) {
    for (const name of this.enabledExtensionNamesOverride) {
      if (name === 'none') continue;
      if (
        !extensions.some(
          (ext) => ext.config.name.toLowerCase() === name.toLowerCase(),
        )
      ) {
        console.error(`Extension not found: ${name}`);
      }
    }
  }

  /**
   * Determines if an extension is enabled based on its name and the current
   * path. The last matching rule in the overrides list wins.
   *
   * @param extensionName The name of the extension.
   * @param currentPath The absolute path of the current working directory.
   * @returns True if the extension is enabled, false otherwise.
   */
  isEnabled(extensionName: string, currentPath: string): boolean {
    // If we have a single override called 'none', this disables all extensions.
    // Typically, this comes from the user passing `-e none`.
    if (
      this.enabledExtensionNamesOverride.length === 1 &&
      this.enabledExtensionNamesOverride[0] === 'none'
    ) {
      return false;
    }

    // If we have explicit overrides, only enable those extensions.
    if (this.enabledExtensionNamesOverride.length > 0) {
      // When checking against overrides ONLY, we use a case insensitive match.
      // The override names are already lowercased in the constructor.
      return this.enabledExtensionNamesOverride.includes(
        extensionName.toLocaleLowerCase(),
      );
    }

    // Otherwise, we use the configuration settings
    const config = this.readConfig();
    const extensionConfig = config[extensionName];
    // Extensions are enabled by default.
    let enabled = true;
    const allOverrides = extensionConfig?.overrides ?? [];
    for (const rule of allOverrides) {
      const override = Override.fromFileRule(rule);
      if (override.matchesPath(ensureLeadingAndTrailingSlash(currentPath))) {
        enabled = !override.isDisable;
      }
    }
    return enabled;
  }

  readConfig(): AllExtensionsEnablementConfig {
    try {
      const content = fs.readFileSync(this.configFilePath, 'utf-8');
      return JSON.parse(content);
    } catch (error) {
      if (
        error instanceof Error &&
        'code' in error &&
        error.code === 'ENOENT'
      ) {
        return {};
      }
      console.error('Error reading extension enablement config:', error);
      return {};
    }
  }

  writeConfig(config: AllExtensionsEnablementConfig): void {
    fs.mkdirSync(this.configDir, { recursive: true });
    fs.writeFileSync(this.configFilePath, JSON.stringify(config, null, 2));
  }

  enable(
    extensionName: string,
    includeSubdirs: boolean,
    scopePath: string,
  ): void {
    const config = this.readConfig();
    if (!config[extensionName]) {
      config[extensionName] = { overrides: [] };
    }
    const override = Override.fromInput(scopePath, includeSubdirs);
    const overrides = config[extensionName].overrides.filter((rule) => {
      const fileOverride = Override.fromFileRule(rule);
      if (
        fileOverride.conflictsWith(override) ||
        fileOverride.isEqualTo(override)
      ) {
        return false; // Remove conflicts and equivalent values.
      }
      return !fileOverride.isChildOf(override);
    });
    overrides.push(override.output());
    config[extensionName].overrides = overrides;
    this.writeConfig(config);
  }

  disable(
    extensionName: string,
    includeSubdirs: boolean,
    scopePath: string,
  ): void {
    this.enable(extensionName, includeSubdirs, `!${scopePath}`);
  }

  remove(extensionName: string): void {
    const config = this.readConfig();
    if (config[extensionName]) {
      delete config[extensionName];
      this.writeConfig(config);
    }
  }
}
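
// Illustrative sketch (not part of the diff above): last-match-wins evaluation
// against the persisted overrides. disable() is just enable() with a '!'-prefixed
// scope path, so both share the conflict/child pruning above. The config
// directory below is made up for the example.
const exampleManager = new ExtensionEnablementManager('/tmp/qwen-enablement-demo');
exampleManager.disable('ext-test', true, '/home/user/projects');       // stores '!/home/user/projects/*'
exampleManager.enable('ext-test', true, '/home/user/projects/my-app'); // stores '/home/user/projects/my-app/*'
exampleManager.isEnabled('ext-test', '/home/user/projects/my-app/src'); // true (enable rule matches last)
exampleManager.isEnabled('ext-test', '/home/user/projects/other');      // false (only the disable rule matches)
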
@@ -1,468 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { vi } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
EXTENSIONS_CONFIG_FILENAME,
|
||||
ExtensionStorage,
|
||||
INSTALL_METADATA_FILENAME,
|
||||
annotateActiveExtensions,
|
||||
loadExtension,
|
||||
} from '../extension.js';
|
||||
import { checkForAllExtensionUpdates, updateExtension } from './update.js';
|
||||
import { QWEN_DIR } from '@qwen-code/qwen-code-core';
|
||||
import { isWorkspaceTrusted } from '../trustedFolders.js';
|
||||
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
|
||||
import { createExtension } from '../../test-utils/createExtension.js';
|
||||
import { ExtensionEnablementManager } from './extensionEnablement.js';
|
||||
|
||||
const mockGit = {
|
||||
clone: vi.fn(),
|
||||
getRemotes: vi.fn(),
|
||||
fetch: vi.fn(),
|
||||
checkout: vi.fn(),
|
||||
listRemote: vi.fn(),
|
||||
revparse: vi.fn(),
|
||||
// Not a part of the actual API, but we need to use this to do the correct
|
||||
// file system interactions.
|
||||
path: vi.fn(),
|
||||
};
|
||||
|
||||
vi.mock('simple-git', () => ({
|
||||
simpleGit: vi.fn((path: string) => {
|
||||
mockGit.path.mockReturnValue(path);
|
||||
return mockGit;
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock('../extensions/github.js', async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import('../extensions/github.js')>();
|
||||
return {
|
||||
...actual,
|
||||
downloadFromGitHubRelease: vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const mockedOs = await importOriginal<typeof os>();
|
||||
return {
|
||||
...mockedOs,
|
||||
homedir: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../trustedFolders.js', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../trustedFolders.js')>();
|
||||
return {
|
||||
...actual,
|
||||
isWorkspaceTrusted: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
|
||||
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
|
||||
return {
|
||||
...actual,
|
||||
logExtensionInstallEvent: mockLogExtensionInstallEvent,
|
||||
logExtensionUninstall: mockLogExtensionUninstall,
|
||||
ExtensionInstallEvent: vi.fn(),
|
||||
ExtensionUninstallEvent: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
describe('update tests', () => {
|
||||
let tempHomeDir: string;
|
||||
let tempWorkspaceDir: string;
|
||||
let userExtensionsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'qwen-code-test-home-'),
|
||||
);
|
||||
tempWorkspaceDir = fs.mkdtempSync(
|
||||
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
|
||||
);
|
||||
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
|
||||
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
|
||||
// Clean up before each test
|
||||
fs.rmSync(userExtensionsDir, { recursive: true, force: true });
|
||||
fs.mkdirSync(userExtensionsDir, { recursive: true });
|
||||
vi.mocked(isWorkspaceTrusted).mockReturnValue({
|
||||
isTrusted: true,
|
||||
source: 'file',
|
||||
});
|
||||
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
|
||||
Object.values(mockGit).forEach((fn) => fn.mockReset());
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('updateExtension', () => {
|
||||
it('should update a git-installed extension', async () => {
|
||||
const gitUrl = 'https://github.com/google/gemini-extensions.git';
|
||||
const extensionName = 'qwen-extensions';
|
||||
const targetExtDir = path.join(userExtensionsDir, extensionName);
|
||||
const metadataPath = path.join(targetExtDir, INSTALL_METADATA_FILENAME);
|
||||
|
||||
fs.mkdirSync(targetExtDir, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.0.0' }),
|
||||
);
|
||||
fs.writeFileSync(
|
||||
metadataPath,
|
||||
JSON.stringify({ source: gitUrl, type: 'git' }),
|
||||
);
|
||||
|
||||
mockGit.clone.mockImplementation(async (_, destination) => {
|
||||
fs.mkdirSync(path.join(mockGit.path(), destination), {
|
||||
recursive: true,
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.1.0' }),
|
||||
);
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: targetExtDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
const updateInfo = await updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
() => {},
|
||||
);
|
||||
|
||||
expect(updateInfo).toEqual({
|
||||
name: 'qwen-extensions',
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
});
|
||||
|
||||
const updatedConfig = JSON.parse(
|
||||
fs.readFileSync(
|
||||
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
'utf-8',
|
||||
),
|
||||
);
|
||||
expect(updatedConfig.version).toBe('1.1.0');
|
||||
});
|
||||
|
||||
it('should call setExtensionUpdateState with UPDATING and then UPDATED_NEEDS_RESTART on success', async () => {
|
||||
const extensionName = 'test-extension';
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: extensionName,
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
|
||||
mockGit.clone.mockImplementation(async (_, destination) => {
|
||||
fs.mkdirSync(path.join(mockGit.path(), destination), {
|
||||
recursive: true,
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.1.0' }),
|
||||
);
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
|
||||
const dispatch = vi.fn();
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
await updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
dispatch,
|
||||
);
|
||||
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATING,
|
||||
},
|
||||
});
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should call setExtensionUpdateState with ERROR on failure', async () => {
|
||||
const extensionName = 'test-extension';
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: extensionName,
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
|
||||
mockGit.clone.mockRejectedValue(new Error('Git clone failed'));
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
|
||||
const dispatch = vi.fn();
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
await expect(
|
||||
updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
dispatch,
|
||||
),
|
||||
).rejects.toThrow();
|
||||
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATING,
|
||||
},
|
||||
});
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.ERROR,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkForAllExtensionUpdates', () => {
|
||||
it('should return UpdateAvailable for a git extension with updates', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
mockGit.getRemotes.mockResolvedValue([
|
||||
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
|
||||
]);
|
||||
mockGit.listRemote.mockResolvedValue('remoteHash HEAD');
|
||||
mockGit.revparse.mockResolvedValue('localHash');
|
||||
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return UpToDate for a git extension with no updates', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
mockGit.getRemotes.mockResolvedValue([
|
||||
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
|
||||
]);
|
||||
mockGit.listRemote.mockResolvedValue('sameHash HEAD');
|
||||
mockGit.revparse.mockResolvedValue('sameHash');
|
||||
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UP_TO_DATE,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return UpToDate for a local extension with no updates', async () => {
|
||||
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
|
||||
const sourceExtensionDir = createExtension({
|
||||
extensionsDir: localExtensionSourcePath,
|
||||
name: 'my-local-ext',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const installedExtensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'local-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: { source: sourceExtensionDir, type: 'local' },
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: installedExtensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'local-extension',
|
||||
state: ExtensionUpdateState.UP_TO_DATE,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return UpdateAvailable for a local extension with updates', async () => {
|
||||
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
|
||||
const sourceExtensionDir = createExtension({
|
||||
extensionsDir: localExtensionSourcePath,
|
||||
name: 'my-local-ext',
|
||||
version: '1.1.0',
|
||||
});
|
||||
|
||||
const installedExtensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'local-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: { source: sourceExtensionDir, type: 'local' },
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: installedExtensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'local-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return Error when git check fails', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'error-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
mockGit.getRemotes.mockRejectedValue(new Error('Git error'));
|
||||
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'error-extension',
|
||||
state: ExtensionUpdateState.ERROR,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,182 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import {
  type ExtensionUpdateAction,
  ExtensionUpdateState,
  type ExtensionUpdateStatus,
} from '../../ui/state/extensions.js';
import {
  copyExtension,
  installExtension,
  uninstallExtension,
  loadExtension,
  loadInstallMetadata,
  ExtensionStorage,
  loadExtensionConfig,
} from '../extension.js';
import { checkForExtensionUpdate } from './github.js';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import { getErrorMessage } from '../../utils/errors.js';

export interface ExtensionUpdateInfo {
  name: string;
  originalVersion: string;
  updatedVersion: string;
}

export async function updateExtension(
  extension: GeminiCLIExtension,
  cwd: string = process.cwd(),
  requestConsent: (consent: string) => Promise<boolean>,
  currentState: ExtensionUpdateState,
  dispatchExtensionStateUpdate: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo | undefined> {
  if (currentState === ExtensionUpdateState.UPDATING) {
    return undefined;
  }
  dispatchExtensionStateUpdate({
    type: 'SET_STATE',
    payload: { name: extension.name, state: ExtensionUpdateState.UPDATING },
  });
  const installMetadata = loadInstallMetadata(extension.path);

  if (!installMetadata?.type) {
    dispatchExtensionStateUpdate({
      type: 'SET_STATE',
      payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
    });
    throw new Error(
      `Extension ${extension.name} cannot be updated, type is unknown.`,
    );
  }
  if (installMetadata?.type === 'link') {
    dispatchExtensionStateUpdate({
      type: 'SET_STATE',
      payload: { name: extension.name, state: ExtensionUpdateState.UP_TO_DATE },
    });
    throw new Error(`Extension is linked so does not need to be updated`);
  }
  const originalVersion = extension.version;

  const tempDir = await ExtensionStorage.createTmpDir();
  try {
    await copyExtension(extension.path, tempDir);
    const previousExtensionConfig = await loadExtensionConfig({
      extensionDir: extension.path,
      workspaceDir: cwd,
    });
    await uninstallExtension(extension.name, cwd);
    await installExtension(
      installMetadata,
      requestConsent,
      cwd,
      previousExtensionConfig,
    );

    const updatedExtensionStorage = new ExtensionStorage(extension.name);
    const updatedExtension = loadExtension({
      extensionDir: updatedExtensionStorage.getExtensionDir(),
      workspaceDir: cwd,
    });
    if (!updatedExtension) {
      dispatchExtensionStateUpdate({
        type: 'SET_STATE',
        payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
      });
      throw new Error('Updated extension not found after installation.');
    }
    const updatedVersion = updatedExtension.config.version;
    dispatchExtensionStateUpdate({
      type: 'SET_STATE',
      payload: {
        name: extension.name,
        state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
      },
    });
    return {
      name: extension.name,
      originalVersion,
      updatedVersion,
    };
  } catch (e) {
    console.error(
      `Error updating extension, rolling back. ${getErrorMessage(e)}`,
    );
    dispatchExtensionStateUpdate({
      type: 'SET_STATE',
      payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
    });
    await copyExtension(tempDir, extension.path);
    throw e;
  } finally {
    await fs.promises.rm(tempDir, { recursive: true, force: true });
  }
}

export async function updateAllUpdatableExtensions(
  cwd: string = process.cwd(),
  requestConsent: (consent: string) => Promise<boolean>,
  extensions: GeminiCLIExtension[],
  extensionsState: Map<string, ExtensionUpdateStatus>,
  dispatch: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo[]> {
  return (
    await Promise.all(
      extensions
        .filter(
          (extension) =>
            extensionsState.get(extension.name)?.status ===
            ExtensionUpdateState.UPDATE_AVAILABLE,
        )
        .map((extension) =>
          updateExtension(
            extension,
            cwd,
            requestConsent,
            extensionsState.get(extension.name)!.status,
            dispatch,
          ),
        ),
    )
  ).filter((updateInfo) => !!updateInfo);
}

export interface ExtensionUpdateCheckResult {
  state: ExtensionUpdateState;
  error?: string;
}

export async function checkForAllExtensionUpdates(
  extensions: GeminiCLIExtension[],
  dispatch: (action: ExtensionUpdateAction) => void,
): Promise<void> {
  dispatch({ type: 'BATCH_CHECK_START' });
  const promises: Array<Promise<void>> = [];
  for (const extension of extensions) {
    if (!extension.installMetadata) {
      dispatch({
        type: 'SET_STATE',
        payload: {
          name: extension.name,
          state: ExtensionUpdateState.NOT_UPDATABLE,
        },
      });
      continue;
    }
    promises.push(
      checkForExtensionUpdate(extension, (updatedState) => {
        dispatch({
          type: 'SET_STATE',
          payload: { name: extension.name, state: updatedState },
        });
      }),
    );
  }
  await Promise.all(promises);
  dispatch({ type: 'BATCH_CHECK_END' });
}
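A minimal sketch of how `updateExtension` above might be driven by a caller, assuming an auto-approving consent callback and a logging dispatch (both hypothetical; the import paths are illustrative and may differ in the tree).

```ts
import { updateExtension } from './config/extensions/update.js';
import { ExtensionUpdateState } from './ui/state/extensions.js';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';

async function runSingleUpdate(extension: GeminiCLIExtension): Promise<void> {
  const info = await updateExtension(
    extension,
    process.cwd(),
    // Hypothetical consent handler: surface the prompt, then approve.
    async (consent) => {
      console.log(consent);
      return true;
    },
    ExtensionUpdateState.UPDATE_AVAILABLE,
    // Hypothetical dispatch: log each SET_STATE transition.
    (action) => {
      if (action.type === 'SET_STATE') {
        console.log(`${action.payload.name} -> ${action.payload.state}`);
      }
    },
  );
  if (info) {
    console.log(
      `${info.name}: ${info.originalVersion} -> ${info.updatedVersion}`,
    );
  }
}
```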
@@ -51,7 +51,6 @@ import {
import * as fs from 'node:fs'; // fs will be mocked separately
import stripJsonComments from 'strip-json-comments'; // Will be mocked separately
import { isWorkspaceTrusted } from './trustedFolders.js';
import { disableExtension } from './extension.js';

// These imports will get the versions from the vi.mock('./settings.js', ...) factory.
import {
@@ -65,8 +64,6 @@ import {
  needsMigration,
  type Settings,
  loadEnvironment,
  migrateDeprecatedSettings,
  SettingScope,
  SETTINGS_VERSION,
  SETTINGS_VERSION_KEY,
} from './settings.js';
@@ -2730,122 +2727,4 @@ describe('Settings Loading and Merging', () => {
      });
    });
  });
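The `migrateDeprecatedSettings` tests that follow exercise the deprecated `extensions.disabled` list. Roughly, settings shaped like the sketch below (shape taken from those test fixtures) have each entry handed to `disableExtension(name, scope, workspaceDir)` and the deprecated list cleared afterwards.

```ts
// Deprecated shape (pre-migration), as used in the fixtures below.
const deprecatedUserSettings = {
  extensions: {
    disabled: ['user-ext-1', 'shared-ext'],
  },
};

// After migration, each listed name is passed to
// disableExtension(name, scope, workspaceDir) and the list is cleared via
// setValue(scope, 'extensions', { disabled: undefined }).
```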
|
||||
describe('migrateDeprecatedSettings', () => {
|
||||
let mockFsExistsSync: Mocked<typeof fs.existsSync>;
|
||||
let mockFsReadFileSync: Mocked<typeof fs.readFileSync>;
|
||||
let mockDisableExtension: Mocked<typeof disableExtension>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
|
||||
mockFsExistsSync = vi.mocked(fs.existsSync);
|
||||
mockFsReadFileSync = vi.mocked(fs.readFileSync);
|
||||
mockDisableExtension = vi.mocked(disableExtension);
|
||||
|
||||
(mockFsExistsSync as Mock).mockReturnValue(true);
|
||||
vi.mocked(isWorkspaceTrusted).mockReturnValue(true);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should migrate disabled extensions from user and workspace settings', () => {
|
||||
const userSettingsContent = {
|
||||
extensions: {
|
||||
disabled: ['user-ext-1', 'shared-ext'],
|
||||
},
|
||||
};
|
||||
const workspaceSettingsContent = {
|
||||
extensions: {
|
||||
disabled: ['workspace-ext-1', 'shared-ext'],
|
||||
},
|
||||
};
|
||||
|
||||
(mockFsReadFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
|
||||
|
||||
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
|
||||
|
||||
// Check user settings migration
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'user-ext-1',
|
||||
SettingScope.User,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'shared-ext',
|
||||
SettingScope.User,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
|
||||
// Check workspace settings migration
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'workspace-ext-1',
|
||||
SettingScope.Workspace,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'shared-ext',
|
||||
SettingScope.Workspace,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
|
||||
// Check that setValue was called to remove the deprecated setting
|
||||
expect(setValueSpy).toHaveBeenCalledWith(
|
||||
SettingScope.User,
|
||||
'extensions',
|
||||
{
|
||||
disabled: undefined,
|
||||
},
|
||||
);
|
||||
expect(setValueSpy).toHaveBeenCalledWith(
|
||||
SettingScope.Workspace,
|
||||
'extensions',
|
||||
{
|
||||
disabled: undefined,
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('should not do anything if there are no deprecated settings', () => {
|
||||
const userSettingsContent = {
|
||||
extensions: {
|
||||
enabled: ['user-ext-1'],
|
||||
},
|
||||
};
|
||||
const workspaceSettingsContent = {
|
||||
someOtherSetting: 'value',
|
||||
};
|
||||
|
||||
(mockFsReadFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
|
||||
|
||||
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(mockDisableExtension).not.toHaveBeenCalled();
|
||||
expect(setValueSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -30,7 +30,6 @@ import {
|
||||
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
|
||||
import { customDeepMerge, type MergeableObject } from '../utils/deepMerge.js';
|
||||
import { updateSettingsFilePreservingFormat } from '../utils/commentJson.js';
|
||||
import { disableExtension } from './extension.js';
|
||||
|
||||
function getMergeStrategyForPath(path: string[]): MergeStrategy | undefined {
|
||||
let current: SettingDefinition | undefined = undefined;
|
||||
@@ -81,7 +80,6 @@ const MIGRATION_MAP: Record<string, string> = {
|
||||
excludeTools: 'tools.exclude',
|
||||
excludeMCPServers: 'mcp.excluded',
|
||||
excludedProjectEnvVars: 'advanced.excludedEnvVars',
|
||||
extensionManagement: 'experimental.extensionManagement',
|
||||
extensions: 'extensions',
|
||||
fileFiltering: 'context.fileFiltering',
|
||||
folderTrustFeature: 'security.folderTrust.featureEnabled',
|
||||
@@ -903,31 +901,6 @@ export function loadSettings(
|
||||
);
|
||||
}
|
||||
|
||||
export function migrateDeprecatedSettings(
|
||||
loadedSettings: LoadedSettings,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): void {
|
||||
const processScope = (scope: SettingScope) => {
|
||||
const settings = loadedSettings.forScope(scope).settings;
|
||||
if (settings.extensions?.disabled) {
|
||||
console.log(
|
||||
`Migrating deprecated extensions.disabled settings from ${scope} settings...`,
|
||||
);
|
||||
for (const extension of settings.extensions.disabled ?? []) {
|
||||
disableExtension(extension, scope, workspaceDir);
|
||||
}
|
||||
|
||||
const newExtensionsValue = { ...settings.extensions };
|
||||
newExtensionsValue.disabled = undefined;
|
||||
|
||||
loadedSettings.setValue(scope, 'extensions', newExtensionsValue);
|
||||
}
|
||||
};
|
||||
|
||||
processScope(SettingScope.User);
|
||||
processScope(SettingScope.Workspace);
|
||||
}
|
||||
|
||||
export function saveSettings(settingsFile: SettingsFile): void {
|
||||
try {
|
||||
// Ensure the directory exists
|
||||
|
||||
@@ -1207,15 +1207,6 @@ const SETTINGS_SCHEMA = {
|
||||
description: 'Setting to enable experimental features',
|
||||
showInDialog: false,
|
||||
properties: {
|
||||
extensionManagement: {
|
||||
type: 'boolean',
|
||||
label: 'Extension Management',
|
||||
category: 'Experimental',
|
||||
requiresRestart: true,
|
||||
default: true,
|
||||
description: 'Enable extension management features.',
|
||||
showInDialog: false,
|
||||
},
|
||||
visionModelPreview: {
|
||||
type: 'boolean',
|
||||
label: 'Vision Model Preview',
|
||||
@@ -1238,39 +1229,6 @@ const SETTINGS_SCHEMA = {
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
extensions: {
|
||||
type: 'object',
|
||||
label: 'Extensions',
|
||||
category: 'Extensions',
|
||||
requiresRestart: true,
|
||||
default: {},
|
||||
description: 'Settings for extensions.',
|
||||
showInDialog: false,
|
||||
properties: {
|
||||
disabled: {
|
||||
type: 'array',
|
||||
label: 'Disabled Extensions',
|
||||
category: 'Extensions',
|
||||
requiresRestart: true,
|
||||
default: [] as string[],
|
||||
description: 'List of disabled extensions.',
|
||||
showInDialog: false,
|
||||
mergeStrategy: MergeStrategy.UNION,
|
||||
},
|
||||
workspacesWithMigrationNudge: {
|
||||
type: 'array',
|
||||
label: 'Workspaces with Migration Nudge',
|
||||
category: 'Extensions',
|
||||
requiresRestart: false,
|
||||
default: [] as string[],
|
||||
description:
|
||||
'List of workspaces for which the migration nudge has been shown.',
|
||||
showInDialog: false,
|
||||
mergeStrategy: MergeStrategy.UNION,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as const satisfies SettingsSchema;
|
||||
|
||||
export type SettingsSchemaType = typeof SETTINGS_SCHEMA;
|
||||
|
||||
@@ -271,7 +271,6 @@ describe('gemini.tsx main function', () => {
|
||||
);
|
||||
const { loadSettings } = await import('./config/settings.js');
|
||||
const cleanupModule = await import('./utils/cleanup.js');
|
||||
const extensionModule = await import('./config/extension.js');
|
||||
const validatorModule = await import('./validateNonInterActiveAuth.js');
|
||||
const streamJsonModule = await import('./nonInteractive/session.js');
|
||||
const initializerModule = await import('./core/initializer.js');
|
||||
@@ -284,11 +283,6 @@ describe('gemini.tsx main function', () => {
|
||||
vi.mocked(cleanupModule.registerCleanup).mockImplementation(() => {});
|
||||
const runExitCleanupMock = vi.mocked(cleanupModule.runExitCleanup);
|
||||
runExitCleanupMock.mockResolvedValue(undefined);
|
||||
vi.spyOn(extensionModule, 'loadExtensions').mockReturnValue([]);
|
||||
vi.spyOn(
|
||||
extensionModule.ExtensionStorage,
|
||||
'getUserExtensionsDir',
|
||||
).mockReturnValue('/tmp/extensions');
|
||||
vi.spyOn(initializerModule, 'initializeApp').mockResolvedValue({
|
||||
authError: null,
|
||||
themeError: null,
|
||||
|
||||
@@ -15,13 +15,8 @@ import React from 'react';
|
||||
import { validateAuthMethod } from './config/auth.js';
|
||||
import * as cliConfig from './config/config.js';
|
||||
import { loadCliConfig, parseArguments } from './config/config.js';
|
||||
import { ExtensionStorage, loadExtensions } from './config/extension.js';
|
||||
import type { DnsResolutionOrder, LoadedSettings } from './config/settings.js';
|
||||
import {
|
||||
getSettingsWarnings,
|
||||
loadSettings,
|
||||
migrateDeprecatedSettings,
|
||||
} from './config/settings.js';
|
||||
import { getSettingsWarnings, loadSettings } from './config/settings.js';
|
||||
import {
|
||||
initializeApp,
|
||||
type InitializationResult,
|
||||
@@ -107,7 +102,6 @@ function getNodeMemoryArgs(isDebugMode: boolean): string[] {
|
||||
return [];
|
||||
}
|
||||
|
||||
import { ExtensionEnablementManager } from './config/extensions/extensionEnablement.js';
|
||||
import { loadSandboxConfig } from './config/sandboxConfig.js';
|
||||
import { runAcpAgent } from './acp-integration/acpAgent.js';
|
||||
|
||||
@@ -206,10 +200,9 @@ export async function startInteractiveUI(
|
||||
export async function main() {
|
||||
setupUnhandledRejectionHandler();
|
||||
const settings = loadSettings();
|
||||
migrateDeprecatedSettings(settings);
|
||||
await cleanupCheckpoints();
|
||||
|
||||
let argv = await parseArguments(settings.merged);
|
||||
let argv = await parseArguments();
|
||||
|
||||
// Check for invalid input combinations early to prevent crashes
|
||||
if (argv.promptInteractive && !process.stdin.isTTY) {
|
||||
@@ -251,9 +244,9 @@ export async function main() {
|
||||
if (sandboxConfig) {
|
||||
const partialConfig = await loadCliConfig(
|
||||
settings.merged,
|
||||
[],
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
argv,
|
||||
undefined,
|
||||
[],
|
||||
);
|
||||
|
||||
if (!settings.merged.security?.auth?.useExternal) {
|
||||
@@ -335,25 +328,22 @@ export async function main() {
|
||||
// to run Gemini CLI. It is now safe to perform expensive initialization that
|
||||
// may have side effects.
|
||||
{
|
||||
const extensionEnablementManager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
argv.extensions,
|
||||
);
|
||||
const extensions = loadExtensions(extensionEnablementManager);
|
||||
const config = await loadCliConfig(
|
||||
settings.merged,
|
||||
extensions,
|
||||
extensionEnablementManager,
|
||||
argv,
|
||||
process.cwd(),
|
||||
argv.extensions,
|
||||
);
|
||||
registerCleanup(() => config.shutdown());
|
||||
|
||||
if (config.getListExtensions()) {
|
||||
console.log('Installed extensions:');
|
||||
for (const extension of extensions) {
|
||||
console.log(`- ${extension.config.name}`);
|
||||
}
|
||||
process.exit(0);
|
||||
}
|
||||
// FIXME: list extensions after the config initialize
|
||||
// if (config.getListExtensions()) {
|
||||
// console.log('Installed extensions:');
|
||||
// for (const extension of extensions) {
|
||||
// console.log(`- ${extension.config.name}`);
|
||||
// }
|
||||
// process.exit(0);
|
||||
// }
|
||||
|
||||
// Setup unified ConsolePatcher based on interactive mode
|
||||
const isInteractive = config.isInteractive();
|
||||
@@ -399,7 +389,7 @@ export async function main() {
|
||||
}
|
||||
|
||||
if (config.getExperimentalZedIntegration()) {
|
||||
return runAcpAgent(config, settings, extensions, argv);
|
||||
return runAcpAgent(config, settings, argv);
|
||||
}
|
||||
|
||||
let input = config.getQuestion();
|
||||
|
||||
@@ -31,6 +31,7 @@ import { quitCommand } from '../ui/commands/quitCommand.js';
import { restoreCommand } from '../ui/commands/restoreCommand.js';
import { resumeCommand } from '../ui/commands/resumeCommand.js';
import { settingsCommand } from '../ui/commands/settingsCommand.js';
import { skillsCommand } from '../ui/commands/skillsCommand.js';
import { statsCommand } from '../ui/commands/statsCommand.js';
import { summaryCommand } from '../ui/commands/summaryCommand.js';
import { terminalSetupCommand } from '../ui/commands/terminalSetupCommand.js';
@@ -78,6 +79,7 @@ export class BuiltinCommandLoader implements ICommandLoader {
      quitCommand,
      restoreCommand(this.config),
      resumeCommand,
      ...(this.config?.getExperimentalSkills?.() ? [skillsCommand] : []),
      statsCommand,
      summaryCommand,
      themeCommand,
packages/cli/src/services/FileCommandLoader-extension.test.ts (new file, 327 lines)
@@ -0,0 +1,327 @@
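The new test file below exercises the `commands` field of `qwen-extension.json`. As the tests show, the field may be a single path, an array of paths, or omitted entirely (falling back to the extension's `commands/` directory). A sketch of the accepted shapes, written as TypeScript literals with illustrative names and values:

```ts
// One directory, resolved relative to the extension root.
const singleDirConfig = {
  name: 'my-ext',
  version: '1.0.0',
  commands: 'custom-cmds',
};

// Several directories; absolute paths are also accepted.
const multiDirConfig = {
  name: 'my-ext',
  version: '1.0.0',
  commands: ['commands1', 'commands2'],
};

// Omitting `commands` falls back to '<extension>/commands'.
const defaultConfig = {
  name: 'my-ext',
  version: '1.0.0',
};
```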
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import * as os from 'node:os';
|
||||
import { FileCommandLoader } from './FileCommandLoader.js';
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import { Storage } from '@qwen-code/qwen-code-core';
|
||||
|
||||
describe('FileCommandLoader - Extension Commands Support', () => {
|
||||
let tempDir: string;
|
||||
let mockConfig: Partial<Config>;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await fs.promises.mkdtemp(
|
||||
path.join(os.tmpdir(), 'file-command-loader-ext-test-'),
|
||||
);
|
||||
|
||||
mockConfig = {
|
||||
getFolderTrustFeature: () => false,
|
||||
getFolderTrust: () => true,
|
||||
getProjectRoot: () => tempDir,
|
||||
storage: new Storage(tempDir),
|
||||
getExtensions: () => [],
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.promises.rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should load commands from extension with config.commands path', async () => {
|
||||
// Setup extension structure
|
||||
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'test-ext');
|
||||
const customCommandsDir = path.join(extensionDir, 'custom-cmds');
|
||||
await fs.promises.mkdir(customCommandsDir, { recursive: true });
|
||||
|
||||
// Create extension config with custom commands path
|
||||
const extensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
commands: 'custom-cmds',
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
// Create a test command in custom directory
|
||||
const commandContent =
|
||||
'---\ndescription: Test command from extension\n---\nDo something';
|
||||
await fs.promises.writeFile(
|
||||
path.join(customCommandsDir, 'test.md'),
|
||||
commandContent,
|
||||
);
|
||||
|
||||
// Mock config to return the extension
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'test-ext',
|
||||
config: extensionConfig,
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
contextFiles: [],
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('test-ext:test');
|
||||
expect(commands[0].description).toBe(
|
||||
'[test-ext] Test command from extension',
|
||||
);
|
||||
});
|
||||
|
||||
it('should load commands from extension with multiple commands paths', async () => {
|
||||
// Setup extension structure
|
||||
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'multi-ext');
|
||||
const cmdsDir1 = path.join(extensionDir, 'commands1');
|
||||
const cmdsDir2 = path.join(extensionDir, 'commands2');
|
||||
await fs.promises.mkdir(cmdsDir1, { recursive: true });
|
||||
await fs.promises.mkdir(cmdsDir2, { recursive: true });
|
||||
|
||||
// Create extension config with multiple commands paths
|
||||
const extensionConfig = {
|
||||
name: 'multi-ext',
|
||||
version: '1.0.0',
|
||||
commands: ['commands1', 'commands2'],
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
// Create test commands in both directories
|
||||
await fs.promises.writeFile(
|
||||
path.join(cmdsDir1, 'cmd1.md'),
|
||||
'---\n---\nCommand 1',
|
||||
);
|
||||
await fs.promises.writeFile(
|
||||
path.join(cmdsDir2, 'cmd2.md'),
|
||||
'---\n---\nCommand 2',
|
||||
);
|
||||
|
||||
// Mock config to return the extension
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'multi-ext',
|
||||
config: extensionConfig,
|
||||
contextFiles: [],
|
||||
name: 'multi-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(2);
|
||||
const commandNames = commands.map((c) => c.name).sort();
|
||||
expect(commandNames).toEqual(['multi-ext:cmd1', 'multi-ext:cmd2']);
|
||||
});
|
||||
|
||||
it('should fallback to default "commands" directory when config.commands not specified', async () => {
|
||||
// Setup extension structure with default commands directory
|
||||
const extensionDir = path.join(
|
||||
tempDir,
|
||||
'.qwen',
|
||||
'extensions',
|
||||
'default-ext',
|
||||
);
|
||||
const defaultCommandsDir = path.join(extensionDir, 'commands');
|
||||
await fs.promises.mkdir(defaultCommandsDir, { recursive: true });
|
||||
|
||||
// Create extension config without commands field
|
||||
const extensionConfig = {
|
||||
name: 'default-ext',
|
||||
version: '1.0.0',
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
// Create a test command in default directory
|
||||
await fs.promises.writeFile(
|
||||
path.join(defaultCommandsDir, 'default.md'),
|
||||
'---\n---\nDefault command',
|
||||
);
|
||||
|
||||
// Mock config to return the extension
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'default-ext',
|
||||
config: extensionConfig,
|
||||
contextFiles: [],
|
||||
name: 'default-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('default-ext:default');
|
||||
});
|
||||
|
||||
it('should handle extension without commands directory gracefully', async () => {
|
||||
// Setup extension structure without commands directory
|
||||
const extensionDir = path.join(
|
||||
tempDir,
|
||||
'.qwen',
|
||||
'extensions',
|
||||
'no-cmds-ext',
|
||||
);
|
||||
await fs.promises.mkdir(extensionDir, { recursive: true });
|
||||
|
||||
// Create extension config
|
||||
const extensionConfig = {
|
||||
name: 'no-cmds-ext',
|
||||
version: '1.0.0',
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
// Mock config to return the extension
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'no-cmds-ext',
|
||||
config: extensionConfig,
|
||||
contextFiles: [],
|
||||
name: 'no-cmds-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
// Should not throw and return empty array
|
||||
expect(commands).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should prefix extension commands with extension name', async () => {
|
||||
// Setup extension
|
||||
const extensionDir = path.join(
|
||||
tempDir,
|
||||
'.qwen',
|
||||
'extensions',
|
||||
'prefix-ext',
|
||||
);
|
||||
const commandsDir = path.join(extensionDir, 'commands');
|
||||
await fs.promises.mkdir(commandsDir, { recursive: true });
|
||||
|
||||
const extensionConfig = {
|
||||
name: 'prefix-ext',
|
||||
version: '1.0.0',
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
await fs.promises.writeFile(
|
||||
path.join(commandsDir, 'mycommand.md'),
|
||||
'---\n---\nMy command',
|
||||
);
|
||||
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'prefix-ext',
|
||||
config: extensionConfig,
|
||||
contextFiles: [],
|
||||
name: 'prefix-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('prefix-ext:mycommand');
|
||||
});
|
||||
|
||||
it('should load commands from multiple extensions in alphabetical order', async () => {
|
||||
// Setup two extensions
|
||||
const ext1Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-b');
|
||||
const ext2Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-a');
|
||||
|
||||
await fs.promises.mkdir(path.join(ext1Dir, 'commands'), {
|
||||
recursive: true,
|
||||
});
|
||||
await fs.promises.mkdir(path.join(ext2Dir, 'commands'), {
|
||||
recursive: true,
|
||||
});
|
||||
|
||||
// Extension B
|
||||
await fs.promises.writeFile(
|
||||
path.join(ext1Dir, 'qwen-extension.json'),
|
||||
JSON.stringify({ name: 'ext-b', version: '1.0.0' }),
|
||||
);
|
||||
await fs.promises.writeFile(
|
||||
path.join(ext1Dir, 'commands', 'cmd.md'),
|
||||
'---\n---\nCommand B',
|
||||
);
|
||||
|
||||
// Extension A
|
||||
await fs.promises.writeFile(
|
||||
path.join(ext2Dir, 'qwen-extension.json'),
|
||||
JSON.stringify({ name: 'ext-a', version: '1.0.0' }),
|
||||
);
|
||||
await fs.promises.writeFile(
|
||||
path.join(ext2Dir, 'commands', 'cmd.md'),
|
||||
'---\n---\nCommand A',
|
||||
);
|
||||
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'ext-b',
|
||||
config: { name: 'ext-b', version: '1.0.0' },
|
||||
contextFiles: [],
|
||||
name: 'ext-b',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: ext1Dir,
|
||||
},
|
||||
{
|
||||
id: 'ext-a',
|
||||
config: { name: 'ext-a', version: '1.0.0' },
|
||||
contextFiles: [],
|
||||
name: 'ext-a',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: ext2Dir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(2);
|
||||
// Extensions are sorted alphabetically, so ext-a comes before ext-b
|
||||
expect(commands[0].name).toBe('ext-a:cmd');
|
||||
expect(commands[1].name).toBe('ext-b:cmd');
|
||||
});
|
||||
});
|
||||
packages/cli/src/services/FileCommandLoader-markdown.test.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import os from 'node:os';
|
||||
import { FileCommandLoader } from './FileCommandLoader.js';
|
||||
|
||||
describe('FileCommandLoader - Markdown support', () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create a temporary directory for test commands
|
||||
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-md-test-'));
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean up
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should load markdown commands with frontmatter', async () => {
|
||||
// Create a test markdown command file
|
||||
const mdContent = `---
|
||||
description: Test markdown command
|
||||
---
|
||||
|
||||
This is a test prompt from markdown.`;
|
||||
|
||||
const commandPath = path.join(tempDir, 'test-command.md');
|
||||
await fs.writeFile(commandPath, mdContent, 'utf-8');
|
||||
|
||||
// Create loader with temp dir as command source
|
||||
const loader = new FileCommandLoader(null);
|
||||
|
||||
// Mock the getCommandDirectories to return our temp dir
|
||||
const originalMethod = loader['getCommandDirectories'];
|
||||
loader['getCommandDirectories'] = () => [{ path: tempDir }];
|
||||
|
||||
try {
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('test-command');
|
||||
expect(commands[0].description).toBe('Test markdown command');
|
||||
} finally {
|
||||
// Restore original method
|
||||
loader['getCommandDirectories'] = originalMethod;
|
||||
}
|
||||
});
|
||||
|
||||
it('should load markdown commands without frontmatter', async () => {
|
||||
// Create a test markdown command file without frontmatter
|
||||
const mdContent = 'This is a simple prompt without frontmatter.';
|
||||
|
||||
const commandPath = path.join(tempDir, 'simple-command.md');
|
||||
await fs.writeFile(commandPath, mdContent, 'utf-8');
|
||||
|
||||
const loader = new FileCommandLoader(null);
|
||||
const originalMethod = loader['getCommandDirectories'];
|
||||
loader['getCommandDirectories'] = () => [{ path: tempDir }];
|
||||
|
||||
try {
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
const simpleCommand = commands.find(
|
||||
(cmd) => cmd.name === 'simple-command',
|
||||
);
|
||||
expect(simpleCommand).toBeDefined();
|
||||
expect(simpleCommand?.description).toContain('Custom command from');
|
||||
} finally {
|
||||
loader['getCommandDirectories'] = originalMethod;
|
||||
}
|
||||
});
|
||||
|
||||
it('should load both toml and markdown commands', async () => {
|
||||
// Create both TOML and Markdown files
|
||||
const tomlContent = `prompt = "TOML prompt"
|
||||
description = "TOML command"`;
|
||||
|
||||
const mdContent = `---
|
||||
description: Markdown command
|
||||
---
|
||||
|
||||
Markdown prompt`;
|
||||
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'toml-cmd.toml'),
|
||||
tomlContent,
|
||||
'utf-8',
|
||||
);
|
||||
await fs.writeFile(path.join(tempDir, 'md-cmd.md'), mdContent, 'utf-8');
|
||||
|
||||
const loader = new FileCommandLoader(null);
|
||||
const originalMethod = loader['getCommandDirectories'];
|
||||
loader['getCommandDirectories'] = () => [{ path: tempDir }];
|
||||
|
||||
try {
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
const tomlCommand = commands.find((cmd) => cmd.name === 'toml-cmd');
|
||||
const mdCommand = commands.find((cmd) => cmd.name === 'md-cmd');
|
||||
|
||||
expect(tomlCommand).toBeDefined();
|
||||
expect(tomlCommand?.description).toBe('TOML command');
|
||||
|
||||
expect(mdCommand).toBeDefined();
|
||||
expect(mdCommand?.description).toBe('Markdown command');
|
||||
} finally {
|
||||
loader['getCommandDirectories'] = originalMethod;
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -568,9 +568,9 @@ describe('FileCommandLoader', () => {
|
||||
|
||||
expect(commands).toHaveLength(3);
|
||||
const commandNames = commands.map((cmd) => cmd.name);
|
||||
expect(commandNames).toEqual(['user', 'project', 'ext']);
|
||||
expect(commandNames).toEqual(['user', 'project', 'test-ext:ext']);
|
||||
|
||||
const extCommand = commands.find((cmd) => cmd.name === 'ext');
|
||||
const extCommand = commands.find((cmd) => cmd.name === 'test-ext:ext');
|
||||
expect(extCommand?.extensionName).toBe('test-ext');
|
||||
expect(extCommand?.description).toMatch(/^\[test-ext\]/);
|
||||
});
|
||||
@@ -656,14 +656,14 @@ describe('FileCommandLoader', () => {
|
||||
expect(result1.content).toEqual([{ text: 'Project deploy command' }]);
|
||||
}
|
||||
|
||||
expect(commands[2].name).toBe('deploy');
|
||||
expect(commands[2].name).toBe('test-ext:deploy');
|
||||
expect(commands[2].extensionName).toBe('test-ext');
|
||||
expect(commands[2].description).toMatch(/^\[test-ext\]/);
|
||||
const result2 = await commands[2].action?.(
|
||||
createMockCommandContext({
|
||||
invocation: {
|
||||
raw: '/deploy',
|
||||
name: 'deploy',
|
||||
raw: '/test-ext:deploy',
|
||||
name: 'test-ext:deploy',
|
||||
args: '',
|
||||
},
|
||||
}),
|
||||
@@ -729,7 +729,7 @@ describe('FileCommandLoader', () => {
|
||||
const commands = await loader.loadCommands(signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('active');
|
||||
expect(commands[0].name).toBe('active-ext:active');
|
||||
expect(commands[0].extensionName).toBe('active-ext');
|
||||
expect(commands[0].description).toMatch(/^\[active-ext\]/);
|
||||
});
|
||||
@@ -803,17 +803,17 @@ describe('FileCommandLoader', () => {
|
||||
expect(commands).toHaveLength(3);
|
||||
|
||||
const commandNames = commands.map((cmd) => cmd.name).sort();
|
||||
expect(commandNames).toEqual(['b:c', 'b:d:e', 'simple']);
|
||||
expect(commandNames).toEqual(['a:b:c', 'a:b:d:e', 'a:simple']);
|
||||
|
||||
const nestedCmd = commands.find((cmd) => cmd.name === 'b:c');
|
||||
const nestedCmd = commands.find((cmd) => cmd.name === 'a:b:c');
|
||||
expect(nestedCmd?.extensionName).toBe('a');
|
||||
expect(nestedCmd?.description).toMatch(/^\[a\]/);
|
||||
expect(nestedCmd).toBeDefined();
|
||||
const result = await nestedCmd!.action?.(
|
||||
createMockCommandContext({
|
||||
invocation: {
|
||||
raw: '/b:c',
|
||||
name: 'b:c',
|
||||
raw: '/a:b:c',
|
||||
name: 'a:b:c',
|
||||
args: '',
|
||||
},
|
||||
}),
|
||||
|
||||
@@ -5,34 +5,23 @@
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'node:fs';
|
||||
import * as fsSync from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import toml from '@iarna/toml';
|
||||
import { glob } from 'glob';
|
||||
import { z } from 'zod';
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import { Storage } from '@qwen-code/qwen-code-core';
|
||||
import { EXTENSIONS_CONFIG_FILENAME, Storage } from '@qwen-code/qwen-code-core';
|
||||
import type { ICommandLoader } from './types.js';
|
||||
import type {
|
||||
CommandContext,
|
||||
SlashCommand,
|
||||
SlashCommandActionReturn,
|
||||
} from '../ui/commands/types.js';
|
||||
import { CommandKind } from '../ui/commands/types.js';
|
||||
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
|
||||
import type {
|
||||
IPromptProcessor,
|
||||
PromptPipelineContent,
|
||||
} from './prompt-processors/types.js';
|
||||
import {
|
||||
SHORTHAND_ARGS_PLACEHOLDER,
|
||||
SHELL_INJECTION_TRIGGER,
|
||||
AT_FILE_INJECTION_TRIGGER,
|
||||
} from './prompt-processors/types.js';
|
||||
parseMarkdownCommand,
|
||||
MarkdownCommandDefSchema,
|
||||
} from './markdown-command-parser.js';
|
||||
import {
|
||||
ConfirmationRequiredError,
|
||||
ShellProcessor,
|
||||
} from './prompt-processors/shellProcessor.js';
|
||||
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
|
||||
createSlashCommandFromDefinition,
|
||||
type CommandDefinition,
|
||||
} from './command-factory.js';
|
||||
import type { SlashCommand } from '../ui/commands/types.js';
|
||||
|
||||
interface CommandDirectory {
|
||||
path: string;
|
||||
@@ -96,7 +85,12 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
const commandDirs = this.getCommandDirectories();
|
||||
for (const dirInfo of commandDirs) {
|
||||
try {
|
||||
const files = await glob('**/*.toml', {
|
||||
// Scan both .toml and .md files
|
||||
const tomlFiles = await glob('**/*.toml', {
|
||||
...globOptions,
|
||||
cwd: dirInfo.path,
|
||||
});
|
||||
const mdFiles = await glob('**/*.md', {
|
||||
...globOptions,
|
||||
cwd: dirInfo.path,
|
||||
});
|
||||
@@ -105,18 +99,28 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
return [];
|
||||
}
|
||||
|
||||
const commandPromises = files.map((file) =>
|
||||
this.parseAndAdaptFile(
|
||||
// Process TOML files
|
||||
const tomlCommandPromises = tomlFiles.map((file) =>
|
||||
this.parseAndAdaptTomlFile(
|
||||
path.join(dirInfo.path, file),
|
||||
dirInfo.path,
|
||||
dirInfo.extensionName,
|
||||
),
|
||||
);
|
||||
|
||||
const commands = (await Promise.all(commandPromises)).filter(
|
||||
(cmd): cmd is SlashCommand => cmd !== null,
|
||||
// Process Markdown files
|
||||
const mdCommandPromises = mdFiles.map((file) =>
|
||||
this.parseAndAdaptMarkdownFile(
|
||||
path.join(dirInfo.path, file),
|
||||
dirInfo.path,
|
||||
dirInfo.extensionName,
|
||||
),
|
||||
);
|
||||
|
||||
const commands = (
|
||||
await Promise.all([...tomlCommandPromises, ...mdCommandPromises])
|
||||
).filter((cmd): cmd is SlashCommand => cmd !== null);
|
||||
|
||||
// Add all commands without deduplication
|
||||
allCommands.push(...commands);
|
||||
} catch (error) {
|
||||
@@ -159,17 +163,73 @@ export class FileCommandLoader implements ICommandLoader {
        .filter((ext) => ext.isActive)
        .sort((a, b) => a.name.localeCompare(b.name)); // Sort alphabetically for deterministic loading

      const extensionCommandDirs = activeExtensions.map((ext) => ({
        path: path.join(ext.path, 'commands'),
        extensionName: ext.name,
      }));
      // Collect command directories from each extension
      for (const ext of activeExtensions) {
        // Get commands paths from extension config
        const commandsPaths = this.getExtensionCommandsPaths(ext);

      dirs.push(...extensionCommandDirs);
        for (const cmdPath of commandsPaths) {
          dirs.push({
            path: cmdPath,
            extensionName: ext.name,
          });
        }
      }
    }

    return dirs;
  }

  /**
   * Get commands paths from an extension.
   * Returns paths from config.commands if specified, otherwise defaults to 'commands' directory.
   */
  private getExtensionCommandsPaths(ext: {
    path: string;
    name: string;
  }): string[] {
    // Try to get extension config
    try {
      const configPath = path.join(ext.path, EXTENSIONS_CONFIG_FILENAME);
      if (fsSync.existsSync(configPath)) {
        const configContent = fsSync.readFileSync(configPath, 'utf-8');
        const config = JSON.parse(configContent);

        if (config.commands) {
          const commandsArray = Array.isArray(config.commands)
            ? config.commands
            : [config.commands];

          return commandsArray
            .map((cmdPath: string) =>
              path.isAbsolute(cmdPath) ? cmdPath : path.join(ext.path, cmdPath),
            )
            .filter((cmdPath: string) => {
              try {
                return fsSync.existsSync(cmdPath);
              } catch {
                return false;
              }
            });
        }
      }
    } catch (error) {
      console.warn(`Failed to read extension config for ${ext.name}:`, error);
    }

    // Default fallback: use 'commands' directory
    const defaultPath = path.join(ext.path, 'commands');
    try {
      if (fsSync.existsSync(defaultPath)) {
        return [defaultPath];
      }
    } catch {
      // Ignore
    }

    return [];
  }

  /**
   * Parses a single .toml file and transforms it into a SlashCommand object.
   * @param filePath The absolute path to the .toml file.
||||
@@ -177,7 +237,7 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
* @param extensionName Optional extension name to prefix commands with.
|
||||
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
|
||||
*/
|
||||
private async parseAndAdaptFile(
|
||||
private async parseAndAdaptTomlFile(
|
||||
filePath: string,
|
||||
baseDir: string,
|
||||
extensionName?: string,
|
||||
@@ -216,104 +276,79 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
|
||||
const validDef = validationResult.data;
|
||||
|
||||
const relativePathWithExt = path.relative(baseDir, filePath);
|
||||
const relativePath = relativePathWithExt.substring(
|
||||
0,
|
||||
relativePathWithExt.length - 5, // length of '.toml'
|
||||
);
|
||||
const baseCommandName = relativePath
|
||||
.split(path.sep)
|
||||
// Sanitize each path segment to prevent ambiguity. Since ':' is our
|
||||
// namespace separator, we replace any literal colons in filenames
|
||||
// with underscores to avoid naming conflicts.
|
||||
.map((segment) => segment.replaceAll(':', '_'))
|
||||
.join(':');
|
||||
|
||||
// Add extension name tag for extension commands
|
||||
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
|
||||
let description = validDef.description || defaultDescription;
|
||||
if (extensionName) {
|
||||
description = `[${extensionName}] ${description}`;
|
||||
}
|
||||
|
||||
const processors: IPromptProcessor[] = [];
|
||||
const usesArgs = validDef.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
|
||||
const usesShellInjection = validDef.prompt.includes(
|
||||
SHELL_INJECTION_TRIGGER,
|
||||
);
|
||||
const usesAtFileInjection = validDef.prompt.includes(
|
||||
AT_FILE_INJECTION_TRIGGER,
|
||||
);
|
||||
|
||||
// 1. @-File Injection (Security First).
|
||||
// This runs first to ensure we're not executing shell commands that
|
||||
// could dynamically generate malicious @-paths.
|
||||
if (usesAtFileInjection) {
|
||||
processors.push(new AtFileProcessor(baseCommandName));
|
||||
}
|
||||
|
||||
// 2. Argument and Shell Injection.
|
||||
// This runs after file content has been safely injected.
|
||||
if (usesShellInjection || usesArgs) {
|
||||
processors.push(new ShellProcessor(baseCommandName));
|
||||
}
|
||||
|
||||
// 3. Default Argument Handling.
|
||||
// Appends the raw invocation if no explicit {{args}} are used.
|
||||
if (!usesArgs) {
|
||||
processors.push(new DefaultArgumentProcessor());
|
||||
}
|
||||
|
||||
return {
|
||||
name: baseCommandName,
|
||||
description,
|
||||
kind: CommandKind.FILE,
|
||||
// Use factory to create command
|
||||
return createSlashCommandFromDefinition(
|
||||
filePath,
|
||||
baseDir,
|
||||
validDef,
|
||||
extensionName,
|
||||
action: async (
|
||||
context: CommandContext,
|
||||
_args: string,
|
||||
): Promise<SlashCommandActionReturn> => {
|
||||
if (!context.invocation) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Critical error: Command '${baseCommandName}' was executed without invocation context.`,
|
||||
);
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: [{ text: validDef.prompt }], // Fallback to unprocessed prompt
|
||||
};
|
||||
}
|
||||
'.toml',
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
let processedContent: PromptPipelineContent = [
|
||||
{ text: validDef.prompt },
|
||||
];
|
||||
for (const processor of processors) {
|
||||
processedContent = await processor.process(
|
||||
processedContent,
|
||||
context,
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Parses a single .md file and transforms it into a SlashCommand object.
|
||||
* @param filePath The absolute path to the .md file.
|
||||
* @param baseDir The root command directory for name calculation.
|
||||
* @param extensionName Optional extension name to prefix commands with.
|
||||
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
|
||||
*/
|
||||
private async parseAndAdaptMarkdownFile(
|
||||
filePath: string,
|
||||
baseDir: string,
|
||||
extensionName?: string,
|
||||
): Promise<SlashCommand | null> {
|
||||
let fileContent: string;
|
||||
try {
|
||||
fileContent = await fs.readFile(filePath, 'utf-8');
|
||||
} catch (error: unknown) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Failed to read file ${filePath}:`,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: processedContent,
|
||||
};
|
||||
} catch (e) {
|
||||
// Check if it's our specific error type
|
||||
if (e instanceof ConfirmationRequiredError) {
|
||||
// Halt and request confirmation from the UI layer.
|
||||
return {
|
||||
type: 'confirm_shell_commands',
|
||||
commandsToConfirm: e.commandsToConfirm,
|
||||
originalInvocation: {
|
||||
raw: context.invocation.raw,
|
||||
},
|
||||
};
|
||||
}
|
||||
// Re-throw other errors to be handled by the global error handler.
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
let parsed: ReturnType<typeof parseMarkdownCommand>;
|
||||
try {
|
||||
parsed = parseMarkdownCommand(fileContent);
|
||||
} catch (error: unknown) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Failed to parse Markdown file ${filePath}:`,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const validationResult = MarkdownCommandDefSchema.safeParse(parsed);
|
||||
|
||||
if (!validationResult.success) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Skipping invalid command file: ${filePath}. Validation errors:`,
|
||||
validationResult.error.flatten(),
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const validDef = validationResult.data;
|
||||
|
||||
// Convert to CommandDefinition format
|
||||
const definition: CommandDefinition = {
|
||||
prompt: validDef.prompt,
|
||||
description:
|
||||
validDef.frontmatter?.description &&
|
||||
typeof validDef.frontmatter.description === 'string'
|
||||
? validDef.frontmatter.description
|
||||
: undefined,
|
||||
};
|
||||
|
||||
// Use factory to create command
|
||||
return createSlashCommandFromDefinition(
|
||||
filePath,
|
||||
baseDir,
|
||||
definition,
|
||||
extensionName,
|
||||
'.md',
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
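The config.commands handling above resolves each entry against the extension root. A minimal sketch of that resolution, using a hypothetical extension root and entries:

import path from 'node:path';

// Hypothetical extension root and config.commands entries (illustration only).
const extRoot = '/home/user/.qwen/extensions/my-ext';
const configured = ['commands', '/shared/team-commands'];

// Relative entries resolve against the extension root; absolute entries are kept as-is,
// mirroring the getExtensionCommandsPaths logic in the diff above.
const resolved = configured.map((entry) =>
  path.isAbsolute(entry) ? entry : path.join(extRoot, entry),
);
console.log(resolved);
// ['/home/user/.qwen/extensions/my-ext/commands', '/shared/team-commands']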
packages/cli/src/services/command-factory.ts (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* This file contains helper functions for FileCommandLoader to create SlashCommand
|
||||
* objects from parsed command definitions (TOML or Markdown).
|
||||
*/
|
||||
|
||||
import path from 'node:path';
|
||||
import type {
|
||||
CommandContext,
|
||||
SlashCommand,
|
||||
SlashCommandActionReturn,
|
||||
} from '../ui/commands/types.js';
|
||||
import { CommandKind } from '../ui/commands/types.js';
|
||||
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
|
||||
import type {
|
||||
IPromptProcessor,
|
||||
PromptPipelineContent,
|
||||
} from './prompt-processors/types.js';
|
||||
import {
|
||||
SHORTHAND_ARGS_PLACEHOLDER,
|
||||
SHELL_INJECTION_TRIGGER,
|
||||
AT_FILE_INJECTION_TRIGGER,
|
||||
} from './prompt-processors/types.js';
|
||||
import {
|
||||
ConfirmationRequiredError,
|
||||
ShellProcessor,
|
||||
} from './prompt-processors/shellProcessor.js';
|
||||
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
|
||||
|
||||
export interface CommandDefinition {
|
||||
prompt: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a SlashCommand from a parsed command definition.
|
||||
* This function is used by both TOML and Markdown command loaders.
|
||||
*
|
||||
* @param filePath The absolute path to the command file
|
||||
* @param baseDir The root command directory for name calculation
|
||||
* @param definition The parsed command definition (prompt and optional description)
|
||||
* @param extensionName Optional extension name to prefix commands with
|
||||
* @param fileExtension The file extension (e.g., '.toml' or '.md')
|
||||
* @returns A SlashCommand object
|
||||
*/
|
||||
export function createSlashCommandFromDefinition(
|
||||
filePath: string,
|
||||
baseDir: string,
|
||||
definition: CommandDefinition,
|
||||
extensionName: string | undefined,
|
||||
fileExtension: string,
|
||||
): SlashCommand {
|
||||
const relativePathWithExt = path.relative(baseDir, filePath);
|
||||
const relativePath = relativePathWithExt.substring(
|
||||
0,
|
||||
relativePathWithExt.length - fileExtension.length,
|
||||
);
|
||||
const baseCommandName = relativePath
|
||||
.split(path.sep)
|
||||
// Sanitize each path segment to prevent ambiguity. Since ':' is our
|
||||
// namespace separator, we replace any literal colons in filenames
|
||||
// with underscores to avoid naming conflicts.
|
||||
.map((segment) => segment.replaceAll(':', '_'))
|
||||
.join(':');
|
||||
|
||||
// Prefix command name with extension name if provided
|
||||
const commandName = extensionName
|
||||
? `${extensionName}:${baseCommandName}`
|
||||
: baseCommandName;
|
||||
|
||||
// Add extension name tag for extension commands
|
||||
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
|
||||
let description = definition.description || defaultDescription;
|
||||
if (extensionName) {
|
||||
description = `[${extensionName}] ${description}`;
|
||||
}
|
||||
|
||||
const processors: IPromptProcessor[] = [];
|
||||
const usesArgs = definition.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
|
||||
const usesShellInjection = definition.prompt.includes(
|
||||
SHELL_INJECTION_TRIGGER,
|
||||
);
|
||||
const usesAtFileInjection = definition.prompt.includes(
|
||||
AT_FILE_INJECTION_TRIGGER,
|
||||
);
|
||||
|
||||
// 1. @-File Injection (Security First).
|
||||
// This runs first to ensure we're not executing shell commands that
|
||||
// could dynamically generate malicious @-paths.
|
||||
if (usesAtFileInjection) {
|
||||
processors.push(new AtFileProcessor(baseCommandName));
|
||||
}
|
||||
|
||||
// 2. Argument and Shell Injection.
|
||||
// This runs after file content has been safely injected.
|
||||
if (usesShellInjection || usesArgs) {
|
||||
processors.push(new ShellProcessor(baseCommandName));
|
||||
}
|
||||
|
||||
// 3. Default Argument Handling.
|
||||
// Appends the raw invocation if no explicit {{args}} are used.
|
||||
if (!usesArgs) {
|
||||
processors.push(new DefaultArgumentProcessor());
|
||||
}
|
||||
|
||||
return {
|
||||
name: commandName,
|
||||
description,
|
||||
kind: CommandKind.FILE,
|
||||
extensionName,
|
||||
action: async (
|
||||
context: CommandContext,
|
||||
_args: string,
|
||||
): Promise<SlashCommandActionReturn> => {
|
||||
if (!context.invocation) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Critical error: Command '${commandName}' was executed without invocation context.`,
|
||||
);
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: [{ text: definition.prompt }], // Fallback to unprocessed prompt
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
let processedContent: PromptPipelineContent = [
|
||||
{ text: definition.prompt },
|
||||
];
|
||||
for (const processor of processors) {
|
||||
processedContent = await processor.process(processedContent, context);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: processedContent,
|
||||
};
|
||||
} catch (e) {
|
||||
// Check if it's our specific error type
|
||||
if (e instanceof ConfirmationRequiredError) {
|
||||
// Halt and request confirmation from the UI layer.
|
||||
return {
|
||||
type: 'confirm_shell_commands',
|
||||
commandsToConfirm: e.commandsToConfirm,
|
||||
originalInvocation: {
|
||||
raw: context.invocation.raw,
|
||||
},
|
||||
};
|
||||
}
|
||||
// Re-throw other errors to be handled by the global error handler.
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
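For reference, a minimal sketch of the name derivation the factory above performs, with made-up paths; the shipped code additionally prefixes the extension name when one is given:

import path from 'node:path';

// Mirrors the relative-path to namespaced-name logic in createSlashCommandFromDefinition.
function commandNameFor(baseDir: string, filePath: string, fileExtension: string): string {
  const rel = path.relative(baseDir, filePath);
  return rel
    .slice(0, rel.length - fileExtension.length)
    .split(path.sep)
    // ':' is the namespace separator, so literal colons become underscores.
    .map((segment) => segment.replaceAll(':', '_'))
    .join(':');
}

// commandNameFor('/cmds', '/cmds/git/commit.md', '.md') === 'git:commit'
// With an extension name, the factory yields e.g. 'my-ext:git:commit'.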
packages/cli/src/services/command-migration-tool.test.ts (new file, 253 lines)
@@ -0,0 +1,253 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import os from 'node:os';
|
||||
import {
|
||||
detectTomlCommands,
|
||||
migrateTomlCommands,
|
||||
generateMigrationPrompt,
|
||||
} from './command-migration-tool.js';
|
||||
|
||||
describe('command-migration-tool', () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-migration-test-'));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('detectTomlCommands', () => {
|
||||
it('should detect TOML files in directory', async () => {
|
||||
// Create some TOML files
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'cmd1.toml'),
|
||||
'prompt = "test"',
|
||||
'utf-8',
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'cmd2.toml'),
|
||||
'prompt = "test"',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const tomlFiles = await detectTomlCommands(tempDir);
|
||||
|
||||
expect(tomlFiles).toHaveLength(2);
|
||||
expect(tomlFiles).toContain('cmd1.toml');
|
||||
expect(tomlFiles).toContain('cmd2.toml');
|
||||
});
|
||||
|
||||
it('should detect TOML files in subdirectories', async () => {
|
||||
const subdir = path.join(tempDir, 'subdir');
|
||||
await fs.mkdir(subdir);
|
||||
await fs.writeFile(
|
||||
path.join(subdir, 'nested.toml'),
|
||||
'prompt = "test"',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const tomlFiles = await detectTomlCommands(tempDir);
|
||||
|
||||
expect(tomlFiles).toContain('subdir/nested.toml');
|
||||
});
|
||||
|
||||
it('should return empty array for non-existent directory', async () => {
|
||||
const nonExistent = path.join(tempDir, 'does-not-exist');
|
||||
|
||||
const tomlFiles = await detectTomlCommands(nonExistent);
|
||||
|
||||
expect(tomlFiles).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not detect non-TOML files', async () => {
|
||||
await fs.writeFile(path.join(tempDir, 'file.txt'), 'text', 'utf-8');
|
||||
await fs.writeFile(path.join(tempDir, 'file.md'), 'markdown', 'utf-8');
|
||||
|
||||
const tomlFiles = await detectTomlCommands(tempDir);
|
||||
|
||||
expect(tomlFiles).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('migrateTomlCommands', () => {
|
||||
it('should migrate TOML file to Markdown', async () => {
|
||||
const tomlContent = `prompt = "Test prompt"
|
||||
description = "Test description"`;
|
||||
|
||||
await fs.writeFile(path.join(tempDir, 'test.toml'), tomlContent, 'utf-8');
|
||||
|
||||
const result = await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
createBackup: true,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.convertedFiles).toContain('test.toml');
|
||||
expect(result.failedFiles).toHaveLength(0);
|
||||
|
||||
// Check Markdown file was created
|
||||
const mdPath = path.join(tempDir, 'test.md');
|
||||
const mdContent = await fs.readFile(mdPath, 'utf-8');
|
||||
expect(mdContent).toContain('description: Test description');
|
||||
expect(mdContent).toContain('Test prompt');
|
||||
|
||||
// Check backup was created (original renamed to .toml.backup)
|
||||
const backupPath = path.join(tempDir, 'test.toml.backup');
|
||||
const backupExists = await fs
|
||||
.access(backupPath)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(backupExists).toBe(true);
|
||||
|
||||
// Original .toml file should not exist (renamed to .backup)
|
||||
const tomlExists = await fs
|
||||
.access(path.join(tempDir, 'test.toml'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(tomlExists).toBe(false);
|
||||
});
|
||||
|
||||
it('should delete original TOML when deleteOriginal is true', async () => {
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'delete-me.toml'),
|
||||
'prompt = "Test"',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
createBackup: false,
|
||||
deleteOriginal: true,
|
||||
});
|
||||
|
||||
// Original should be deleted
|
||||
const tomlExists = await fs
|
||||
.access(path.join(tempDir, 'delete-me.toml'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(tomlExists).toBe(false);
|
||||
|
||||
// Markdown should exist
|
||||
const mdExists = await fs
|
||||
.access(path.join(tempDir, 'delete-me.md'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(mdExists).toBe(true);
|
||||
|
||||
// Backup should not exist (createBackup was false)
|
||||
const backupExists = await fs
|
||||
.access(path.join(tempDir, 'delete-me.toml.backup'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(backupExists).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail if Markdown file already exists', async () => {
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'existing.toml'),
|
||||
'prompt = "Test"',
|
||||
'utf-8',
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'existing.md'),
|
||||
'Already exists',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const result = await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
createBackup: false,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.failedFiles).toHaveLength(1);
|
||||
expect(result.failedFiles[0].file).toBe('existing.toml');
|
||||
expect(result.failedFiles[0].error).toContain('already exists');
|
||||
});
|
||||
|
||||
it('should handle migration without backup', async () => {
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'no-backup.toml'),
|
||||
'prompt = "Test"',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const result = await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
createBackup: false,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// Original TOML file should still exist (no backup, no delete)
|
||||
const tomlExists = await fs
|
||||
.access(path.join(tempDir, 'no-backup.toml'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(tomlExists).toBe(true);
|
||||
|
||||
// Backup should not exist
|
||||
const backupExists = await fs
|
||||
.access(path.join(tempDir, 'no-backup.toml.backup'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(backupExists).toBe(false);
|
||||
});
|
||||
|
||||
it('should return success with empty results for no TOML files', async () => {
|
||||
const result = await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.convertedFiles).toHaveLength(0);
|
||||
expect(result.failedFiles).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateMigrationPrompt', () => {
|
||||
it('should generate prompt for few files', () => {
|
||||
const files = ['cmd1.toml', 'cmd2.toml'];
|
||||
|
||||
const prompt = generateMigrationPrompt(files);
|
||||
|
||||
expect(prompt).toContain('Found 2 command files');
|
||||
expect(prompt).toContain('cmd1.toml');
|
||||
expect(prompt).toContain('cmd2.toml');
|
||||
expect(prompt).toContain('qwen-code migrate-commands');
|
||||
});
|
||||
|
||||
it('should truncate file list for many files', () => {
|
||||
const files = Array.from({ length: 10 }, (_, i) => `cmd${i}.toml`);
|
||||
|
||||
const prompt = generateMigrationPrompt(files);
|
||||
|
||||
expect(prompt).toContain('Found 10 command files');
|
||||
expect(prompt).toContain('... and 7 more');
|
||||
});
|
||||
|
||||
it('should return empty string for no files', () => {
|
||||
const prompt = generateMigrationPrompt([]);
|
||||
|
||||
expect(prompt).toBe('');
|
||||
});
|
||||
|
||||
it('should use singular form for single file', () => {
|
||||
const prompt = generateMigrationPrompt(['single.toml']);
|
||||
|
||||
expect(prompt).toContain('Found 1 command file');
|
||||
// Don't check for plural since "files" appears in other parts of the message
|
||||
});
|
||||
});
|
||||
});
|
||||
packages/cli/src/services/command-migration-tool.ts (new file, 169 lines)
@@ -0,0 +1,169 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* Tool for migrating TOML commands to Markdown format.
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { glob } from 'glob';
|
||||
import { convertTomlToMarkdown } from '@qwen-code/qwen-code-core';
|
||||
|
||||
export interface MigrationResult {
|
||||
success: boolean;
|
||||
convertedFiles: string[];
|
||||
failedFiles: Array<{ file: string; error: string }>;
|
||||
}
|
||||
|
||||
export interface MigrationOptions {
|
||||
/** Directory containing command files */
|
||||
commandDir: string;
|
||||
/** Whether to create backups (default: true) */
|
||||
createBackup?: boolean;
|
||||
/** Whether to delete original TOML files after migration (default: false) */
|
||||
deleteOriginal?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a directory for TOML command files.
|
||||
* @param commandDir Directory to scan
|
||||
* @returns Array of TOML file paths (relative to commandDir)
|
||||
*/
|
||||
export async function detectTomlCommands(
|
||||
commandDir: string,
|
||||
): Promise<string[]> {
|
||||
try {
|
||||
await fs.access(commandDir);
|
||||
} catch {
|
||||
// Directory doesn't exist
|
||||
return [];
|
||||
}
|
||||
|
||||
const tomlFiles = await glob('**/*.toml', {
|
||||
cwd: commandDir,
|
||||
nodir: true,
|
||||
dot: false,
|
||||
});
|
||||
|
||||
return tomlFiles;
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrates TOML command files to Markdown format.
|
||||
* @param options Migration options
|
||||
* @returns Migration result with details
|
||||
*/
|
||||
export async function migrateTomlCommands(
|
||||
options: MigrationOptions,
|
||||
): Promise<MigrationResult> {
|
||||
const { commandDir, createBackup = true, deleteOriginal = false } = options;
|
||||
|
||||
const result: MigrationResult = {
|
||||
success: true,
|
||||
convertedFiles: [],
|
||||
failedFiles: [],
|
||||
};
|
||||
|
||||
// Detect TOML files
|
||||
const tomlFiles = await detectTomlCommands(commandDir);
|
||||
|
||||
if (tomlFiles.length === 0) {
|
||||
return result;
|
||||
}
|
||||
|
||||
// Process each TOML file
|
||||
for (const relativeFile of tomlFiles) {
|
||||
const tomlPath = path.join(commandDir, relativeFile);
|
||||
|
||||
try {
|
||||
// Read TOML file
|
||||
const tomlContent = await fs.readFile(tomlPath, 'utf-8');
|
||||
|
||||
// Convert to Markdown
|
||||
const markdownContent = convertTomlToMarkdown(tomlContent);
|
||||
|
||||
// Generate Markdown file path (same location, .md extension)
|
||||
const markdownPath = tomlPath.replace(/\.toml$/, '.md');
|
||||
|
||||
// Check if Markdown file already exists
|
||||
try {
|
||||
await fs.access(markdownPath);
|
||||
throw new Error(
|
||||
`Markdown file already exists: ${path.basename(markdownPath)}`,
|
||||
);
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||
throw error;
|
||||
}
|
||||
// File doesn't exist, continue
|
||||
}
|
||||
|
||||
// Write Markdown file
|
||||
await fs.writeFile(markdownPath, markdownContent, 'utf-8');
|
||||
|
||||
// Backup original if requested (rename to .toml.backup)
|
||||
if (createBackup) {
|
||||
const backupPath = `${tomlPath}.backup`;
|
||||
await fs.rename(tomlPath, backupPath);
|
||||
} else if (deleteOriginal) {
|
||||
// Delete original if requested and no backup
|
||||
await fs.unlink(tomlPath);
|
||||
}
|
||||
|
||||
result.convertedFiles.push(relativeFile);
|
||||
} catch (error) {
|
||||
result.success = false;
|
||||
result.failedFiles.push({
|
||||
file: relativeFile,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a migration report message.
|
||||
* @param tomlFiles List of TOML files found
|
||||
* @returns Human-readable migration prompt message
|
||||
*/
|
||||
export function generateMigrationPrompt(tomlFiles: string[]): string {
|
||||
if (tomlFiles.length === 0) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const count = tomlFiles.length;
|
||||
const fileList =
|
||||
tomlFiles.length <= 5
|
||||
? tomlFiles.map((f) => ` - ${f}`).join('\n')
|
||||
: ` - ${tomlFiles.slice(0, 3).join('\n - ')}\n - ... and ${tomlFiles.length - 3} more`;
|
||||
|
||||
return `
|
||||
⚠️ TOML Command Format Deprecation Notice
|
||||
|
||||
Found ${count} command file${count > 1 ? 's' : ''} in TOML format:
|
||||
${fileList}
|
||||
|
||||
The TOML format for commands is being deprecated in favor of Markdown format.
|
||||
Markdown format is more readable and easier to edit.
|
||||
|
||||
You can migrate these files automatically using:
|
||||
qwen-code migrate-commands
|
||||
|
||||
Or manually convert each file:
|
||||
- TOML: prompt = "..." / description = "..."
|
||||
- Markdown: YAML frontmatter + content
|
||||
|
||||
The migration tool will:
|
||||
✓ Convert TOML files to Markdown
|
||||
✓ Create backups of original files
|
||||
✓ Preserve all command functionality
|
||||
|
||||
TOML format will continue to work for now, but migration is recommended.
|
||||
`.trim();
|
||||
}
|
||||
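A hedged usage sketch of the migration helpers above, assuming a caller placed next to this module; the directory argument is hypothetical:

import {
  detectTomlCommands,
  generateMigrationPrompt,
  migrateTomlCommands,
} from './command-migration-tool.js';

async function migrateCommandsIn(commandDir: string): Promise<void> {
  const tomlFiles = await detectTomlCommands(commandDir);
  if (tomlFiles.length === 0) return;

  console.log(generateMigrationPrompt(tomlFiles));

  // Convert to Markdown, keeping the originals as .toml.backup.
  const result = await migrateTomlCommands({ commandDir, createBackup: true });
  console.log(
    `Converted ${result.convertedFiles.length} file(s), ` +
      `${result.failedFiles.length} failure(s).`,
  );
}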
packages/cli/src/services/markdown-command-parser.test.ts (new file, 144 lines)
@@ -0,0 +1,144 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
parseMarkdownCommand,
|
||||
MarkdownCommandDefSchema,
|
||||
} from './markdown-command-parser.js';
|
||||
|
||||
describe('parseMarkdownCommand', () => {
|
||||
it('should parse markdown with YAML frontmatter', () => {
|
||||
const content = `---
|
||||
description: Test command
|
||||
---
|
||||
|
||||
This is the prompt content.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result).toEqual({
|
||||
frontmatter: {
|
||||
description: 'Test command',
|
||||
},
|
||||
prompt: 'This is the prompt content.',
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse markdown without frontmatter', () => {
|
||||
const content = 'This is just a prompt without frontmatter.';
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result).toEqual({
|
||||
prompt: 'This is just a prompt without frontmatter.',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multi-line prompts', () => {
|
||||
const content = `---
|
||||
description: Multi-line test
|
||||
---
|
||||
|
||||
First line of prompt.
|
||||
Second line of prompt.
|
||||
Third line of prompt.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.prompt).toBe(
|
||||
'First line of prompt.\nSecond line of prompt.\nThird line of prompt.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should trim whitespace from prompt', () => {
|
||||
const content = `---
|
||||
description: Whitespace test
|
||||
---
|
||||
|
||||
Prompt with leading and trailing spaces
|
||||
`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.prompt).toBe('Prompt with leading and trailing spaces');
|
||||
});
|
||||
|
||||
it('should handle empty frontmatter', () => {
|
||||
const content = `---
|
||||
---
|
||||
|
||||
Prompt content after empty frontmatter.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
// Empty YAML frontmatter returns undefined, not {}
|
||||
expect(result.frontmatter).toBeUndefined();
|
||||
expect(result.prompt).toBe('Prompt content after empty frontmatter.');
|
||||
});
|
||||
|
||||
it('should handle invalid YAML frontmatter gracefully', () => {
|
||||
// The YAML parser we use is quite tolerant, so most "invalid" YAML
|
||||
// actually parses successfully. This test verifies that behavior.
|
||||
const content = `---
|
||||
description: test
|
||||
---
|
||||
|
||||
Prompt content.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.frontmatter).toBeDefined();
|
||||
expect(result.prompt).toBe('Prompt content.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('MarkdownCommandDefSchema', () => {
|
||||
it('should validate valid markdown command def', () => {
|
||||
const validDef = {
|
||||
frontmatter: {
|
||||
description: 'Test description',
|
||||
},
|
||||
prompt: 'Test prompt',
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(validDef);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate markdown command def without frontmatter', () => {
|
||||
const validDef = {
|
||||
prompt: 'Test prompt',
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(validDef);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject command def without prompt', () => {
|
||||
const invalidDef = {
|
||||
frontmatter: {
|
||||
description: 'Test description',
|
||||
},
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject command def with non-string prompt', () => {
|
||||
const invalidDef = {
|
||||
prompt: 123,
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
64
packages/cli/src/services/markdown-command-parser.ts
Normal file
64
packages/cli/src/services/markdown-command-parser.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
import { parse as parseYaml } from '@qwen-code/qwen-code-core';
|
||||
|
||||
/**
|
||||
* Defines the Zod schema for a Markdown command definition file.
|
||||
* The frontmatter contains optional metadata, and the body is the prompt.
|
||||
*/
|
||||
export const MarkdownCommandDefSchema = z.object({
|
||||
frontmatter: z
|
||||
.object({
|
||||
description: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
prompt: z.string({
|
||||
required_error: 'The prompt content is required.',
|
||||
invalid_type_error: 'The prompt content must be a string.',
|
||||
}),
|
||||
});
|
||||
|
||||
export type MarkdownCommandDef = z.infer<typeof MarkdownCommandDefSchema>;
|
||||
|
||||
/**
|
||||
* Parses a Markdown command file with optional YAML frontmatter.
|
||||
* @param content The file content
|
||||
* @returns Parsed command definition with frontmatter and prompt
|
||||
*/
|
||||
export function parseMarkdownCommand(content: string): MarkdownCommandDef {
|
||||
// Match YAML frontmatter pattern: ---\n...\n---\n
|
||||
// Allow empty frontmatter (---\n---\n): the lazy ([\s\S]*?) group also matches an empty frontmatter body.
|
||||
const frontmatterRegex = /^---\n([\s\S]*?)---\n([\s\S]*)$/;
|
||||
const match = content.match(frontmatterRegex);
|
||||
|
||||
if (!match) {
|
||||
// No frontmatter, entire content is the prompt
|
||||
return {
|
||||
prompt: content.trim(),
|
||||
};
|
||||
}
|
||||
|
||||
const [, frontmatterYaml, body] = match;
|
||||
|
||||
// Parse YAML frontmatter if not empty
|
||||
let frontmatter: Record<string, unknown> | undefined;
|
||||
if (frontmatterYaml.trim()) {
|
||||
try {
|
||||
frontmatter = parseYaml(frontmatterYaml) as Record<string, unknown>;
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to parse YAML frontmatter: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
frontmatter,
|
||||
prompt: body.trim(),
|
||||
};
|
||||
}
|
||||
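A small usage sketch of the parser above; the command text is invented for illustration:

import { parseMarkdownCommand } from './markdown-command-parser.js';

const source = [
  '---',
  'description: Greet the user',
  '---',
  '',
  'Say hello to {{args}}.',
].join('\n');

const def = parseMarkdownCommand(source);
// def.frontmatter?.description === 'Greet the user'
// def.prompt === 'Say hello to {{args}}.'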
packages/cli/src/services/test-commands/example.md (new file, 5 lines)
@@ -0,0 +1,5 @@
---
description: Example markdown command
---

This is an example prompt from a markdown file.
|
||||
@@ -1,49 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
EXTENSIONS_CONFIG_FILENAME,
|
||||
INSTALL_METADATA_FILENAME,
|
||||
} from '../config/extension.js';
|
||||
import {
|
||||
type MCPServerConfig,
|
||||
type ExtensionInstallMetadata,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
|
||||
export function createExtension({
|
||||
extensionsDir = 'extensions-dir',
|
||||
name = 'my-extension',
|
||||
version = '1.0.0',
|
||||
addContextFile = false,
|
||||
contextFileName = undefined as string | undefined,
|
||||
mcpServers = {} as Record<string, MCPServerConfig>,
|
||||
installMetadata = undefined as ExtensionInstallMetadata | undefined,
|
||||
} = {}): string {
|
||||
const extDir = path.join(extensionsDir, name);
|
||||
fs.mkdirSync(extDir, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name, version, contextFileName, mcpServers }),
|
||||
);
|
||||
|
||||
if (addContextFile) {
|
||||
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
|
||||
}
|
||||
|
||||
if (contextFileName) {
|
||||
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
|
||||
}
|
||||
|
||||
if (installMetadata) {
|
||||
fs.writeFileSync(
|
||||
path.join(extDir, INSTALL_METADATA_FILENAME),
|
||||
JSON.stringify(installMetadata),
|
||||
);
|
||||
}
|
||||
return extDir;
|
||||
}
|
||||
@@ -76,7 +76,6 @@ vi.mock('./hooks/useFolderTrust.js');
|
||||
vi.mock('./hooks/useIdeTrustListener.js');
|
||||
vi.mock('./hooks/useMessageQueue.js');
|
||||
vi.mock('./hooks/useAutoAcceptIndicator.js');
|
||||
vi.mock('./hooks/useWorkspaceMigration.js');
|
||||
vi.mock('./hooks/useGitBranchName.js');
|
||||
vi.mock('./contexts/VimModeContext.js');
|
||||
vi.mock('./contexts/SessionContext.js');
|
||||
@@ -103,7 +102,6 @@ import { useFolderTrust } from './hooks/useFolderTrust.js';
|
||||
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
|
||||
import { useMessageQueue } from './hooks/useMessageQueue.js';
|
||||
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
|
||||
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
|
||||
import { useGitBranchName } from './hooks/useGitBranchName.js';
|
||||
import { useVimMode } from './contexts/VimModeContext.js';
|
||||
import { useSessionStats } from './contexts/SessionContext.js';
|
||||
@@ -134,7 +132,6 @@ describe('AppContainer State Management', () => {
|
||||
const mockedUseIdeTrustListener = useIdeTrustListener as Mock;
|
||||
const mockedUseMessageQueue = useMessageQueue as Mock;
|
||||
const mockedUseAutoAcceptIndicator = useAutoAcceptIndicator as Mock;
|
||||
const mockedUseWorkspaceMigration = useWorkspaceMigration as Mock;
|
||||
const mockedUseGitBranchName = useGitBranchName as Mock;
|
||||
const mockedUseVimMode = useVimMode as Mock;
|
||||
const mockedUseSessionStats = useSessionStats as Mock;
|
||||
@@ -239,12 +236,6 @@ describe('AppContainer State Management', () => {
|
||||
getQueuedMessagesText: vi.fn().mockReturnValue(''),
|
||||
});
|
||||
mockedUseAutoAcceptIndicator.mockReturnValue(false);
|
||||
mockedUseWorkspaceMigration.mockReturnValue({
|
||||
showWorkspaceMigrationDialog: false,
|
||||
workspaceExtensions: [],
|
||||
onWorkspaceMigrationDialogOpen: vi.fn(),
|
||||
onWorkspaceMigrationDialogClose: vi.fn(),
|
||||
});
|
||||
mockedUseGitBranchName.mockReturnValue('main');
|
||||
mockedUseVimMode.mockReturnValue({
|
||||
isVimEnabled: false,
|
||||
|
||||
@@ -37,6 +37,7 @@ import {
|
||||
getErrorMessage,
|
||||
getAllGeminiMdFilenames,
|
||||
ShellExecutionService,
|
||||
Storage,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { buildResumedHistoryItems } from './utils/resumeHistoryUtils.js';
|
||||
import { validateAuthMethod } from '../config/auth.js';
|
||||
@@ -75,6 +76,9 @@ import { useLoadingIndicator } from './hooks/useLoadingIndicator.js';
|
||||
import { useFolderTrust } from './hooks/useFolderTrust.js';
|
||||
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
|
||||
import { type IdeIntegrationNudgeResult } from './IdeIntegrationNudge.js';
|
||||
import { type CommandMigrationNudgeResult } from './CommandFormatMigrationNudge.js';
|
||||
import { useCommandMigration } from './hooks/useCommandMigration.js';
|
||||
import { migrateTomlCommands } from '../services/command-migration-tool.js';
|
||||
import { appEvents, AppEvent } from '../utils/events.js';
|
||||
import { type UpdateObject } from './utils/updateCheck.js';
|
||||
import { setUpdateHandler } from '../utils/handleAutoUpdate.js';
|
||||
@@ -82,10 +86,12 @@ import { ConsolePatcher } from './utils/ConsolePatcher.js';
|
||||
import { registerCleanup, runExitCleanup } from '../utils/cleanup.js';
|
||||
import { useMessageQueue } from './hooks/useMessageQueue.js';
|
||||
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
|
||||
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
|
||||
import { useSessionStats } from './contexts/SessionContext.js';
|
||||
import { useGitBranchName } from './hooks/useGitBranchName.js';
|
||||
import { useExtensionUpdates } from './hooks/useExtensionUpdates.js';
|
||||
import {
|
||||
useExtensionUpdates,
|
||||
useConfirmUpdateRequests,
|
||||
} from './hooks/useExtensionUpdates.js';
|
||||
import { ShellFocusContext } from './contexts/ShellFocusContext.js';
|
||||
import { t } from '../i18n/index.js';
|
||||
import { useWelcomeBack } from './hooks/useWelcomeBack.js';
|
||||
@@ -96,6 +102,7 @@ import { processVisionSwitchOutcome } from './hooks/useVisionAutoSwitch.js';
|
||||
import { useSubagentCreateDialog } from './hooks/useSubagentCreateDialog.js';
|
||||
import { useAgentsManagerDialog } from './hooks/useAgentsManagerDialog.js';
|
||||
import { useAttentionNotifications } from './hooks/useAttentionNotifications.js';
|
||||
import { requestConsentInteractive } from '../commands/extensions/consent.js';
|
||||
|
||||
const CTRL_EXIT_PROMPT_DURATION_MS = 1000;
|
||||
|
||||
@@ -156,15 +163,21 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
config.isTrustedFolder(),
|
||||
);
|
||||
|
||||
const extensions = config.getExtensions();
|
||||
const extensionManager = config.getExtensionManager();
|
||||
|
||||
extensionManager.setRequestConsent(async (description) =>
|
||||
requestConsentInteractive(description, addConfirmUpdateExtensionRequest),
|
||||
);
|
||||
|
||||
const { addConfirmUpdateExtensionRequest, confirmUpdateExtensionRequests } =
|
||||
useConfirmUpdateRequests();
|
||||
|
||||
const {
|
||||
extensionsUpdateState,
|
||||
extensionsUpdateStateInternal,
|
||||
dispatchExtensionStateUpdate,
|
||||
confirmUpdateExtensionRequests,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
} = useExtensionUpdates(
|
||||
extensions,
|
||||
extensionManager,
|
||||
historyManager.addItem,
|
||||
config.getWorkingDir(),
|
||||
);
|
||||
@@ -429,13 +442,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
remount: refreshStatic,
|
||||
});
|
||||
|
||||
const {
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
} = useWorkspaceMigration(settings);
|
||||
|
||||
const { toggleVimEnabled } = useVimMode();
|
||||
|
||||
const {
|
||||
@@ -571,11 +577,11 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
: [],
|
||||
config.getDebugMode(),
|
||||
config.getFileService(),
|
||||
settings.merged,
|
||||
config.getExtensionContextFilePaths(),
|
||||
config.isTrustedFolder(),
|
||||
settings.merged.context?.importFormat || 'tree', // Use setting or default to 'tree'
|
||||
config.getFileFilteringOptions(),
|
||||
config.getDiscoveryMaxDirs(),
|
||||
);
|
||||
|
||||
config.setUserMemory(memoryContent);
|
||||
@@ -838,6 +844,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
!idePromptAnswered,
|
||||
);
|
||||
|
||||
// Command migration nudge
|
||||
const {
|
||||
showMigrationNudge: shouldShowCommandMigrationNudge,
|
||||
tomlFiles: commandMigrationTomlFiles,
|
||||
setShowMigrationNudge: setShowCommandMigrationNudge,
|
||||
} = useCommandMigration(settings, config.storage);
|
||||
|
||||
const [showErrorDetails, setShowErrorDetails] = useState<boolean>(false);
|
||||
const [showToolDescriptions, setShowToolDescriptions] =
|
||||
useState<boolean>(false);
|
||||
@@ -933,6 +946,92 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
[handleSlashCommand, settings],
|
||||
);
|
||||
|
||||
const handleCommandMigrationComplete = useCallback(
|
||||
async (result: CommandMigrationNudgeResult) => {
|
||||
setShowCommandMigrationNudge(false);
|
||||
|
||||
if (result.userSelection === 'yes') {
|
||||
// Perform migration for both workspace and user levels
|
||||
try {
|
||||
const results = [];
|
||||
|
||||
// Migrate workspace commands
|
||||
const workspaceCommandsDir = config.storage.getProjectCommandsDir();
|
||||
const workspaceResult = await migrateTomlCommands({
|
||||
commandDir: workspaceCommandsDir,
|
||||
createBackup: true,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
if (
|
||||
workspaceResult.convertedFiles.length > 0 ||
|
||||
workspaceResult.failedFiles.length > 0
|
||||
) {
|
||||
results.push({ level: 'workspace', result: workspaceResult });
|
||||
}
|
||||
|
||||
// Migrate user commands
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const userResult = await migrateTomlCommands({
|
||||
commandDir: userCommandsDir,
|
||||
createBackup: true,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
if (
|
||||
userResult.convertedFiles.length > 0 ||
|
||||
userResult.failedFiles.length > 0
|
||||
) {
|
||||
results.push({ level: 'user', result: userResult });
|
||||
}
|
||||
|
||||
// Report results
|
||||
for (const { level, result: migrationResult } of results) {
|
||||
if (
|
||||
migrationResult.success &&
|
||||
migrationResult.convertedFiles.length > 0
|
||||
) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: `[${level}] Successfully migrated ${migrationResult.convertedFiles.length} command file${migrationResult.convertedFiles.length > 1 ? 's' : ''} to Markdown format. Original files backed up as .toml.backup`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
|
||||
if (migrationResult.failedFiles.length > 0) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: `[${level}] Failed to migrate ${migrationResult.failedFiles.length} file${migrationResult.failedFiles.length > 1 ? 's' : ''}:\n${migrationResult.failedFiles.map((f) => ` • ${f.file}: ${f.error}`).join('\n')}`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (results.length === 0) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'No TOML files found to migrate.',
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: `❌ Migration failed: ${getErrorMessage(error)}`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
[historyManager, setShowCommandMigrationNudge, config.storage],
|
||||
);
|
||||
|
||||
const { elapsedTime, currentLoadingPhrase } = useLoadingIndicator(
|
||||
streamingState,
|
||||
settings.merged.ui?.customWittyPhrases,
|
||||
@@ -1175,8 +1274,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
|
||||
const dialogsVisible =
|
||||
showWelcomeBackDialog ||
|
||||
showWorkspaceMigrationDialog ||
|
||||
shouldShowIdePrompt ||
|
||||
shouldShowCommandMigrationNudge ||
|
||||
isFolderTrustDialogOpen ||
|
||||
!!shellConfirmationRequest ||
|
||||
!!confirmationRequest ||
|
||||
@@ -1242,6 +1341,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
suggestionsWidth,
|
||||
isInputActive,
|
||||
shouldShowIdePrompt,
|
||||
shouldShowCommandMigrationNudge,
|
||||
commandMigrationTomlFiles,
|
||||
isFolderTrustDialogOpen: isFolderTrustDialogOpen ?? false,
|
||||
isTrustedFolder,
|
||||
constrainHeight,
|
||||
@@ -1258,8 +1359,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
historyRemountKey,
|
||||
messageQueue,
|
||||
showAutoAcceptIndicator,
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
currentModel,
|
||||
contextFileNames,
|
||||
errorCount,
|
||||
@@ -1331,6 +1430,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
suggestionsWidth,
|
||||
isInputActive,
|
||||
shouldShowIdePrompt,
|
||||
shouldShowCommandMigrationNudge,
|
||||
commandMigrationTomlFiles,
|
||||
isFolderTrustDialogOpen,
|
||||
isTrustedFolder,
|
||||
constrainHeight,
|
||||
@@ -1347,8 +1448,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
historyRemountKey,
|
||||
messageQueue,
|
||||
showAutoAcceptIndicator,
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
contextFileNames,
|
||||
errorCount,
|
||||
availableTerminalHeight,
|
||||
@@ -1402,14 +1501,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
setShellModeActive,
|
||||
vimHandleInput,
|
||||
handleIdePromptComplete,
|
||||
handleCommandMigrationComplete,
|
||||
handleFolderTrustSelect,
|
||||
setConstrainHeight,
|
||||
onEscapePromptChange: handleEscapePromptChange,
|
||||
refreshStatic,
|
||||
handleFinalSubmit,
|
||||
handleClearScreen,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
// Vision switch dialog
|
||||
handleVisionSwitchSelect,
|
||||
// Welcome back dialog
|
||||
@@ -1439,14 +1537,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
setShellModeActive,
|
||||
vimHandleInput,
|
||||
handleIdePromptComplete,
|
||||
handleCommandMigrationComplete,
|
||||
handleFolderTrustSelect,
|
||||
setConstrainHeight,
|
||||
handleEscapePromptChange,
|
||||
refreshStatic,
|
||||
handleFinalSubmit,
|
||||
handleClearScreen,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
handleVisionSwitchSelect,
|
||||
handleWelcomeBackSelection,
|
||||
handleWelcomeBackClose,
|
||||
|
||||
packages/cli/src/ui/CommandFormatMigrationNudge.tsx (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { Box, Text } from 'ink';
|
||||
import type { RadioSelectItem } from './components/shared/RadioButtonSelect.js';
|
||||
import { RadioButtonSelect } from './components/shared/RadioButtonSelect.js';
|
||||
import { useKeypress } from './hooks/useKeypress.js';
|
||||
import { theme } from './semantic-colors.js';
|
||||
|
||||
export type CommandMigrationNudgeResult = {
|
||||
userSelection: 'yes' | 'no';
|
||||
};
|
||||
|
||||
interface CommandFormatMigrationNudgeProps {
|
||||
tomlFiles: string[];
|
||||
onComplete: (result: CommandMigrationNudgeResult) => void;
|
||||
}
|
||||
|
||||
export function CommandFormatMigrationNudge({
|
||||
tomlFiles,
|
||||
onComplete,
|
||||
}: CommandFormatMigrationNudgeProps) {
|
||||
useKeypress(
|
||||
(key) => {
|
||||
if (key.name === 'escape') {
|
||||
onComplete({
|
||||
userSelection: 'no',
|
||||
});
|
||||
}
|
||||
},
|
||||
{ isActive: true },
|
||||
);
|
||||
|
||||
const OPTIONS: Array<RadioSelectItem<CommandMigrationNudgeResult>> = [
|
||||
{
|
||||
label: 'Yes',
|
||||
value: {
|
||||
userSelection: 'yes',
|
||||
},
|
||||
key: 'Yes',
|
||||
},
|
||||
{
|
||||
label: 'No (esc)',
|
||||
value: {
|
||||
userSelection: 'no',
|
||||
},
|
||||
key: 'No (esc)',
|
||||
},
|
||||
];
|
||||
|
||||
const count = tomlFiles.length;
|
||||
const fileList =
|
||||
count <= 3
|
||||
? tomlFiles.map((f) => ` • ${f}`).join('\n')
|
||||
: ` • ${tomlFiles.slice(0, 2).join('\n • ')}\n • ... and ${count - 2} more`;
|
||||
|
||||
return (
|
||||
<Box
|
||||
flexDirection="column"
|
||||
borderStyle="round"
|
||||
borderColor={theme.status.warning}
|
||||
padding={1}
|
||||
width="100%"
|
||||
marginLeft={1}
|
||||
>
|
||||
<Box marginBottom={1} flexDirection="column">
|
||||
<Text>
|
||||
<Text color={theme.status.warning}>{'⚠️ '}</Text>
|
||||
<Text bold>Command Format Migration</Text>
|
||||
</Text>
|
||||
<Text color={theme.text.secondary}>
|
||||
{`Found ${count} TOML command file${count > 1 ? 's' : ''}:`}
|
||||
</Text>
|
||||
<Text color={theme.text.secondary}>{fileList}</Text>
|
||||
<Text>{''}</Text>
|
||||
<Text color={theme.text.secondary}>
|
||||
The TOML format is deprecated. Would you like to migrate them to
|
||||
Markdown format?
|
||||
</Text>
|
||||
<Text color={theme.text.secondary}>
|
||||
(Backups will be created and original files will be preserved)
|
||||
</Text>
|
||||
</Box>
|
||||
<RadioButtonSelect items={OPTIONS} onSelect={onComplete} />
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -4,13 +4,6 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { requestConsentInteractive } from '../../config/extension.js';
|
||||
import {
|
||||
updateAllUpdatableExtensions,
|
||||
type ExtensionUpdateInfo,
|
||||
updateExtension,
|
||||
checkForAllExtensionUpdates,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { ExtensionUpdateState } from '../state/extensions.js';
|
||||
import { MessageType } from '../types.js';
|
||||
@@ -20,8 +13,34 @@ import {
|
||||
CommandKind,
|
||||
} from './types.js';
|
||||
import { t } from '../../i18n/index.js';
|
||||
import type { ExtensionUpdateInfo } from '@qwen-code/qwen-code-core';
|
||||
|
||||
function showMessageIfNoExtensions(
|
||||
context: CommandContext,
|
||||
extensions: unknown[],
|
||||
): boolean {
|
||||
if (extensions.length === 0) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'No extensions installed. Run `/extensions explore` to check out the gallery.',
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function listAction(context: CommandContext) {
|
||||
const extensions = context.services.config
|
||||
? context.services.config.getExtensions()
|
||||
: [];
|
||||
|
||||
if (showMessageIfNoExtensions(context, extensions)) {
|
||||
return;
|
||||
}
|
||||
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.EXTENSIONS_LIST,
|
||||
@@ -34,7 +53,6 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
const updateArgs = args.split(' ').filter((value) => value.length > 0);
|
||||
const all = updateArgs.length === 1 && updateArgs[0] === '--all';
|
||||
const names = all ? undefined : updateArgs;
|
||||
let updateInfos: ExtensionUpdateInfo[] = [];
|
||||
|
||||
if (!all && names?.length === 0) {
|
||||
context.ui.addItem(
|
||||
@@ -47,29 +65,40 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
return;
|
||||
}
|
||||
|
||||
let updateInfos: ExtensionUpdateInfo[] = [];
|
||||
|
||||
const extensionManager = context.services.config!.getExtensionManager();
|
||||
const extensions = context.services.config
|
||||
? context.services.config.getExtensions()
|
||||
: [];
|
||||
|
||||
if (showMessageIfNoExtensions(context, extensions)) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
try {
|
||||
await checkForAllExtensionUpdates(
|
||||
context.services.config!.getExtensions(),
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
|
||||
await extensionManager.checkForAllExtensionUpdates((extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
|
||||
|
||||
context.ui.setPendingItem({
|
||||
type: MessageType.EXTENSIONS_LIST,
|
||||
});
|
||||
if (all) {
|
||||
updateInfos = await updateAllUpdatableExtensions(
|
||||
context.services.config!.getWorkingDir(),
|
||||
// We don't have the ability to prompt for consent yet in this flow.
|
||||
(description) =>
|
||||
requestConsentInteractive(
|
||||
description,
|
||||
context.ui.addConfirmUpdateExtensionRequest,
|
||||
),
|
||||
context.services.config!.getExtensions(),
|
||||
updateInfos = await extensionManager.updateAllUpdatableExtensions(
|
||||
context.ui.extensionsUpdateState,
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
(extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
} else if (names?.length) {
|
||||
const workingDir = context.services.config!.getWorkingDir();
|
||||
const extensions = context.services.config!.getExtensions();
|
||||
for (const name of names) {
|
||||
const extension = extensions.find(
|
||||
@@ -85,17 +114,15 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
);
|
||||
continue;
|
||||
}
|
||||
const updateInfo = await updateExtension(
|
||||
const updateInfo = await extensionManager.updateExtension(
|
||||
extension,
|
||||
workingDir,
|
||||
(description) =>
|
||||
requestConsentInteractive(
|
||||
description,
|
||||
context.ui.addConfirmUpdateExtensionRequest,
|
||||
),
|
||||
context.ui.extensionsUpdateState.get(extension.name)?.status ??
|
||||
ExtensionUpdateState.UNKNOWN,
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
(extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
if (updateInfo) updateInfos.push(updateInfo);
|
||||
}
|
||||
|
||||
packages/cli/src/ui/commands/skillsCommand.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Qwen
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import {
|
||||
CommandKind,
|
||||
type CommandCompletionItem,
|
||||
type CommandContext,
|
||||
type SlashCommand,
|
||||
} from './types.js';
|
||||
import { MessageType, type HistoryItemSkillsList } from '../types.js';
|
||||
import { t } from '../../i18n/index.js';
|
||||
import { AsyncFzf } from 'fzf';
|
||||
import type { SkillConfig } from '@qwen-code/qwen-code-core';
|
||||
|
||||
export const skillsCommand: SlashCommand = {
|
||||
name: 'skills',
|
||||
get description() {
|
||||
return t('List available skills.');
|
||||
},
|
||||
kind: CommandKind.BUILT_IN,
|
||||
action: async (context: CommandContext, args?: string) => {
|
||||
const rawArgs = args?.trim() ?? '';
|
||||
const [skillName = ''] = rawArgs.split(/\s+/);
|
||||
|
||||
const skillManager = context.services.config?.getSkillManager();
|
||||
if (!skillManager) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: t('Could not retrieve skill manager.'),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const skills = await skillManager.listSkills();
|
||||
if (skills.length === 0) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: t('No skills are currently available.'),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!skillName) {
|
||||
const sortedSkills = [...skills].sort((left, right) =>
|
||||
left.name.localeCompare(right.name),
|
||||
);
|
||||
const skillsListItem: HistoryItemSkillsList = {
|
||||
type: MessageType.SKILLS_LIST,
|
||||
skills: sortedSkills.map((skill) => ({ name: skill.name })),
|
||||
};
|
||||
context.ui.addItem(skillsListItem, Date.now());
|
||||
return;
|
||||
}
|
||||
const normalizedName = skillName.toLowerCase();
|
||||
const hasSkill = skills.some(
|
||||
(skill) => skill.name.toLowerCase() === normalizedName,
|
||||
);
|
||||
|
||||
if (!hasSkill) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: t('Unknown skill: {{name}}', { name: skillName }),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const rawInput = context.invocation?.raw ?? `/skills ${rawArgs}`;
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: [{ text: rawInput }],
|
||||
};
|
||||
},
|
||||
completion: async (
|
||||
context: CommandContext,
|
||||
partialArg: string,
|
||||
): Promise<CommandCompletionItem[]> => {
|
||||
const skillManager = context.services.config?.getSkillManager();
|
||||
if (!skillManager) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const skills = await skillManager.listSkills();
|
||||
const normalizedPartial = partialArg.trim();
|
||||
const matches = await getSkillMatches(skills, normalizedPartial);
|
||||
|
||||
return matches.map((skill) => ({
|
||||
value: skill.name,
|
||||
description: skill.description,
|
||||
}));
|
||||
},
|
||||
};
|
||||
|
||||
async function getSkillMatches(
|
||||
skills: SkillConfig[],
|
||||
query: string,
|
||||
): Promise<SkillConfig[]> {
|
||||
if (!query) {
|
||||
return skills;
|
||||
}
|
||||
|
||||
const names = skills.map((skill) => skill.name);
|
||||
const skillMap = new Map(skills.map((skill) => [skill.name, skill]));
|
||||
|
||||
try {
|
||||
const fzf = new AsyncFzf(names, {
|
||||
fuzzy: 'v2',
|
||||
casing: 'case-insensitive',
|
||||
});
|
||||
const results = (await fzf.find(query)) as Array<{ item: string }>;
|
||||
return results
|
||||
.map((result) => skillMap.get(result.item))
|
||||
.filter((skill): skill is SkillConfig => !!skill);
|
||||
} catch (error) {
|
||||
console.error('[skillsCommand] Fuzzy match failed:', error);
|
||||
const lowerQuery = query.toLowerCase();
|
||||
return skills.filter((skill) =>
|
||||
skill.name.toLowerCase().startsWith(lowerQuery),
|
||||
);
|
||||
}
|
||||
}
|
||||
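The `getSkillMatches` helper above pairs fzf's `AsyncFzf` matcher with a plain prefix filter as a fallback. A minimal standalone sketch of the same pattern, assuming only the `fzf` package and a list of plain names (the `SkillConfig` lookup is reduced to strings here):

```ts
import { AsyncFzf } from 'fzf';

// Fuzzy-rank candidate names; fall back to a case-insensitive prefix match
// if the fuzzy matcher throws for any reason.
async function fuzzyPick(names: string[], query: string): Promise<string[]> {
  if (!query) return names;
  try {
    const fzf = new AsyncFzf(names, {
      fuzzy: 'v2',
      casing: 'case-insensitive',
    });
    const results = await fzf.find(query);
    return results.map((result) => result.item);
  } catch {
    const lowered = query.toLowerCase();
    return names.filter((name) => name.toLowerCase().startsWith(lowered));
  }
}

// e.g. await fuzzyPick(['pdf', 'pptx', 'xlsx'], 'px') resolves to ['pptx'],
// since only 'pptx' contains 'p' followed by 'x'.
```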
@@ -209,6 +209,12 @@ export enum CommandKind {
  MCP_PROMPT = 'mcp-prompt',
}

export interface CommandCompletionItem {
  value: string;
  label?: string;
  description?: string;
}

// The standardized contract for any command in the system.
export interface SlashCommand {
  name: string;
@@ -234,7 +240,7 @@ export interface SlashCommand {
  completion?: (
    context: CommandContext,
    partialArg: string,
  ) => Promise<string[]>;
  ) => Promise<Array<string | CommandCompletionItem> | null>;

  subCommands?: SlashCommand[];
}
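With the widened `completion` signature, a provider may return plain strings, rich `CommandCompletionItem` objects, or `null`. A hedged sketch of a provider under the new contract; the command name and its targets are invented for illustration:

```ts
import type { CommandCompletionItem, CommandContext } from './types.js';

// Hypothetical provider for a "/deploy <target>" style command. Rich items
// carry descriptions for the suggestions popup; bare strings still work too.
async function completeDeployTargets(
  _context: CommandContext,
  partialArg: string,
): Promise<Array<string | CommandCompletionItem>> {
  const targets: CommandCompletionItem[] = [
    { value: 'staging', description: 'Deploy to the staging environment' },
    { value: 'prod', label: 'production', description: 'Deploy to production' },
  ];
  const prefix = partialArg.trim().toLowerCase();
  return targets.filter((target) => target.value.startsWith(prefix));
}
```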
@@ -6,6 +6,7 @@
|
||||
|
||||
import { Box, Text } from 'ink';
|
||||
import { IdeIntegrationNudge } from '../IdeIntegrationNudge.js';
|
||||
import { CommandFormatMigrationNudge } from '../CommandFormatMigrationNudge.js';
|
||||
import { LoopDetectionConfirmation } from './LoopDetectionConfirmation.js';
|
||||
import { FolderTrustDialog } from './FolderTrustDialog.js';
|
||||
import { ShellConfirmationDialog } from './ShellConfirmationDialog.js';
|
||||
@@ -16,7 +17,6 @@ import { QwenOAuthProgress } from './QwenOAuthProgress.js';
|
||||
import { AuthDialog } from '../auth/AuthDialog.js';
|
||||
import { OpenAIKeyPrompt } from './OpenAIKeyPrompt.js';
|
||||
import { EditorSettingsDialog } from './EditorSettingsDialog.js';
|
||||
import { WorkspaceMigrationDialog } from './WorkspaceMigrationDialog.js';
|
||||
import { PermissionsModifyTrustDialog } from './PermissionsModifyTrustDialog.js';
|
||||
import { ModelDialog } from './ModelDialog.js';
|
||||
import { ApprovalModeDialog } from './ApprovalModeDialog.js';
|
||||
@@ -76,15 +76,6 @@ export const DialogManager = ({
|
||||
if (uiState.showIdeRestartPrompt) {
|
||||
return <IdeTrustChangeDialog reason={uiState.ideTrustRestartReason} />;
|
||||
}
|
||||
if (uiState.showWorkspaceMigrationDialog) {
|
||||
return (
|
||||
<WorkspaceMigrationDialog
|
||||
workspaceExtensions={uiState.workspaceExtensions}
|
||||
onOpen={uiActions.onWorkspaceMigrationDialogOpen}
|
||||
onClose={uiActions.onWorkspaceMigrationDialogClose}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.shouldShowIdePrompt) {
|
||||
return (
|
||||
<IdeIntegrationNudge
|
||||
@@ -93,6 +84,14 @@ export const DialogManager = ({
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.shouldShowCommandMigrationNudge) {
|
||||
return (
|
||||
<CommandFormatMigrationNudge
|
||||
tomlFiles={uiState.commandMigrationTomlFiles}
|
||||
onComplete={uiActions.handleCommandMigrationComplete}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.isFolderTrustDialogOpen) {
|
||||
return (
|
||||
<FolderTrustDialog
|
||||
|
||||
@@ -30,6 +30,7 @@ import { Help } from './Help.js';
|
||||
import type { SlashCommand } from '../commands/types.js';
|
||||
import { ExtensionsList } from './views/ExtensionsList.js';
|
||||
import { getMCPServerStatus } from '@qwen-code/qwen-code-core';
|
||||
import { SkillsList } from './views/SkillsList.js';
|
||||
import { ToolsList } from './views/ToolsList.js';
|
||||
import { McpStatus } from './views/McpStatus.js';
|
||||
|
||||
@@ -153,6 +154,9 @@ const HistoryItemDisplayComponent: React.FC<HistoryItemDisplayProps> = ({
|
||||
showDescriptions={itemForDisplay.showDescriptions}
|
||||
/>
|
||||
)}
|
||||
{itemForDisplay.type === 'skills_list' && (
|
||||
<SkillsList skills={itemForDisplay.skills} />
|
||||
)}
|
||||
{itemForDisplay.type === 'mcp_status' && (
|
||||
<McpStatus {...itemForDisplay} serverStatus={getMCPServerStatus} />
|
||||
)}
|
||||
|
||||
@@ -106,7 +106,7 @@ export function SuggestionsDisplay({
|
||||
</Box>
|
||||
|
||||
{suggestion.description && (
|
||||
<Box flexGrow={1} paddingLeft={3}>
|
||||
<Box flexGrow={1} paddingLeft={2}>
|
||||
<Text color={textColor} wrap="truncate">
|
||||
{suggestion.description}
|
||||
</Text>
|
||||
|
||||
@@ -1,119 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { Box, Text } from 'ink';
|
||||
import {
|
||||
type Extension,
|
||||
performWorkspaceExtensionMigration,
|
||||
} from '../../config/extension.js';
|
||||
import { RadioButtonSelect } from './shared/RadioButtonSelect.js';
|
||||
import { theme } from '../semantic-colors.js';
|
||||
import { useState } from 'react';
|
||||
import { useKeypress } from '../hooks/useKeypress.js';
|
||||
|
||||
export function WorkspaceMigrationDialog(props: {
|
||||
workspaceExtensions: Extension[];
|
||||
onOpen: () => void;
|
||||
onClose: () => void;
|
||||
}) {
|
||||
const { workspaceExtensions, onOpen, onClose } = props;
|
||||
const [migrationComplete, setMigrationComplete] = useState(false);
|
||||
const [failedExtensions, setFailedExtensions] = useState<string[]>([]);
|
||||
onOpen();
|
||||
const onMigrate = async () => {
|
||||
const failed = await performWorkspaceExtensionMigration(
|
||||
workspaceExtensions,
|
||||
// We aren't updating extensions, just moving them around, don't need to ask for consent.
|
||||
async (_) => true,
|
||||
);
|
||||
setFailedExtensions(failed);
|
||||
setMigrationComplete(true);
|
||||
};
|
||||
|
||||
useKeypress(
|
||||
(key) => {
|
||||
if (migrationComplete && key.sequence === 'q') {
|
||||
process.exit(0);
|
||||
}
|
||||
},
|
||||
{ isActive: true },
|
||||
);
|
||||
|
||||
if (migrationComplete) {
|
||||
return (
|
||||
<Box
|
||||
flexDirection="column"
|
||||
borderStyle="round"
|
||||
borderColor={theme.border.default}
|
||||
padding={1}
|
||||
>
|
||||
{failedExtensions.length > 0 ? (
|
||||
<>
|
||||
<Text color={theme.text.primary}>
|
||||
The following extensions failed to migrate. Please try installing
|
||||
them manually. To see other changes, Qwen Code must be restarted.
|
||||
Press 'q' to quit.
|
||||
</Text>
|
||||
<Box flexDirection="column" marginTop={1} marginLeft={2}>
|
||||
{failedExtensions.map((failed) => (
|
||||
<Text key={failed}>- {failed}</Text>
|
||||
))}
|
||||
</Box>
|
||||
</>
|
||||
) : (
|
||||
<Text color={theme.text.primary}>
|
||||
Migration complete. To see changes, Qwen Code must be restarted.
|
||||
Press 'q' to quit.
|
||||
</Text>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Box
|
||||
flexDirection="column"
|
||||
borderStyle="round"
|
||||
borderColor={theme.border.default}
|
||||
padding={1}
|
||||
>
|
||||
<Text bold color={theme.text.primary}>
|
||||
Workspace-level extensions are deprecated{'\n'}
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
Would you like to install them at the user level?
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
The extension definition will remain in your workspace directory.
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
If you opt to skip, you can install them manually using the extensions
|
||||
install command.
|
||||
</Text>
|
||||
|
||||
<Box flexDirection="column" marginTop={1} marginLeft={2}>
|
||||
{workspaceExtensions.map((extension) => (
|
||||
<Text key={extension.config.name}>- {extension.config.name}</Text>
|
||||
))}
|
||||
</Box>
|
||||
<Box marginTop={1}>
|
||||
<RadioButtonSelect
|
||||
items={[
|
||||
{ label: 'Install all', value: 'migrate', key: 'migrate' },
|
||||
{ label: 'Skip', value: 'skip', key: 'skip' },
|
||||
]}
|
||||
onSelect={(value: string) => {
|
||||
if (value === 'migrate') {
|
||||
onMigrate();
|
||||
} else {
|
||||
onClose();
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</Box>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -23,7 +23,7 @@ export const InfoMessage: React.FC<InfoMessageProps> = ({ text }) => {
|
||||
const prefixWidth = prefix.length;
|
||||
|
||||
return (
|
||||
<Box flexDirection="row" marginTop={1}>
|
||||
<Box flexDirection="row" marginBottom={1}>
|
||||
<Box width={prefixWidth}>
|
||||
<Text color={theme.status.warning}>{prefix}</Text>
|
||||
</Box>
|
||||
|
||||
@@ -18,7 +18,7 @@ export const WarningMessage: React.FC<WarningMessageProps> = ({ text }) => {
|
||||
const prefixWidth = 3;
|
||||
|
||||
return (
|
||||
<Box flexDirection="row" marginTop={1}>
|
||||
<Box flexDirection="row" marginBottom={1}>
|
||||
<Box width={prefixWidth}>
|
||||
<Text color={Colors.AccentYellow}>{prefix}</Text>
|
||||
</Box>
|
||||
|
||||
@@ -58,7 +58,11 @@ export const ActionSelectionStep = ({
|
||||
},
|
||||
];
|
||||
|
||||
const actions = selectedAgent?.isBuiltin
|
||||
// Extension-level agents are also read-only (like builtin)
|
||||
const isReadOnly =
|
||||
selectedAgent?.isBuiltin || selectedAgent?.level === 'extension';
|
||||
|
||||
const actions = isReadOnly
|
||||
? allActions.filter(
|
||||
(action) => action.value === 'view' || action.value === 'back',
|
||||
)
|
||||
|
||||
@@ -12,10 +12,11 @@ import { type SubagentConfig } from '@qwen-code/qwen-code-core';
|
||||
import { t } from '../../../../i18n/index.js';
|
||||
|
||||
interface NavigationState {
|
||||
currentBlock: 'project' | 'user' | 'builtin';
|
||||
currentBlock: 'project' | 'user' | 'builtin' | 'extension';
|
||||
projectIndex: number;
|
||||
userIndex: number;
|
||||
builtinIndex: number;
|
||||
extensionIndex: number;
|
||||
}
|
||||
|
||||
interface AgentSelectionStepProps {
|
||||
@@ -32,6 +33,7 @@ export const AgentSelectionStep = ({
|
||||
projectIndex: 0,
|
||||
userIndex: 0,
|
||||
builtinIndex: 0,
|
||||
extensionIndex: 0,
|
||||
});
|
||||
|
||||
// Group agents by level
|
||||
@@ -47,6 +49,10 @@ export const AgentSelectionStep = ({
|
||||
() => availableAgents.filter((agent) => agent.level === 'builtin'),
|
||||
[availableAgents],
|
||||
);
|
||||
const extensionAgents = useMemo(
|
||||
() => availableAgents.filter((agent) => agent.level === 'extension'),
|
||||
[availableAgents],
|
||||
);
|
||||
const projectNames = useMemo(
|
||||
() => new Set(projectAgents.map((agent) => agent.name)),
|
||||
[projectAgents],
|
||||
@@ -60,8 +66,10 @@ export const AgentSelectionStep = ({
|
||||
setNavigation((prev) => ({ ...prev, currentBlock: 'user' }));
|
||||
} else if (builtinAgents.length > 0) {
|
||||
setNavigation((prev) => ({ ...prev, currentBlock: 'builtin' }));
|
||||
} else if (extensionAgents.length > 0) {
|
||||
setNavigation((prev) => ({ ...prev, currentBlock: 'extension' }));
|
||||
}
|
||||
}, [projectAgents, userAgents, builtinAgents]);
|
||||
}, [projectAgents, userAgents, builtinAgents, extensionAgents]);
|
||||
|
||||
// Custom keyboard navigation
|
||||
useKeypress(
|
||||
@@ -87,6 +95,13 @@ export const AgentSelectionStep = ({
|
||||
currentBlock: 'user',
|
||||
userIndex: userAgents.length - 1,
|
||||
};
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to last item in extension block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'extension',
|
||||
extensionIndex: extensionAgents.length - 1,
|
||||
};
|
||||
} else {
|
||||
// Wrap to last item in project block
|
||||
return { ...prev, projectIndex: projectAgents.length - 1 };
|
||||
@@ -108,11 +123,18 @@ export const AgentSelectionStep = ({
|
||||
currentBlock: 'builtin',
|
||||
builtinIndex: builtinAgents.length - 1,
|
||||
};
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to last item in extension block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'extension',
|
||||
extensionIndex: extensionAgents.length - 1,
|
||||
};
|
||||
} else {
|
||||
// Wrap to last item in user block
|
||||
return { ...prev, userIndex: userAgents.length - 1 };
|
||||
}
|
||||
} else {
|
||||
} else if (prev.currentBlock === 'builtin') {
|
||||
// builtin block
|
||||
if (prev.builtinIndex > 0) {
|
||||
return { ...prev, builtinIndex: prev.builtinIndex - 1 };
|
||||
@@ -130,10 +152,46 @@ export const AgentSelectionStep = ({
|
||||
currentBlock: 'project',
|
||||
projectIndex: projectAgents.length - 1,
|
||||
};
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to last item in extension block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'extension',
|
||||
extensionIndex: extensionAgents.length - 1,
|
||||
};
|
||||
} else {
|
||||
// Wrap to last item in builtin block
|
||||
return { ...prev, builtinIndex: builtinAgents.length - 1 };
|
||||
}
|
||||
} else {
|
||||
// extension block
|
||||
if (prev.extensionIndex > 0) {
|
||||
return { ...prev, extensionIndex: prev.extensionIndex - 1 };
|
||||
} else if (userAgents.length > 0) {
|
||||
// Move to last item in user block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'user',
|
||||
userIndex: userAgents.length - 1,
|
||||
};
|
||||
} else if (projectAgents.length > 0) {
|
||||
// Move to last item in project block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'project',
|
||||
projectIndex: projectAgents.length - 1,
|
||||
};
|
||||
} else if (builtinAgents.length > 0) {
|
||||
// Move to last item in builtin block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'builtin',
|
||||
builtinIndex: builtinAgents.length - 1,
|
||||
};
|
||||
} else {
|
||||
// Wrap to last item in extension block
|
||||
return { ...prev, extensionIndex: extensionAgents.length - 1 };
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (name === 'down' || name === 'j') {
|
||||
@@ -147,6 +205,9 @@ export const AgentSelectionStep = ({
|
||||
} else if (builtinAgents.length > 0) {
|
||||
// Move to first item in builtin block
|
||||
return { ...prev, currentBlock: 'builtin', builtinIndex: 0 };
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to first item in extension block
|
||||
return { ...prev, currentBlock: 'extension', extensionIndex: 0 };
|
||||
} else {
|
||||
// Wrap to first item in project block
|
||||
return { ...prev, projectIndex: 0 };
|
||||
@@ -157,6 +218,9 @@ export const AgentSelectionStep = ({
|
||||
} else if (builtinAgents.length > 0) {
|
||||
// Move to first item in builtin block
|
||||
return { ...prev, currentBlock: 'builtin', builtinIndex: 0 };
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to first item in extension block
|
||||
return { ...prev, currentBlock: 'extension', extensionIndex: 0 };
|
||||
} else if (projectAgents.length > 0) {
|
||||
// Move to first item in project block
|
||||
return { ...prev, currentBlock: 'project', projectIndex: 0 };
|
||||
@@ -164,10 +228,13 @@ export const AgentSelectionStep = ({
|
||||
// Wrap to first item in user block
|
||||
return { ...prev, userIndex: 0 };
|
||||
}
|
||||
} else {
|
||||
} else if (prev.currentBlock === 'builtin') {
|
||||
// builtin block
|
||||
if (prev.builtinIndex < builtinAgents.length - 1) {
|
||||
return { ...prev, builtinIndex: prev.builtinIndex + 1 };
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to first item in extension block
|
||||
return { ...prev, currentBlock: 'extension', extensionIndex: 0 };
|
||||
} else if (projectAgents.length > 0) {
|
||||
// Move to first item in project block
|
||||
return { ...prev, currentBlock: 'project', projectIndex: 0 };
|
||||
@@ -178,6 +245,23 @@ export const AgentSelectionStep = ({
|
||||
// Wrap to first item in builtin block
|
||||
return { ...prev, builtinIndex: 0 };
|
||||
}
|
||||
} else {
|
||||
// extension block
|
||||
if (prev.extensionIndex < extensionAgents.length - 1) {
|
||||
return { ...prev, extensionIndex: prev.extensionIndex + 1 };
|
||||
} else if (projectAgents.length > 0) {
|
||||
// Move to first item in project block
|
||||
return { ...prev, currentBlock: 'project', projectIndex: 0 };
|
||||
} else if (userAgents.length > 0) {
|
||||
// Move to first item in user block
|
||||
return { ...prev, currentBlock: 'user', userIndex: 0 };
|
||||
} else if (builtinAgents.length > 0) {
|
||||
// Move to first item in builtin block
|
||||
return { ...prev, currentBlock: 'builtin', builtinIndex: 0 };
|
||||
} else {
|
||||
// Wrap to first item in extension block
|
||||
return { ...prev, extensionIndex: 0 };
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (name === 'return' || name === 'space') {
|
||||
@@ -188,11 +272,17 @@ export const AgentSelectionStep = ({
|
||||
} else if (navigation.currentBlock === 'user') {
|
||||
// User agents come after project agents in the availableAgents array
|
||||
globalIndex = projectAgents.length + navigation.userIndex;
|
||||
} else {
|
||||
// builtin block
|
||||
} else if (navigation.currentBlock === 'builtin') {
|
||||
// Builtin agents come after project and user agents in the availableAgents array
|
||||
globalIndex =
|
||||
projectAgents.length + userAgents.length + navigation.builtinIndex;
|
||||
} else {
|
||||
// Extension agents come after project, user, and builtin agents
|
||||
globalIndex =
|
||||
projectAgents.length +
|
||||
userAgents.length +
|
||||
builtinAgents.length +
|
||||
navigation.extensionIndex;
|
||||
}
|
||||
|
||||
if (globalIndex >= 0 && globalIndex < availableAgents.length) {
|
||||
@@ -218,7 +308,7 @@ export const AgentSelectionStep = ({
|
||||
const renderAgentItem = (
|
||||
agent: {
|
||||
name: string;
|
||||
level: 'project' | 'user' | 'builtin' | 'session';
|
||||
level: 'project' | 'user' | 'builtin' | 'session' | 'extension';
|
||||
isBuiltin?: boolean;
|
||||
},
|
||||
index: number,
|
||||
@@ -258,7 +348,8 @@ export const AgentSelectionStep = ({
|
||||
const enabledAgentsCount =
|
||||
projectAgents.length +
|
||||
userAgents.filter((agent) => !projectNames.has(agent.name)).length +
|
||||
builtinAgents.length;
|
||||
builtinAgents.length +
|
||||
extensionAgents.length;
|
||||
|
||||
return (
|
||||
<Box flexDirection="column">
|
||||
@@ -305,7 +396,10 @@ export const AgentSelectionStep = ({
|
||||
|
||||
{/* Built-in Agents */}
|
||||
{builtinAgents.length > 0 && (
|
||||
<Box flexDirection="column">
|
||||
<Box
|
||||
flexDirection="column"
|
||||
marginBottom={extensionAgents.length > 0 ? 1 : 0}
|
||||
>
|
||||
<Text color={theme.text.primary} bold>
|
||||
{t('Built-in Agents')}
|
||||
</Text>
|
||||
@@ -320,10 +414,28 @@ export const AgentSelectionStep = ({
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{/* Extension Agents */}
|
||||
{extensionAgents.length > 0 && (
|
||||
<Box flexDirection="column">
|
||||
<Text color={theme.text.primary} bold>
|
||||
{t('Extension Agents')}
|
||||
</Text>
|
||||
<Box marginTop={1} flexDirection="column">
|
||||
{extensionAgents.map((agent, index) => {
|
||||
const isSelected =
|
||||
navigation.currentBlock === 'extension' &&
|
||||
navigation.extensionIndex === index;
|
||||
return renderAgentItem(agent, index, isSelected);
|
||||
})}
|
||||
</Box>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{/* Agent count summary */}
|
||||
{(projectAgents.length > 0 ||
|
||||
userAgents.length > 0 ||
|
||||
builtinAgents.length > 0) && (
|
||||
builtinAgents.length > 0 ||
|
||||
extensionAgents.length > 0) && (
|
||||
<Box marginTop={1}>
|
||||
<Text color={theme.text.secondary}>
|
||||
{t('Using: {{count}} agents', {
|
||||
|
||||
@@ -95,7 +95,11 @@ export function AgentsManagerDialog({
|
||||
|
||||
try {
|
||||
const subagentManager = config.getSubagentManager();
|
||||
await subagentManager.deleteSubagent(agent.name, agent.level);
|
||||
await subagentManager.deleteSubagent(
|
||||
agent.name,
|
||||
agent.level,
|
||||
agent.extensionName,
|
||||
);
|
||||
|
||||
// Reload agents to get updated state
|
||||
await loadAgents();
|
||||
|
||||
@@ -18,7 +18,6 @@ const mockUseUIState = vi.mocked(useUIState);
|
||||
const mockExtensions = [
|
||||
{ name: 'ext-one', version: '1.0.0', isActive: true },
|
||||
{ name: 'ext-two', version: '2.1.0', isActive: true },
|
||||
{ name: 'ext-disabled', version: '3.0.0', isActive: false },
|
||||
];
|
||||
|
||||
describe('<ExtensionsList />', () => {
|
||||
@@ -29,7 +28,6 @@ describe('<ExtensionsList />', () => {
|
||||
const mockUIState = (
|
||||
extensions: unknown[],
|
||||
extensionsUpdateState: Map<string, ExtensionUpdateState>,
|
||||
disabledExtensions: string[] = [],
|
||||
) => {
|
||||
mockUseUIState.mockReturnValue({
|
||||
commandContext: createMockCommandContext({
|
||||
@@ -37,13 +35,6 @@ describe('<ExtensionsList />', () => {
|
||||
config: {
|
||||
getExtensions: () => extensions,
|
||||
},
|
||||
settings: {
|
||||
merged: {
|
||||
extensions: {
|
||||
disabled: disabledExtensions,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
extensionsUpdateState,
|
||||
@@ -58,12 +49,11 @@ describe('<ExtensionsList />', () => {
|
||||
});
|
||||
|
||||
it('should render a list of extensions with their version and status', () => {
|
||||
mockUIState(mockExtensions, new Map(), ['ext-disabled']);
|
||||
mockUIState(mockExtensions, new Map());
|
||||
const { lastFrame } = render(<ExtensionsList />);
|
||||
const output = lastFrame();
|
||||
expect(output).toContain('ext-one (v1.0.0) - active');
|
||||
expect(output).toContain('ext-two (v2.1.0) - active');
|
||||
expect(output).toContain('ext-disabled (v3.0.0) - disabled');
|
||||
});
|
||||
|
||||
it('should display "unknown state" if an extension has no update state', () => {
|
||||
|
||||
@@ -9,12 +9,10 @@ import { useUIState } from '../../contexts/UIStateContext.js';
|
||||
import { ExtensionUpdateState } from '../../state/extensions.js';
|
||||
|
||||
export const ExtensionsList = () => {
|
||||
const { commandContext, extensionsUpdateState } = useUIState();
|
||||
const allExtensions = commandContext.services.config!.getExtensions();
|
||||
const settings = commandContext.services.settings;
|
||||
const disabledExtensions = settings.merged.extensions?.disabled ?? [];
|
||||
const { extensionsUpdateState, commandContext } = useUIState();
|
||||
const extensions = commandContext.services.config?.getExtensions() || [];
|
||||
|
||||
if (allExtensions.length === 0) {
|
||||
if (extensions.length === 0) {
|
||||
return <Text>No extensions installed.</Text>;
|
||||
}
|
||||
|
||||
@@ -22,10 +20,11 @@ export const ExtensionsList = () => {
|
||||
<Box flexDirection="column" marginTop={1} marginBottom={1}>
|
||||
<Text>Installed extensions:</Text>
|
||||
<Box flexDirection="column" paddingLeft={2}>
|
||||
{allExtensions.map((ext) => {
|
||||
{extensions.map((ext) => {
|
||||
const state = extensionsUpdateState.get(ext.name);
|
||||
const isActive = !disabledExtensions.includes(ext.name);
|
||||
const isActive = ext.isActive;
|
||||
const activeString = isActive ? 'active' : 'disabled';
|
||||
const activeColor = isActive ? 'green' : 'grey';
|
||||
|
||||
let stateColor = 'gray';
|
||||
const stateText = state || 'unknown state';
|
||||
@@ -44,6 +43,7 @@ export const ExtensionsList = () => {
|
||||
break;
|
||||
case ExtensionUpdateState.UP_TO_DATE:
|
||||
case ExtensionUpdateState.NOT_UPDATABLE:
|
||||
case ExtensionUpdateState.UPDATED:
|
||||
stateColor = 'green';
|
||||
break;
|
||||
default:
|
||||
@@ -52,12 +52,22 @@ export const ExtensionsList = () => {
|
||||
}
|
||||
|
||||
return (
|
||||
<Box key={ext.name}>
|
||||
<Box key={ext.name} flexDirection="column" marginBottom={1}>
|
||||
<Text>
|
||||
<Text color="cyan">{`${ext.name} (v${ext.version})`}</Text>
|
||||
{` - ${activeString}`}
|
||||
<Text color={activeColor}>{` - ${activeString}`}</Text>
|
||||
{<Text color={stateColor}>{` (${stateText})`}</Text>}
|
||||
</Text>
|
||||
{ext.resolvedSettings && ext.resolvedSettings.length > 0 && (
|
||||
<Box flexDirection="column" paddingLeft={2}>
|
||||
<Text>settings:</Text>
|
||||
{ext.resolvedSettings.map((setting) => (
|
||||
<Text key={setting.name}>
|
||||
- {setting.name}: {setting.value}
|
||||
</Text>
|
||||
))}
|
||||
</Box>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
})}
|
||||
|
||||
packages/cli/src/ui/components/views/SkillsList.tsx (new file, 36 lines)
@@ -0,0 +1,36 @@
/**
 * @license
 * Copyright 2025 Qwen
 * SPDX-License-Identifier: Apache-2.0
 */

import type React from 'react';
import { Box, Text } from 'ink';
import { theme } from '../../semantic-colors.js';
import { type SkillDefinition } from '../../types.js';
import { t } from '../../../i18n/index.js';

interface SkillsListProps {
  skills: readonly SkillDefinition[];
}

export const SkillsList: React.FC<SkillsListProps> = ({ skills }) => (
  <Box flexDirection="column" marginBottom={1}>
    <Text bold color={theme.text.primary}>
      {t('Available skills:')}
    </Text>
    <Box height={1} />
    {skills.length > 0 ? (
      skills.map((skill) => (
        <Box key={skill.name} flexDirection="row">
          <Text color={theme.text.primary}>{' '}- </Text>
          <Text bold color={theme.text.accent}>
            {skill.name}
          </Text>
        </Box>
      ))
    ) : (
      <Text color={theme.text.primary}> {t('No skills available')}</Text>
    )}
  </Box>
);
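A quick way to exercise the new `<SkillsList />` view is the same frame-based rendering the neighbouring component tests appear to use; this sketch assumes `ink-testing-library` and placeholder skill names:

```tsx
import { render } from 'ink-testing-library';
import { SkillsList } from './SkillsList.js';

// Render to a plain-text frame and inspect the output.
const { lastFrame } = render(
  <SkillsList skills={[{ name: 'pdf' }, { name: 'xlsx' }]} />,
);
// Expected frame: "Available skills:" followed by "- pdf" and "- xlsx".
console.log(lastFrame());
```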
@@ -7,6 +7,7 @@
|
||||
import { createContext, useContext } from 'react';
|
||||
import { type Key } from '../hooks/useKeypress.js';
|
||||
import { type IdeIntegrationNudgeResult } from '../IdeIntegrationNudge.js';
|
||||
import { type CommandMigrationNudgeResult } from '../CommandFormatMigrationNudge.js';
|
||||
import { type FolderTrustChoice } from '../components/FolderTrustDialog.js';
|
||||
import {
|
||||
type AuthType,
|
||||
@@ -46,14 +47,13 @@ export interface UIActions {
|
||||
setShellModeActive: (value: boolean) => void;
|
||||
vimHandleInput: (key: Key) => boolean;
|
||||
handleIdePromptComplete: (result: IdeIntegrationNudgeResult) => void;
|
||||
handleCommandMigrationComplete: (result: CommandMigrationNudgeResult) => void;
|
||||
handleFolderTrustSelect: (choice: FolderTrustChoice) => void;
|
||||
setConstrainHeight: (value: boolean) => void;
|
||||
onEscapePromptChange: (show: boolean) => void;
|
||||
refreshStatic: () => void;
|
||||
handleFinalSubmit: (value: string) => void;
|
||||
handleClearScreen: () => void;
|
||||
onWorkspaceMigrationDialogOpen: () => void;
|
||||
onWorkspaceMigrationDialogClose: () => void;
|
||||
// Vision switch dialog
|
||||
handleVisionSwitchSelect: (outcome: VisionSwitchOutcome) => void;
|
||||
// Welcome back dialog
|
||||
|
||||
@@ -72,6 +72,8 @@ export interface UIState {
|
||||
suggestionsWidth: number;
|
||||
isInputActive: boolean;
|
||||
shouldShowIdePrompt: boolean;
|
||||
shouldShowCommandMigrationNudge: boolean;
|
||||
commandMigrationTomlFiles: string[];
|
||||
isFolderTrustDialogOpen: boolean;
|
||||
isTrustedFolder: boolean | undefined;
|
||||
constrainHeight: boolean;
|
||||
@@ -87,9 +89,6 @@ export interface UIState {
|
||||
historyRemountKey: number;
|
||||
messageQueue: string[];
|
||||
showAutoAcceptIndicator: ApprovalMode;
|
||||
showWorkspaceMigrationDialog: boolean;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
workspaceExtensions: any[]; // Extension[]
|
||||
// Quota-related state
|
||||
currentModel: string;
|
||||
contextFileNames: string[];
|
||||
|
||||
packages/cli/src/ui/hooks/useCommandMigration.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { useEffect, useState } from 'react';
import { Storage } from '@qwen-code/qwen-code-core';
import { detectTomlCommands } from '../../services/command-migration-tool.js';
import type { LoadedSettings } from '../../config/settings.js';

/**
 * Hook to detect TOML command files and manage migration nudge visibility.
 * Checks all command directories: workspace, user, and global levels.
 */
export function useCommandMigration(
  settings: LoadedSettings,
  storage: Storage,
) {
  const [showMigrationNudge, setShowMigrationNudge] = useState(false);
  const [tomlFiles, setTomlFiles] = useState<string[]>([]);

  useEffect(() => {
    const checkTomlCommands = async () => {
      const allFiles: string[] = [];

      // Check workspace commands directory (.qwen/commands)
      const workspaceCommandsDir = storage.getProjectCommandsDir();
      const workspaceFiles = await detectTomlCommands(workspaceCommandsDir);
      allFiles.push(...workspaceFiles.map((f) => `workspace: ${f}`));

      // Check user commands directory (~/.qwen/commands)
      const userCommandsDir = Storage.getUserCommandsDir();
      const userFiles = await detectTomlCommands(userCommandsDir);
      allFiles.push(...userFiles.map((f) => `user: ${f}`));

      if (allFiles.length > 0) {
        setTomlFiles(allFiles);
        setShowMigrationNudge(true);
      }
    };

    checkTomlCommands();
  }, [storage]);

  return {
    showMigrationNudge,
    tomlFiles,
    setShowMigrationNudge,
  };
}
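The hook above only reports detection state; showing and dismissing the nudge is left to the caller. A rough sketch of a consuming wrapper, assuming `settings` and `storage` are provided by the existing config layer:

```ts
import type { Storage } from '@qwen-code/qwen-code-core';
import type { LoadedSettings } from '../../config/settings.js';
import { useCommandMigration } from './useCommandMigration.js';

// Hypothetical wrapper that exposes a dismiss handler alongside the raw state.
function useMigrationNudge(settings: LoadedSettings, storage: Storage) {
  const { showMigrationNudge, tomlFiles, setShowMigrationNudge } =
    useCommandMigration(settings, storage);
  const dismiss = () => setShowMigrationNudge(false);
  return { showMigrationNudge, tomlFiles, dismiss };
}
```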
@@ -4,26 +4,21 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { vi } from 'vitest';
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
ExtensionStorage,
|
||||
annotateActiveExtensions,
|
||||
loadExtension,
|
||||
} from '../../config/extension.js';
|
||||
import { createExtension } from '../../test-utils/createExtension.js';
|
||||
|
||||
import { useExtensionUpdates } from './useExtensionUpdates.js';
|
||||
import { QWEN_DIR, type GeminiCLIExtension } from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
QWEN_DIR,
|
||||
type ExtensionManager,
|
||||
type Extension,
|
||||
type ExtensionUpdateInfo,
|
||||
ExtensionUpdateState,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { MessageType } from '../types.js';
|
||||
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
|
||||
import {
|
||||
checkForAllExtensionUpdates,
|
||||
updateExtension,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { ExtensionUpdateState } from '../state/extensions.js';
|
||||
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const mockedOs = await importOriginal<typeof os>();
|
||||
@@ -33,63 +28,85 @@ vi.mock('os', async (importOriginal) => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../../config/extensions/update.js', () => ({
|
||||
checkForAllExtensionUpdates: vi.fn(),
|
||||
updateExtension: vi.fn(),
|
||||
}));
|
||||
function createMockExtension(overrides: Partial<Extension> = {}): Extension {
|
||||
return {
|
||||
id: 'test-extension-id',
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
path: '/some/path',
|
||||
isActive: true,
|
||||
config: {
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
},
|
||||
contextFiles: [],
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createMockExtensionManager(
|
||||
extensions: Extension[],
|
||||
checkCallback?: (
|
||||
callback: (extensionName: string, state: ExtensionUpdateState) => void,
|
||||
) => Promise<void>,
|
||||
updateResult?: ExtensionUpdateInfo | undefined,
|
||||
): ExtensionManager {
|
||||
return {
|
||||
getLoadedExtensions: vi.fn(() => extensions),
|
||||
checkForAllExtensionUpdates: vi.fn(
|
||||
async (
|
||||
callback: (extensionName: string, state: ExtensionUpdateState) => void,
|
||||
) => {
|
||||
if (checkCallback) {
|
||||
await checkCallback(callback);
|
||||
}
|
||||
},
|
||||
),
|
||||
updateExtension: vi.fn(async () => updateResult),
|
||||
} as unknown as ExtensionManager;
|
||||
}
|
||||
|
||||
describe('useExtensionUpdates', () => {
|
||||
let tempHomeDir: string;
|
||||
let userExtensionsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
|
||||
);
|
||||
tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'qwen-cli-test-home-'));
|
||||
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
|
||||
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
|
||||
fs.mkdirSync(userExtensionsDir, { recursive: true });
|
||||
vi.mocked(checkForAllExtensionUpdates).mockReset();
|
||||
vi.mocked(updateExtension).mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should check for updates and log a message if an update is available', async () => {
|
||||
const extensions = [
|
||||
{
|
||||
name: 'test-extension',
|
||||
const extension = createMockExtension({
|
||||
name: 'test-extension',
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
version: '1.0.0',
|
||||
path: '/some/path',
|
||||
isActive: true,
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
];
|
||||
});
|
||||
const addItem = vi.fn();
|
||||
const cwd = '/test/cwd';
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
const extensionManager = createMockExtensionManager(
|
||||
[extension],
|
||||
async (callback) => {
|
||||
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
);
|
||||
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
|
||||
);
|
||||
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(addItem).toHaveBeenCalledWith(
|
||||
@@ -103,43 +120,32 @@ describe('useExtensionUpdates', () => {
|
||||
});
|
||||
|
||||
it('should check for updates and automatically update if autoUpdate is true', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
const extension = createMockExtension({
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
source: 'https://some.git/repo',
|
||||
autoUpdate: true,
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[loadExtension({ extensionDir, workspaceDir: tempHomeDir })!],
|
||||
tempHomeDir,
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
const addItem = vi.fn();
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
const extensionManager = createMockExtensionManager(
|
||||
[extension],
|
||||
async (callback) => {
|
||||
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
{
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
name: 'test-extension',
|
||||
},
|
||||
);
|
||||
|
||||
vi.mocked(updateExtension).mockResolvedValue({
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
name: '',
|
||||
});
|
||||
|
||||
renderHook(() => useExtensionUpdates([extension], addItem, tempHomeDir));
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
|
||||
);
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
@@ -156,77 +162,64 @@ describe('useExtensionUpdates', () => {
|
||||
});
|
||||
|
||||
it('should batch update notifications for multiple extensions', async () => {
|
||||
const extensionDir1 = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
const extension1 = createMockExtension({
|
||||
id: 'test-extension-1-id',
|
||||
name: 'test-extension-1',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo1',
|
||||
type: 'git',
|
||||
source: 'https://some.git/repo1',
|
||||
autoUpdate: true,
|
||||
},
|
||||
});
|
||||
const extensionDir2 = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
const extension2 = createMockExtension({
|
||||
id: 'test-extension-2-id',
|
||||
name: 'test-extension-2',
|
||||
version: '2.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo2',
|
||||
type: 'git',
|
||||
source: 'https://some.git/repo2',
|
||||
autoUpdate: true,
|
||||
},
|
||||
});
|
||||
|
||||
const extensions = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: extensionDir1,
|
||||
workspaceDir: tempHomeDir,
|
||||
})!,
|
||||
loadExtension({
|
||||
extensionDir: extensionDir2,
|
||||
workspaceDir: tempHomeDir,
|
||||
})!,
|
||||
],
|
||||
tempHomeDir,
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
);
|
||||
|
||||
const addItem = vi.fn();
|
||||
let updateCallCount = 0;
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
const extensionManager = {
|
||||
getLoadedExtensions: vi.fn(() => [extension1, extension2]),
|
||||
checkForAllExtensionUpdates: vi.fn(
|
||||
async (
|
||||
callback: (
|
||||
extensionName: string,
|
||||
state: ExtensionUpdateState,
|
||||
) => void,
|
||||
) => {
|
||||
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
),
|
||||
updateExtension: vi.fn(async () => {
|
||||
updateCallCount++;
|
||||
if (updateCallCount === 1) {
|
||||
return {
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
name: 'test-extension-1',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension-2',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
originalVersion: '2.0.0',
|
||||
updatedVersion: '2.1.0',
|
||||
name: 'test-extension-2',
|
||||
};
|
||||
}),
|
||||
} as unknown as ExtensionManager;
|
||||
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
|
||||
);
|
||||
|
||||
vi.mocked(updateExtension)
|
||||
.mockResolvedValueOnce({
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
name: '',
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
originalVersion: '2.0.0',
|
||||
updatedVersion: '2.1.0',
|
||||
name: '',
|
||||
});
|
||||
|
||||
renderHook(() => useExtensionUpdates(extensions, addItem, tempHomeDir));
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(addItem).toHaveBeenCalledTimes(2);
|
||||
@@ -250,60 +243,40 @@ describe('useExtensionUpdates', () => {
|
||||
});
|
||||
|
||||
it('should batch update notifications for multiple extensions with autoUpdate: false', async () => {
|
||||
const extensions = [
|
||||
{
|
||||
name: 'test-extension-1',
|
||||
const extension1 = createMockExtension({
|
||||
id: 'test-extension-1-id',
|
||||
name: 'test-extension-1',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
version: '1.0.0',
|
||||
path: '/some/path1',
|
||||
isActive: true,
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo1',
|
||||
autoUpdate: false,
|
||||
},
|
||||
source: 'https://some/repo1',
|
||||
autoUpdate: false,
|
||||
},
|
||||
{
|
||||
name: 'test-extension-2',
|
||||
});
|
||||
const extension2 = createMockExtension({
|
||||
id: 'test-extension-2-id',
|
||||
name: 'test-extension-2',
|
||||
version: '2.0.0',
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
version: '2.0.0',
|
||||
path: '/some/path2',
|
||||
isActive: true,
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo2',
|
||||
autoUpdate: false,
|
||||
},
|
||||
source: 'https://some/repo2',
|
||||
autoUpdate: false,
|
||||
},
|
||||
];
|
||||
});
|
||||
|
||||
const addItem = vi.fn();
|
||||
const cwd = '/test/cwd';
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({ type: 'BATCH_CHECK_START' });
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension-1',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
const extensionManager = createMockExtensionManager(
|
||||
[extension1, extension2],
|
||||
async (callback) => {
|
||||
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
await new Promise((r) => setTimeout(r, 50));
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension-2',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
dispatch({ type: 'BATCH_CHECK_END' });
|
||||
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
);
|
||||
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
|
||||
);
|
||||
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(addItem).toHaveBeenCalledTimes(1);
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
|
||||
import type { ExtensionManager } from '@qwen-code/qwen-code-core';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import {
|
||||
ExtensionUpdateState,
|
||||
@@ -14,11 +14,6 @@ import {
|
||||
import { useCallback, useEffect, useMemo, useReducer } from 'react';
|
||||
import type { UseHistoryManagerReturn } from './useHistoryManager.js';
|
||||
import { MessageType, type ConfirmationRequest } from '../types.js';
|
||||
import {
|
||||
checkForAllExtensionUpdates,
|
||||
updateExtension,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { requestConsentInteractive } from '../../config/extension.js';
|
||||
import { checkExhaustive } from '../../utils/checks.js';
|
||||
|
||||
type ConfirmationRequestWrapper = {
|
||||
@@ -45,15 +40,7 @@ function confirmationRequestsReducer(
|
||||
}
|
||||
}
|
||||
|
||||
export const useExtensionUpdates = (
|
||||
extensions: GeminiCLIExtension[],
|
||||
addItem: UseHistoryManagerReturn['addItem'],
|
||||
cwd: string,
|
||||
) => {
|
||||
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
|
||||
extensionUpdatesReducer,
|
||||
initialExtensionUpdatesState,
|
||||
);
|
||||
export const useConfirmUpdateRequests = () => {
|
||||
const [
|
||||
confirmUpdateExtensionRequests,
|
||||
dispatchConfirmUpdateExtensionRequests,
|
||||
@@ -78,15 +65,52 @@ export const useExtensionUpdates = (
|
||||
},
|
||||
[dispatchConfirmUpdateExtensionRequests],
|
||||
);
|
||||
return {
|
||||
addConfirmUpdateExtensionRequest,
|
||||
confirmUpdateExtensionRequests,
|
||||
dispatchConfirmUpdateExtensionRequests,
|
||||
};
|
||||
};
|
||||
|
||||
export const useExtensionUpdates = (
|
||||
extensionManager: ExtensionManager,
|
||||
addItem: UseHistoryManagerReturn['addItem'],
|
||||
cwd: string,
|
||||
) => {
|
||||
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
|
||||
extensionUpdatesReducer,
|
||||
initialExtensionUpdatesState,
|
||||
);
|
||||
const extensions = extensionManager.getLoadedExtensions();
|
||||
|
||||
useEffect(() => {
|
||||
(async () => {
|
||||
await checkForAllExtensionUpdates(
|
||||
extensions,
|
||||
dispatchExtensionStateUpdate,
|
||||
const extensionsToCheck = extensions.filter((extension) => {
|
||||
const currentStatus = extensionsUpdateState.extensionStatuses.get(
|
||||
extension.name,
|
||||
);
|
||||
if (!currentStatus) return true;
|
||||
const currentState = currentStatus.status;
|
||||
return !currentState || currentState === ExtensionUpdateState.UNKNOWN;
|
||||
});
|
||||
if (extensionsToCheck.length === 0) return;
|
||||
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
|
||||
await extensionManager.checkForAllExtensionUpdates(
|
||||
(extensionName: string, state: ExtensionUpdateState) => {
|
||||
dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
});
|
||||
},
|
||||
);
|
||||
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
|
||||
})();
|
||||
}, [extensions, extensions.length, dispatchExtensionStateUpdate]);
|
||||
}, [
|
||||
extensions,
|
||||
extensionManager,
|
||||
extensionsUpdateState.extensionStatuses,
|
||||
dispatchExtensionStateUpdate,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (extensionsUpdateState.batchChecksInProgress > 0) {
|
||||
@@ -113,17 +137,17 @@ export const useExtensionUpdates = (
|
||||
});
|
||||
|
||||
if (extension.installMetadata?.autoUpdate) {
|
||||
updateExtension(
|
||||
extension,
|
||||
cwd,
|
||||
(description) =>
|
||||
requestConsentInteractive(
|
||||
description,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
),
|
||||
currentState.status,
|
||||
dispatchExtensionStateUpdate,
|
||||
)
|
||||
extensionManager
|
||||
.updateExtension(
|
||||
extension,
|
||||
currentState.status,
|
||||
(extensionName, state) => {
|
||||
dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
});
|
||||
},
|
||||
)
|
||||
.then((result) => {
|
||||
if (!result) return;
|
||||
addItem(
|
||||
@@ -157,13 +181,7 @@ export const useExtensionUpdates = (
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}, [
|
||||
extensions,
|
||||
extensionsUpdateState,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
addItem,
|
||||
cwd,
|
||||
]);
|
||||
}, [extensions, extensionManager, extensionsUpdateState, addItem, cwd]);
|
||||
|
||||
const extensionsUpdateStateComputed = useMemo(() => {
|
||||
const result = new Map<string, ExtensionUpdateState>();
|
||||
@@ -180,7 +198,5 @@ export const useExtensionUpdates = (
|
||||
extensionsUpdateState: extensionsUpdateStateComputed,
|
||||
extensionsUpdateStateInternal: extensionsUpdateState.extensionStatuses,
|
||||
dispatchExtensionStateUpdate,
|
||||
confirmUpdateExtensionRequests,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
};
|
||||
};
|
||||
|
||||
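After the refactor, the hook is driven by an `ExtensionManager` instead of a raw extension list. A hedged sketch of the new call shape; the manager, history callback, and working directory are assumed to come from the surrounding app container:

```ts
import type { ExtensionManager } from '@qwen-code/qwen-code-core';
import type { UseHistoryManagerReturn } from './useHistoryManager.js';
import { useExtensionUpdates } from './useExtensionUpdates.js';

function useExtensionStatuses(
  extensionManager: ExtensionManager,
  addItem: UseHistoryManagerReturn['addItem'],
  cwd: string,
) {
  const { extensionsUpdateState } = useExtensionUpdates(
    extensionManager,
    addItem,
    cwd,
  );
  // Map of extension name -> ExtensionUpdateState, ready for rendering.
  return extensionsUpdateState;
}
```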
@@ -573,6 +573,45 @@ describe('useSlashCompletion', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should map completion items with descriptions for argument suggestions', async () => {
|
||||
const mockCompletionFn = vi.fn().mockResolvedValue([
|
||||
{ value: 'pdf', description: 'Create PDF documents' },
|
||||
{ value: 'xlsx', description: 'Work with spreadsheets' },
|
||||
]);
|
||||
|
||||
const slashCommands = [
|
||||
createTestCommand({
|
||||
name: 'skills',
|
||||
description: 'List available skills',
|
||||
completion: mockCompletionFn,
|
||||
}),
|
||||
];
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useTestHarnessForSlashCompletion(
|
||||
true,
|
||||
'/skills ',
|
||||
slashCommands,
|
||||
mockCommandContext,
|
||||
),
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.suggestions).toEqual([
|
||||
{
|
||||
label: 'pdf',
|
||||
value: 'pdf',
|
||||
description: 'Create PDF documents',
|
||||
},
|
||||
{
|
||||
label: 'xlsx',
|
||||
value: 'xlsx',
|
||||
description: 'Work with spreadsheets',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
it('should call command.completion with an empty string when args start with a space', async () => {
|
||||
const mockCompletionFn = vi
|
||||
.fn()
|
||||
|
||||
@@ -9,6 +9,7 @@ import { AsyncFzf } from 'fzf';
|
||||
import type { Suggestion } from '../components/SuggestionsDisplay.js';
|
||||
import {
|
||||
CommandKind,
|
||||
type CommandCompletionItem,
|
||||
type CommandContext,
|
||||
type SlashCommand,
|
||||
} from '../commands/types.js';
|
||||
@@ -215,10 +216,9 @@ function useCommandSuggestions(
|
||||
)) || [];
|
||||
|
||||
if (!signal.aborted) {
|
||||
const finalSuggestions = results.map((s) => ({
|
||||
label: s,
|
||||
value: s,
|
||||
}));
|
||||
const finalSuggestions = results
|
||||
.map((item) => toSuggestion(item))
|
||||
.filter((suggestion): suggestion is Suggestion => !!suggestion);
|
||||
setSuggestions(finalSuggestions);
|
||||
setIsLoading(false);
|
||||
}
|
||||
@@ -310,6 +310,20 @@ function useCommandSuggestions(
|
||||
return { suggestions, isLoading };
|
||||
}
|
||||
|
||||
function toSuggestion(item: string | CommandCompletionItem): Suggestion | null {
|
||||
if (typeof item === 'string') {
|
||||
return { label: item, value: item };
|
||||
}
|
||||
if (!item.value) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
label: item.label ?? item.value,
|
||||
value: item.value,
|
||||
description: item.description,
|
||||
};
|
||||
}
|
||||
|
||||
function useCompletionPositions(
|
||||
query: string | null,
|
||||
parserResult: CommandParserResult,
|
||||
|
||||
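For reference, the `toSuggestion` normalizer above maps the two shapes a completion provider may now return, and drops items without a value:

```ts
// toSuggestion('pdf')
//   -> { label: 'pdf', value: 'pdf' }
// toSuggestion({ value: 'pdf', description: 'Create PDF documents' })
//   -> { label: 'pdf', value: 'pdf', description: 'Create PDF documents' }
// toSuggestion({ value: '', label: 'broken' })
//   -> null
```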
packages/cli/src/ui/hooks/useTomlMigration.ts (Normal file, 0 lines)
@@ -1,70 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { useState, useEffect } from 'react';
|
||||
import {
|
||||
type Extension,
|
||||
getWorkspaceExtensions,
|
||||
} from '../../config/extension.js';
|
||||
import { type LoadedSettings, SettingScope } from '../../config/settings.js';
|
||||
import process from 'node:process';
|
||||
|
||||
export function useWorkspaceMigration(settings: LoadedSettings) {
|
||||
const [showWorkspaceMigrationDialog, setShowWorkspaceMigrationDialog] =
|
||||
useState(false);
|
||||
const [workspaceExtensions, setWorkspaceExtensions] = useState<Extension[]>(
|
||||
[],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
// Default to true if not set.
|
||||
if (!(settings.merged.experimental?.extensionManagement ?? true)) {
|
||||
return;
|
||||
}
|
||||
const cwd = process.cwd();
|
||||
const extensions = getWorkspaceExtensions(cwd);
|
||||
if (
|
||||
extensions.length > 0 &&
|
||||
!settings.merged.extensions?.workspacesWithMigrationNudge?.includes(cwd)
|
||||
) {
|
||||
setWorkspaceExtensions(extensions);
|
||||
setShowWorkspaceMigrationDialog(true);
|
||||
console.log(settings.merged.extensions);
|
||||
}
|
||||
}, [
|
||||
settings.merged.extensions,
|
||||
settings.merged.experimental?.extensionManagement,
|
||||
]);
|
||||
|
||||
const onWorkspaceMigrationDialogOpen = () => {
|
||||
const userSettings = settings.forScope(SettingScope.User);
|
||||
const extensionSettings = userSettings.settings.extensions || {
|
||||
disabled: [],
|
||||
};
|
||||
const workspacesWithMigrationNudge =
|
||||
extensionSettings.workspacesWithMigrationNudge || [];
|
||||
|
||||
const cwd = process.cwd();
|
||||
if (!workspacesWithMigrationNudge.includes(cwd)) {
|
||||
workspacesWithMigrationNudge.push(cwd);
|
||||
}
|
||||
|
||||
extensionSettings.workspacesWithMigrationNudge =
|
||||
workspacesWithMigrationNudge;
|
||||
settings.setValue(SettingScope.User, 'extensions', extensionSettings);
|
||||
};
|
||||
|
||||
const onWorkspaceMigrationDialogClose = () => {
|
||||
setShowWorkspaceMigrationDialog(false);
|
||||
};
|
||||
|
||||
return {
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
};
|
||||
}
|
||||
@@ -10,6 +10,7 @@ export enum ExtensionUpdateState {
|
||||
CHECKING_FOR_UPDATES = 'checking for updates',
|
||||
UPDATED_NEEDS_RESTART = 'updated, needs restart',
|
||||
UPDATING = 'updating',
|
||||
UPDATED = 'updated',
|
||||
UPDATE_AVAILABLE = 'update available',
|
||||
UP_TO_DATE = 'up to date',
|
||||
ERROR = 'error',
|
||||
|
||||
@@ -201,12 +201,21 @@ export interface ToolDefinition {
description?: string;
}

export interface SkillDefinition {
name: string;
}

export type HistoryItemToolsList = HistoryItemBase & {
type: 'tools_list';
tools: ToolDefinition[];
showDescriptions: boolean;
};

export type HistoryItemSkillsList = HistoryItemBase & {
type: 'skills_list';
skills: SkillDefinition[];
};

// JSON-friendly types for using as a simple data model showing info about an
// MCP Server.
export interface JsonMcpTool {

@@ -268,6 +277,7 @@ export type HistoryItemWithoutId =
| HistoryItemCompression
| HistoryItemExtensionsList
| HistoryItemToolsList
| HistoryItemSkillsList
| HistoryItemMcpStatus;

export type HistoryItem = HistoryItemWithoutId & { id: number };

@@ -289,6 +299,7 @@ export enum MessageType {
SUMMARY = 'summary',
EXTENSIONS_LIST = 'extensions_list',
TOOLS_LIST = 'tools_list',
SKILLS_LIST = 'skills_list',
MCP_STATUS = 'mcp_status',
}

@@ -17,10 +17,16 @@
* resolveEnvVarsInString("URL: ${BASE_URL}/api") // Returns "URL: https://api.example.com/api"
* resolveEnvVarsInString("Missing: $UNDEFINED_VAR") // Returns "Missing: $UNDEFINED_VAR"
*/
export function resolveEnvVarsInString(value: string): string {
export function resolveEnvVarsInString(
value: string,
customEnv?: Record<string, string>,
): string {
const envVarRegex = /\$(?:(\w+)|{([^}]+)})/g; // Find $VAR_NAME or ${VAR_NAME}
return value.replace(envVarRegex, (match, varName1, varName2) => {
const varName = varName1 || varName2;
if (customEnv && typeof customEnv[varName] === 'string') {
return customEnv[varName];
}
if (process && process.env && typeof process.env[varName] === 'string') {
return process.env[varName]!;
}

@@ -47,8 +53,11 @@ export function resolveEnvVarsInString(value: string): string {
* };
* const resolved = resolveEnvVarsInObject(config);
*/
export function resolveEnvVarsInObject<T>(obj: T): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet());
export function resolveEnvVarsInObject<T>(
obj: T,
customEnv?: Record<string, string>,
): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet(), customEnv);
}

/**
@@ -61,6 +70,7 @@ export function resolveEnvVarsInObject<T>(obj: T): T {
function resolveEnvVarsInObjectInternal<T>(
obj: T,
visited: WeakSet<object>,
customEnv?: Record<string, string>,
): T {
if (
obj === null ||
@@ -72,7 +82,7 @@ function resolveEnvVarsInObjectInternal<T>(
}

if (typeof obj === 'string') {
return resolveEnvVarsInString(obj) as unknown as T;
return resolveEnvVarsInString(obj, customEnv) as unknown as T;
}

if (Array.isArray(obj)) {
@@ -84,7 +94,7 @@ function resolveEnvVarsInObjectInternal<T>(

visited.add(obj);
const result = obj.map((item) =>
resolveEnvVarsInObjectInternal(item, visited),
resolveEnvVarsInObjectInternal(item, visited, customEnv),
) as unknown as T;
visited.delete(obj);
return result;
@@ -101,7 +111,11 @@ function resolveEnvVarsInObjectInternal<T>(
const newObj = { ...obj } as T;
for (const key in newObj) {
if (Object.prototype.hasOwnProperty.call(newObj, key)) {
newObj[key] = resolveEnvVarsInObjectInternal(newObj[key], visited);
newObj[key] = resolveEnvVarsInObjectInternal(
newObj[key],
visited,
customEnv,
);
}
}
visited.delete(obj as object);
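The hunk above threads an optional `customEnv` map through the env-var resolver. A minimal usage sketch of the intended precedence (customEnv first, then `process.env`, otherwise the placeholder is left untouched); the import path and values are illustrative only:

```ts
import { resolveEnvVarsInString, resolveEnvVarsInObject } from './envVarResolver.js'; // hypothetical path

process.env['BASE_URL'] = 'https://api.example.com';
const custom = { BASE_URL: 'https://staging.example.com' };

// customEnv wins over process.env when both define the variable.
resolveEnvVarsInString('${BASE_URL}/api', custom); // 'https://staging.example.com/api'
resolveEnvVarsInString('${BASE_URL}/api');         // 'https://api.example.com/api'
resolveEnvVarsInString('$MISSING_VAR');            // '$MISSING_VAR' (left as-is)

// Objects and arrays are resolved recursively with the same precedence.
resolveEnvVarsInObject({ url: '${BASE_URL}/v1' }, custom);
// => { url: 'https://staging.example.com/v1' }
```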
@@ -117,8 +117,33 @@ describe('errors', () => {
expect(getErrorMessage(undefined)).toBe('undefined');
});

it('should handle objects', () => {
const obj = { message: 'test' };
it('should extract message from error-like objects', () => {
const obj = { message: 'test error message' };
expect(getErrorMessage(obj)).toBe('test error message');
});

it('should stringify plain objects without message property', () => {
const obj = { code: 500, details: 'internal error' };
expect(getErrorMessage(obj)).toBe(
'{"code":500,"details":"internal error"}',
);
});

it('should handle empty objects', () => {
expect(getErrorMessage({})).toBe('{}');
});

it('should handle objects with non-string message property', () => {
const obj = { message: 123 };
expect(getErrorMessage(obj)).toBe('{"message":123}');
});

it('should fallback to String() when toJSON returns undefined', () => {
const obj = {
toJSON() {
return undefined;
},
};
expect(getErrorMessage(obj)).toBe('[object Object]');
});
});

@@ -18,6 +18,29 @@ export function getErrorMessage(error: unknown): string {
if (error instanceof Error) {
return error.message;
}

// Handle objects with message property (error-like objects)
if (
error !== null &&
typeof error === 'object' &&
'message' in error &&
typeof (error as { message: unknown }).message === 'string'
) {
return (error as { message: string }).message;
}

// Handle plain objects by stringifying them
if (error !== null && typeof error === 'object') {
try {
const stringified = JSON.stringify(error);
// JSON.stringify can return undefined for objects with toJSON() returning undefined
return stringified ?? String(error);
} catch {
// If JSON.stringify fails (circular reference, etc.), fall back to String
return String(error);
}
}

return String(error);
}

@@ -10,6 +10,7 @@ import type {
type ContentGeneratorConfigSources,
resolveModelConfig,
type ModelConfigSourcesInput,
type ProviderModelConfig,
} from '@qwen-code/qwen-code-core';
import type { Settings } from '../config/settings.js';

@@ -81,6 +82,21 @@ export function resolveCliGenerationConfig(

const authType = selectedAuthType;

// Find modelProvider from settings.modelProviders based on authType and model
let modelProvider: ProviderModelConfig | undefined;
if (authType && settings.modelProviders) {
const providers = settings.modelProviders[authType];
if (providers && Array.isArray(providers)) {
// Try to find by requested model (from CLI or settings)
const requestedModel = argv.model || settings.model?.name;
if (requestedModel) {
modelProvider = providers.find((p) => p.id === requestedModel) as
| ProviderModelConfig
| undefined;
}
}
}

const configSources: ModelConfigSourcesInput = {
authType,
cli: {
@@ -96,6 +112,7 @@ export function resolveCliGenerationConfig(
| Partial<ContentGeneratorConfig>
| undefined,
},
modelProvider,
env,
};
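For context, the lookup above expects `settings.modelProviders` to be keyed by auth type, with entries identified by `id` and matched against `argv.model || settings.model?.name`. A hedged sketch of what such a settings fragment might look like; any field beyond those referenced in the diff (and the `openai` key itself) is an assumption:

```ts
// Illustrative only: shape inferred from the lookup logic above.
const settings = {
  model: { name: 'qwen3-coder-plus' },
  modelProviders: {
    openai: [
      {
        id: 'qwen3-coder-plus',            // matched against the requested model
        baseUrl: 'https://example.com/v1', // assumed field
        apiKeyEnv: 'OPENAI_API_KEY',       // assumed field
      },
    ],
  },
};
```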
@@ -360,10 +360,10 @@ export async function start_sandbox(
//
// note this can only be done with binary linked from gemini-cli repo
if (process.env['BUILD_SANDBOX']) {
if (!gcPath.includes('gemini-cli/packages/')) {
if (!gcPath.includes('qwen-code/packages/')) {
throw new FatalSandboxError(
'Cannot build sandbox using installed gemini binary; ' +
'run `npm link ./packages/cli` under gemini-cli repo to switch to linked binary.',
'Cannot build sandbox using installed Qwen Code binary; ' +
'run `npm link ./packages/cli` under the qwen-code repo to switch to linked binary.',
);
} else {
console.error('building sandbox ...');

@@ -11,7 +11,9 @@
"src/**/*.ts",
"src/**/*.tsx",
"src/**/*.json",
"./package.json"
"./package.json",
"../core/src/utils/toml-to-markdown-converter.test.ts",
"../core/src/utils/toml-to-markdown-converter.ts"
],
"exclude": [
"node_modules",

@@ -27,7 +27,6 @@
"@google/genai": "1.30.0",
"@modelcontextprotocol/sdk": "^1.25.1",
"@opentelemetry/api": "^1.9.0",
"async-mutex": "^0.5.0",
"@opentelemetry/exporter-logs-otlp-grpc": "^0.203.0",
"@opentelemetry/exporter-logs-otlp-http": "^0.203.0",
"@opentelemetry/exporter-metrics-otlp-grpc": "^0.203.0",
@@ -40,7 +39,9 @@
"@xterm/headless": "5.5.0",
"ajv": "^8.17.1",
"ajv-formats": "^3.0.0",
"async-mutex": "^0.5.0",
"chardet": "^2.1.0",
"chokidar": "^4.0.3",
"diff": "^7.0.0",
"dotenv": "^17.1.0",
"fast-levenshtein": "^2.0.6",

@@ -80,6 +80,10 @@ import {
type TelemetryTarget,
uiTelemetryService,
} from '../telemetry/index.js';
import {
ExtensionManager,
type Extension,
} from '../extension/extensionManager.js';

// Utils
import { shouldAttemptBrowserLaunch } from '../utils/browser.js';
@@ -102,6 +106,7 @@ import {
type ResumedSessionData,
} from '../services/sessionService.js';
import { randomUUID } from 'node:crypto';
import { loadServerHierarchicalMemory } from '../utils/memoryDiscovery.js';

import {
ModelsConfig,
@@ -198,20 +203,17 @@ export interface GitCoAuthorSettings {
email?: string;
}

export interface GeminiCLIExtension {
name: string;
version: string;
isActive: boolean;
path: string;
installMetadata?: ExtensionInstallMetadata;
}

export interface ExtensionInstallMetadata {
source: string;
type: 'git' | 'local' | 'link' | 'github-release';
type: 'git' | 'local' | 'link' | 'github-release' | 'marketplace';
releaseTag?: string; // Only present for github-release installs.
ref?: string;
autoUpdate?: boolean;
allowPreRelease?: boolean;
marketplace?: {
marketplaceSource: string;
pluginName: string;
};
}

export const DEFAULT_TRUNCATE_TOOL_OUTPUT_THRESHOLD = 25_000;
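The hunk above adds a `marketplace` install type and an optional `marketplace` block to `ExtensionInstallMetadata`. A hedged example of what install metadata for such an install might look like; the concrete URL and plugin name are made up for illustration:

```ts
// Illustrative only; values are hypothetical.
const installMetadata: ExtensionInstallMetadata = {
  source: 'https://example.com/acme-marketplace.git',
  type: 'marketplace',
  autoUpdate: false,
  marketplace: {
    marketplaceSource: 'https://example.com/acme-marketplace.git',
    pluginName: 'acme-plugin',
  },
};
```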
@@ -309,14 +311,15 @@ export interface ConfigParameters {
includeDirectories?: string[];
bugCommand?: BugCommandSettings;
model?: string;
extensionContextFilePaths?: string[];
outputLanguageFilePath?: string;
maxSessionTurns?: number;
sessionTokenLimit?: number;
experimentalSkills?: boolean;
experimentalZedIntegration?: boolean;
listExtensions?: boolean;
extensions?: GeminiCLIExtension[];
blockedMcpServers?: Array<{ name: string; extensionName: string }>;
overrideExtensions?: string[];
allowedMcpServers?: string[];
excludedMcpServers?: string[];
noBrowser?: boolean;
summarizeToolOutput?: Record<string, SummarizeToolOutputSettings>;
folderTrustFeature?: boolean;
@@ -331,6 +334,8 @@ export interface ConfigParameters {
generationConfigSources?: ContentGeneratorConfigSources;
cliVersion?: string;
loadMemoryFromIncludeDirectories?: boolean;
importFormat?: 'tree' | 'flat';
discoveryMaxDirs?: number;
chatRecording?: boolean;
// Web search providers
webSearch?: {
@@ -349,7 +354,6 @@ export interface ConfigParameters {
shouldUseNodePtyShell?: boolean;
skipNextSpeakerCheck?: boolean;
shellExecutionConfig?: ShellExecutionConfig;
extensionManagement?: boolean;
skipLoopDetection?: boolean;
vlmSwitchMode?: string;
truncateToolOutputThreshold?: number;
@@ -404,6 +408,7 @@ export class Config {
private toolRegistry!: ToolRegistry;
private promptRegistry!: PromptRegistry;
private subagentManager!: SubagentManager;
private extensionManager!: ExtensionManager;
private skillManager!: SkillManager;
private fileSystemService: FileSystemService;
private contentGeneratorConfig!: ContentGeneratorConfig;
@@ -429,6 +434,8 @@ export class Config {
private readonly toolCallCommand: string | undefined;
private readonly mcpServerCommand: string | undefined;
private mcpServers: Record<string, MCPServerConfig> | undefined;
private readonly allowedMcpServers?: string[];
private readonly excludedMcpServers?: string[];
private sessionSubagents: SubagentConfig[];
private userMemory: string;
private sdkMode: boolean;
@@ -451,11 +458,12 @@ export class Config {
private gitService: GitService | undefined = undefined;
private sessionService: SessionService | undefined = undefined;
private chatRecordingService: ChatRecordingService | undefined = undefined;
private extensionContextFilePaths: string[] = [];
private readonly checkpointing: boolean;
private readonly proxy: string | undefined;
private readonly cwd: string;
private readonly bugCommand: BugCommandSettings | undefined;
private readonly extensionContextFilePaths: string[];
private readonly outputLanguageFilePath?: string;
private readonly noBrowser: boolean;
private readonly folderTrustFeature: boolean;
private readonly folderTrust: boolean;
@@ -464,11 +472,8 @@ export class Config {
private readonly maxSessionTurns: number;
private readonly sessionTokenLimit: number;
private readonly listExtensions: boolean;
private readonly _extensions: GeminiCLIExtension[];
private readonly _blockedMcpServers: Array<{
name: string;
extensionName: string;
}>;
private readonly overrideExtensions?: string[];

private readonly summarizeToolOutput:
| Record<string, SummarizeToolOutputSettings>
| undefined;
@@ -477,6 +482,8 @@ export class Config {
private readonly experimentalSkills: boolean = false;
private readonly chatRecordingEnabled: boolean;
private readonly loadMemoryFromIncludeDirectories: boolean = false;
private readonly importFormat: 'tree' | 'flat';
private readonly discoveryMaxDirs: number;
private readonly webSearch?: {
provider: Array<{
type: 'tavily' | 'google' | 'dashscope';
@@ -493,7 +500,6 @@ export class Config {
private readonly shouldUseNodePtyShell: boolean;
private readonly skipNextSpeakerCheck: boolean;
private shellExecutionConfig: ShellExecutionConfig;
private readonly extensionManagement: boolean = true;
private readonly skipLoopDetection: boolean;
private readonly skipStartupContext: boolean;
private readonly vlmSwitchMode: string | undefined;
@@ -534,6 +540,8 @@ export class Config {
this.toolCallCommand = params.toolCallCommand;
this.mcpServerCommand = params.mcpServerCommand;
this.mcpServers = params.mcpServers;
this.allowedMcpServers = params.allowedMcpServers;
this.excludedMcpServers = params.excludedMcpServers;
this.sessionSubagents = params.sessionSubagents ?? [];
this.sdkMode = params.sdkMode ?? false;
this.userMemory = params.userMemory ?? '';
@@ -556,6 +564,7 @@ export class Config {
email: 'qwen-coder@alibabacloud.com',
};
this.usageStatisticsEnabled = params.usageStatisticsEnabled ?? true;
this.outputLanguageFilePath = params.outputLanguageFilePath;

this.fileFiltering = {
respectGitIgnore: params.fileFiltering?.respectGitIgnore ?? true,
@@ -569,15 +578,13 @@ export class Config {
this.cwd = params.cwd ?? process.cwd();
this.fileDiscoveryService = params.fileDiscoveryService ?? null;
this.bugCommand = params.bugCommand;
this.extensionContextFilePaths = params.extensionContextFilePaths ?? [];
this.maxSessionTurns = params.maxSessionTurns ?? -1;
this.sessionTokenLimit = params.sessionTokenLimit ?? -1;
this.experimentalZedIntegration =
params.experimentalZedIntegration ?? false;
this.experimentalSkills = params.experimentalSkills ?? false;
this.listExtensions = params.listExtensions ?? false;
this._extensions = params.extensions ?? [];
this._blockedMcpServers = params.blockedMcpServers ?? [];
this.overrideExtensions = params.overrideExtensions;
this.noBrowser = params.noBrowser ?? false;
this.summarizeToolOutput = params.summarizeToolOutput;
this.folderTrustFeature = params.folderTrustFeature ?? false;
@@ -590,6 +597,8 @@ export class Config {

this.loadMemoryFromIncludeDirectories =
params.loadMemoryFromIncludeDirectories ?? false;
this.importFormat = params.importFormat ?? 'tree';
this.discoveryMaxDirs = params.discoveryMaxDirs ?? 200;
this.chatCompression = params.chatCompression;
this.interactive = params.interactive ?? false;
this.trustedFolder = params.trustedFolder;
@@ -615,7 +624,6 @@ export class Config {
params.truncateToolOutputLines ?? DEFAULT_TRUNCATE_TOOL_OUTPUT_LINES;
this.enableToolOutputTruncation = params.enableToolOutputTruncation ?? true;
this.useSmartEdit = params.useSmartEdit ?? false;
this.extensionManagement = params.extensionManagement ?? true;
this.channel = params.channel;
this.storage = new Storage(this.targetDir);
this.vlmSwitchMode = params.vlmSwitchMode;
@@ -653,6 +661,11 @@ export class Config {
this.chatRecordingService = this.chatRecordingEnabled
? new ChatRecordingService(this)
: undefined;
this.extensionManager = new ExtensionManager({
workspaceDir: this.targetDir,
enabledExtensionOverrides: this.overrideExtensions,
isWorkspaceTrusted: this.isTrustedFolder(),
});
}

/**
@@ -671,14 +684,28 @@ export class Config {
await this.getGitService();
}
this.promptRegistry = new PromptRegistry();
this.extensionManager.setConfig(this);
await this.extensionManager.refreshCache();

this.subagentManager = new SubagentManager(this);
this.skillManager = new SkillManager(this);
await this.skillManager.startWatching();

// Load session subagents if they were provided before initialization
if (this.sessionSubagents.length > 0) {
this.subagentManager.loadSessionSubagents(this.sessionSubagents);
}

await this.extensionManager.refreshCache();
const activeExtensions = await this.extensionManager
.getLoadedExtensions()
.filter((e) => e.isActive);
this.extensionContextFilePaths = activeExtensions.flatMap(
(e) => e.contextFiles,
);

await this.refreshHierarchicalMemory();

this.toolRegistry = await this.createToolRegistry(
options?.sendSdkMcpMessage,
);
@@ -688,6 +715,24 @@ export class Config {
logStartSession(this, new StartSessionEvent(this));
}

async refreshHierarchicalMemory(): Promise<void> {
const { memoryContent, fileCount } = await loadServerHierarchicalMemory(
this.getWorkingDir(),
this.shouldLoadMemoryFromIncludeDirectories()
? this.getWorkspaceContext().getDirectories()
: [],
this.getDebugMode(),
this.getFileService(),
this.getExtensionContextFilePaths(),
this.getFolderTrust(),
this.getImportFormat(),
this.getFileFilteringOptions(),
this.getDiscoveryMaxDirs(),
);
this.setUserMemory(memoryContent);
this.setGeminiMdFileCount(fileCount);
}

getContentGenerator(): ContentGenerator {
return this.contentGenerator;
}
@@ -773,6 +818,13 @@ export class Config {
return this.sessionId;
}

/**
* Releases resources owned by the config instance.
*/
async shutdown(): Promise<void> {
this.skillManager?.stopWatching();
}

/**
* Starts a new session and resets session-scoped services.
*/
@@ -802,6 +854,14 @@ export class Config {
return this.loadMemoryFromIncludeDirectories;
}

getImportFormat(): 'tree' | 'flat' {
return this.importFormat;
}

getDiscoveryMaxDirs(): number {
return this.discoveryMaxDirs;
}

getContentGeneratorConfig(): ContentGeneratorConfig {
return this.contentGeneratorConfig;
}
@@ -995,7 +1055,14 @@ export class Config {
}

getExcludeTools(): string[] | undefined {
return this.excludeTools;
const allExcludeTools = new Set(this.excludeTools || []);
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
for (const tool of extension.config.excludeTools || []) {
allExcludeTools.add(tool);
}
}
return [...allExcludeTools];
}

getToolDiscoveryCommand(): string | undefined {
@@ -1011,7 +1078,37 @@ export class Config {
}

getMcpServers(): Record<string, MCPServerConfig> | undefined {
return this.mcpServers;
let mcpServers = { ...(this.mcpServers || {}) };
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) return;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}

if (this.allowedMcpServers) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) =>
this.allowedMcpServers?.includes(key),
),
);
}

if (this.excludedMcpServers) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(
([key]) => !this.excludedMcpServers?.includes(key),
),
);
}

return mcpServers;
}

addMcpServers(servers: Record<string, MCPServerConfig>): void {
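The rewritten `getMcpServers` above layers extension-provided servers under explicitly configured ones, then applies the allow and exclude filters. A standalone sketch of that precedence (illustrative only, not the class method itself):

```ts
// Standalone sketch of the merge/filter order implemented above.
function mergeMcpServers(
  configured: Record<string, object>,
  fromExtensions: Record<string, object>,
  allowed?: string[],
  excluded?: string[],
): Record<string, object> {
  // Explicitly configured servers win; extension servers only fill gaps.
  let merged: Record<string, object> = { ...fromExtensions, ...configured };
  if (allowed) {
    merged = Object.fromEntries(
      Object.entries(merged).filter(([name]) => allowed.includes(name)),
    );
  }
  if (excluded) {
    merged = Object.fromEntries(
      Object.entries(merged).filter(([name]) => !excluded.includes(name)),
    );
  }
  return merged;
}
```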
@@ -1190,7 +1287,10 @@ export class Config {
}

getExtensionContextFilePaths(): string[] {
return this.extensionContextFilePaths;
return [
...this.extensionContextFilePaths,
...(this.outputLanguageFilePath ? [this.outputLanguageFilePath] : []),
];
}

getExperimentalZedIntegration(): boolean {
@@ -1205,16 +1305,54 @@ export class Config {
return this.listExtensions;
}

getExtensionManagement(): boolean {
return this.extensionManagement;
getExtensionManager(): ExtensionManager {
return this.extensionManager;
}

getExtensions(): GeminiCLIExtension[] {
return this._extensions;
getExtensions(): Extension[] {
const extensions = this.extensionManager.getLoadedExtensions();
if (this.overrideExtensions) {
return extensions.filter((e) =>
this.overrideExtensions?.includes(e.name),
);
} else {
return extensions;
}
}

getActiveExtensions(): Extension[] {
return this.getExtensions().filter((e) => e.isActive);
}

getBlockedMcpServers(): Array<{ name: string; extensionName: string }> {
return this._blockedMcpServers;
const mcpServers = { ...(this.mcpServers || {}) };
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) return;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
const blockedMcpServers: Array<{ name: string; extensionName: string }> =
[];

if (this.allowedMcpServers) {
Object.entries(mcpServers).forEach(([key, server]) => {
const isAllowed = this.allowedMcpServers?.includes(key);
if (!isAllowed) {
blockedMcpServers.push({
name: key,
extensionName: server.extensionName || '',
});
}
});
}
return blockedMcpServers;
}

getNoBrowser(): boolean {

@@ -10,6 +10,7 @@ import type {
GenerateContentParameters,
} from '@google/genai';
import { FinishReason, GenerateContentResponse } from '@google/genai';
import type { ContentGeneratorConfig } from '../contentGenerator.js';

// Mock the request tokenizer module BEFORE importing the class that uses it.
const mockTokenizer = {
@@ -127,6 +128,32 @@ describe('AnthropicContentGenerator', () => {
);
});

it('merges customHeaders into defaultHeaders (does not replace defaults)', async () => {
const { AnthropicContentGenerator } = await importGenerator();
void new AnthropicContentGenerator(
{
model: 'claude-test',
apiKey: 'test-key',
baseUrl: 'https://example.invalid',
timeout: 10_000,
maxRetries: 2,
samplingParams: {},
schemaCompliance: 'auto',
reasoning: { effort: 'medium' },
customHeaders: {
'X-Custom': '1',
},
} as unknown as Record<string, unknown> as ContentGeneratorConfig,
mockConfig,
);

const headers = (anthropicState.constructorOptions?.['defaultHeaders'] ||
{}) as Record<string, string>;
expect(headers['User-Agent']).toContain('QwenCode/1.2.3');
expect(headers['anthropic-beta']).toContain('effort-2025-11-24');
expect(headers['X-Custom']).toBe('1');
});

it('adds the effort beta header when reasoning.effort is set', async () => {
const { AnthropicContentGenerator } = await importGenerator();
void new AnthropicContentGenerator(

@@ -141,6 +141,7 @@ export class AnthropicContentGenerator implements ContentGenerator {
private buildHeaders(): Record<string, string> {
const version = this.cliConfig.getCliVersion() || 'unknown';
const userAgent = `QwenCode/${version} (${process.platform}; ${process.arch})`;
const { customHeaders } = this.contentGeneratorConfig;

const betas: string[] = [];
const reasoning = this.contentGeneratorConfig.reasoning;
@@ -163,7 +164,7 @@ export class AnthropicContentGenerator implements ContentGenerator {
headers['anthropic-beta'] = betas.join(',');
}

return headers;
return customHeaders ? { ...headers, ...customHeaders } : headers;
}

private async buildRequest(

@@ -91,6 +91,8 @@ export type ContentGeneratorConfig = {
userAgent?: string;
// Schema compliance mode for tool definitions
schemaCompliance?: 'auto' | 'openapi_30';
// Custom HTTP headers to be sent with requests
customHeaders?: Record<string, string>;
};

// Keep the public ContentGeneratorConfigSources API, but reuse the generic
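Across the provider hunks that follow, the new `customHeaders` field is merged into each provider's default headers with a plain object spread, so user-supplied values win on conflicts. A tiny sketch of that merge order, with made-up header values:

```ts
// Illustrative merge order used by the providers below: defaults first,
// then user-supplied customHeaders, so user values override on conflicts.
const defaultHeaders = { 'User-Agent': 'QwenCode/0.0.0 (linux; x64)' };
const customHeaders = { 'X-Custom': '1', 'User-Agent': 'my-agent' };
const headers = { ...defaultHeaders, ...customHeaders };
// => { 'User-Agent': 'my-agent', 'X-Custom': '1' }
```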
@@ -39,6 +39,41 @@ describe('GeminiContentGenerator', () => {
mockGoogleGenAI = vi.mocked(GoogleGenAI).mock.results[0].value;
});

it('should merge customHeaders into existing httpOptions.headers', async () => {
vi.mocked(GoogleGenAI).mockClear();

void new GeminiContentGenerator(
{
apiKey: 'test-api-key',
httpOptions: {
headers: {
'X-Base': 'base',
'X-Override': 'base',
},
},
},
{
customHeaders: {
'X-Custom': 'custom',
'X-Override': 'custom',
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} as any,
);

expect(vi.mocked(GoogleGenAI)).toHaveBeenCalledTimes(1);
expect(vi.mocked(GoogleGenAI)).toHaveBeenCalledWith({
apiKey: 'test-api-key',
httpOptions: {
headers: {
'X-Base': 'base',
'X-Custom': 'custom',
'X-Override': 'custom',
},
},
});
});

it('should call generateContent on the underlying model', async () => {
const request = { model: 'gemini-1.5-flash', contents: [] };
const expectedResponse = { responseId: 'test-id' };

@@ -35,7 +35,26 @@ export class GeminiContentGenerator implements ContentGenerator {
},
contentGeneratorConfig?: ContentGeneratorConfig,
) {
this.googleGenAI = new GoogleGenAI(options);
const customHeaders = contentGeneratorConfig?.customHeaders;
const finalOptions = customHeaders
? (() => {
const baseHttpOptions = options.httpOptions;
const baseHeaders = baseHttpOptions?.headers ?? {};

return {
...options,
httpOptions: {
...(baseHttpOptions ?? {}),
headers: {
...baseHeaders,
...customHeaders,
},
},
};
})()
: options;

this.googleGenAI = new GoogleGenAI(finalOptions);
this.contentGeneratorConfig = contentGeneratorConfig;
}

@@ -142,6 +142,27 @@ describe('DashScopeOpenAICompatibleProvider', () => {
});
});

it('should merge custom headers with DashScope defaults', () => {
const providerWithCustomHeaders = new DashScopeOpenAICompatibleProvider(
{
...mockContentGeneratorConfig,
customHeaders: {
'X-Custom': '1',
'X-DashScope-CacheControl': 'disable',
},
} as ContentGeneratorConfig,
mockCliConfig,
);

const headers = providerWithCustomHeaders.buildHeaders();

expect(headers['User-Agent']).toContain('QwenCode/1.0.0');
expect(headers['X-DashScope-UserAgent']).toContain('QwenCode/1.0.0');
expect(headers['X-DashScope-AuthType']).toBe(AuthType.QWEN_OAUTH);
expect(headers['X-Custom']).toBe('1');
expect(headers['X-DashScope-CacheControl']).toBe('disable');
});

it('should handle unknown CLI version', () => {
(
mockCliConfig.getCliVersion as MockedFunction<

@@ -47,13 +47,17 @@ export class DashScopeOpenAICompatibleProvider
buildHeaders(): Record<string, string | undefined> {
const version = this.cliConfig.getCliVersion() || 'unknown';
const userAgent = `QwenCode/${version} (${process.platform}; ${process.arch})`;
const { authType } = this.contentGeneratorConfig;
return {
const { authType, customHeaders } = this.contentGeneratorConfig;
const defaultHeaders = {
'User-Agent': userAgent,
'X-DashScope-CacheControl': 'enable',
'X-DashScope-UserAgent': userAgent,
'X-DashScope-AuthType': authType,
};

return customHeaders
? { ...defaultHeaders, ...customHeaders }
: defaultHeaders;
}

buildClient(): OpenAI {

@@ -73,6 +73,26 @@ describe('DefaultOpenAICompatibleProvider', () => {
});
});

it('should merge customHeaders with defaults (and allow overrides)', () => {
const providerWithCustomHeaders = new DefaultOpenAICompatibleProvider(
{
...mockContentGeneratorConfig,
customHeaders: {
'X-Custom': '1',
'User-Agent': 'custom-agent',
},
} as ContentGeneratorConfig,
mockCliConfig,
);

const headers = providerWithCustomHeaders.buildHeaders();

expect(headers).toEqual({
'User-Agent': 'custom-agent',
'X-Custom': '1',
});
});

it('should handle unknown CLI version', () => {
(
mockCliConfig.getCliVersion as MockedFunction<

@@ -25,9 +25,14 @@ export class DefaultOpenAICompatibleProvider
buildHeaders(): Record<string, string | undefined> {
const version = this.cliConfig.getCliVersion() || 'unknown';
const userAgent = `QwenCode/${version} (${process.platform}; ${process.arch})`;
return {
const { customHeaders } = this.contentGeneratorConfig;
const defaultHeaders = {
'User-Agent': userAgent,
};

return customHeaders
? { ...defaultHeaders, ...customHeaders }
: defaultHeaders;
}

buildClient(): OpenAI {
121  packages/core/src/extension/claude-converter.test.ts  Normal file
@@ -0,0 +1,121 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import { describe, it, expect } from 'vitest';
import {
convertClaudeToQwenConfig,
mergeClaudeConfigs,
isClaudePluginConfig,
type ClaudePluginConfig,
type ClaudeMarketplacePluginConfig,
} from './claude-converter.js';

describe('convertClaudeToQwenConfig', () => {
it('should convert basic Claude config', () => {
const claudeConfig: ClaudePluginConfig = {
name: 'claude-plugin',
version: '1.0.0',
};

const result = convertClaudeToQwenConfig(claudeConfig);

expect(result.name).toBe('claude-plugin');
expect(result.version).toBe('1.0.0');
});

it('should convert config with basic fields only', () => {
const claudeConfig: ClaudePluginConfig = {
name: 'full-plugin',
version: '1.0.0',
commands: 'commands',
agents: ['agents/agent1.md'],
skills: ['skills/skill1'],
};

const result = convertClaudeToQwenConfig(claudeConfig);

// Commands, skills, agents are collected as directories, not in config
expect(result.name).toBe('full-plugin');
expect(result.version).toBe('1.0.0');
expect(result.mcpServers).toBeUndefined();
});

it('should throw error for missing name', () => {
const invalidConfig = {
version: '1.0.0',
} as ClaudePluginConfig;

expect(() => convertClaudeToQwenConfig(invalidConfig)).toThrow();
});
});

describe('mergeClaudeConfigs', () => {
it('should merge marketplace and plugin configs', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'marketplace-name',
version: '2.0.0',
source: 'github:org/repo',
description: 'From marketplace',
};

const pluginConfig: ClaudePluginConfig = {
name: 'plugin-name',
version: '1.0.0',
commands: 'commands',
};

const merged = mergeClaudeConfigs(marketplacePlugin, pluginConfig);

// Marketplace takes precedence
expect(merged.name).toBe('marketplace-name');
expect(merged.version).toBe('2.0.0');
expect(merged.description).toBe('From marketplace');
// Plugin fields preserved
expect(merged.commands).toBe('commands');
});

it('should work with strict=false and no plugin config', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'standalone',
version: '1.0.0',
source: 'local',
strict: false,
commands: 'commands',
};

const merged = mergeClaudeConfigs(marketplacePlugin);

expect(merged.name).toBe('standalone');
expect(merged.commands).toBe('commands');
});

it('should throw error for strict mode without plugin config', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'strict-plugin',
version: '1.0.0',
source: 'github:org/repo',
strict: true,
};

expect(() => mergeClaudeConfigs(marketplacePlugin)).toThrow();
});
});

describe('isClaudePluginConfig', () => {
it('should identify Claude plugin directory', () => {
const extensionDir = '/tmp/test-extension';
const marketplace = {
marketplaceSource: 'https://test.com',
pluginName: 'test-plugin',
};

// This will check if marketplace.json exists and contains the plugin
// Note: In real usage, this requires actual file system setup
expect(typeof isClaudePluginConfig(extensionDir, marketplace)).toBe(
'boolean',
);
});
});
745  packages/core/src/extension/claude-converter.ts  Normal file
@@ -0,0 +1,745 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

/**
* Converter for Claude Code plugins to Qwen Code format.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { glob } from 'glob';
import type { ExtensionConfig } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import type {
ExtensionInstallMetadata,
MCPServerConfig,
} from '../config/config.js';
import { cloneFromGit, downloadFromGitHubRelease } from './github.js';
import { createHash } from 'node:crypto';
import { copyDirectory } from './gemini-converter.js';
import {
parse as parseYaml,
stringify as stringifyYaml,
} from '../utils/yaml-parser.js';

export interface ClaudePluginConfig {
name: string;
version: string;
description?: string;
author?: { name?: string; email?: string; url?: string };
homepage?: string;
repository?: string;
license?: string;
keywords?: string[];
commands?: string | string[];
agents?: string | string[];
skills?: string | string[];
hooks?: string;
mcpServers?: string | Record<string, MCPServerConfig>;
outputStyles?: string | string[];
lspServers?: string;
}

/**
* Claude Code subagent configuration format.
* Based on https://code.claude.com/docs/en/sub-agents
*/
export interface ClaudeAgentConfig {
/** Unique identifier using lowercase letters and hyphens */
name: string;
/** When Claude should delegate to this subagent */
description: string;
/** Tools the subagent can use. Inherits all tools if omitted */
tools?: string[];
/** Tools to deny, removed from inherited or specified list */
disallowedTools?: string[];
/** Model to use: sonnet, opus, haiku, or inherit */
model?: string;
/** Permission mode: default, acceptEdits, dontAsk, bypassPermissions, or plan */
permissionMode?: string;
/** Skills to load into the subagent's context at startup */
skills?: string[];
/** Hooks configuration */
hooks?: unknown;
/** System prompt content */
systemPrompt?: string;
/** subagent color */
color?: string;
}

export type ClaudePluginSource =
| { source: 'github'; repo: string }
| { source: 'url'; url: string };

export interface ClaudeMarketplacePluginConfig extends ClaudePluginConfig {
source: string | ClaudePluginSource;
category?: string;
strict?: boolean;
tags?: string[];
}

export interface ClaudeMarketplaceConfig {
name: string;
owner: { name: string; email: string };
plugins: ClaudeMarketplacePluginConfig[];
metadata?: { description?: string; version?: string; pluginRoot?: string };
}

const CLAUDE_TOOLS_MAPPING: Record<string, string | string[]> = {
AskUserQuestion: 'None',
Bash: 'Shell',
BashOutput: 'None',
Edit: 'Edit',
ExitPlanMode: 'ExitPlanMode',
Glob: 'Glob',
Grep: 'Grep',
KillShell: 'None',
NotebookEdit: 'None',
Read: ['ReadFile', 'ReadManyFiles'],
Skill: 'Skill',
Task: 'Task',
TodoWrite: 'TodoWrite',
WebFetch: 'WebFetch',
WebSearch: 'WebSearch',
Write: 'WriteFile',
LS: 'ListFiles',
};

const claudeBuildInToolsTransform = (tools: string[]): string[] => {
const transformedTools: string[] = [];
tools.forEach((tool) => {
if (!CLAUDE_TOOLS_MAPPING[tool]) {
transformedTools.push(tool);
} else {
if (CLAUDE_TOOLS_MAPPING[tool] === 'None') {
return;
} else if (Array.isArray(CLAUDE_TOOLS_MAPPING[tool])) {
transformedTools.push(...CLAUDE_TOOLS_MAPPING[tool]);
} else {
transformedTools.push(CLAUDE_TOOLS_MAPPING[tool]);
}
}
});
return transformedTools;
};
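The mapping above drops Claude-only tools (those mapped to 'None'), fans some names out to several Qwen tools, and passes unknown names through unchanged. A quick illustration, assuming the module-internal transform shown above:

```ts
// Assuming the mapping table above:
claudeBuildInToolsTransform(['Bash', 'Read', 'KillShell', 'MyCustomTool']);
// => ['Shell', 'ReadFile', 'ReadManyFiles', 'MyCustomTool']
// 'KillShell' maps to 'None' and is dropped; unknown names pass through unchanged.
```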
/**
* Parses a value that can be either a comma-separated string or an array.
* Claude agent config can have tools like 'Glob, Grep, Read' or ['Glob', 'Grep', 'Read']
* @param value The value to parse
* @returns Array of strings or undefined
*/
function parseStringOrArray(value: unknown): string[] | undefined {
if (value === undefined || value === null) {
return undefined;
}
if (Array.isArray(value)) {
return value.map(String);
}
if (typeof value === 'string') {
// Split by comma and trim whitespace
return value
.split(',')
.map((s) => s.trim())
.filter((s) => s.length > 0);
}
return undefined;
}

/**
* Converts a Claude agent config to Qwen Code subagent format.
* @param claudeAgent Claude agent configuration
* @returns Converted agent config compatible with Qwen Code SubagentConfig
*/
export function convertClaudeAgentConfig(
claudeAgent: ClaudeAgentConfig,
): Record<string, unknown> {
// Base config with required fields
const qwenAgent: Record<string, unknown> = {
name: claudeAgent.name,
description: claudeAgent.description,
};

if (claudeAgent.color) {
qwenAgent['color'] = claudeAgent.color;
}

// Convert system prompt if present
if (claudeAgent.systemPrompt) {
qwenAgent['systemPrompt'] = claudeAgent.systemPrompt;
}

// Convert tools using claudeBuildInToolsTransform
if (claudeAgent.tools && claudeAgent.tools.length > 0) {
qwenAgent['tools'] = claudeBuildInToolsTransform(claudeAgent.tools);
}

// Convert model to modelConfig
if (claudeAgent.model) {
// Map Claude model names to Qwen model config
// Claude uses: sonnet, opus, haiku, inherit
// We preserve the model name for now, the actual mapping will be handled at runtime
qwenAgent['modelConfig'] = {
model: claudeAgent.model === 'inherit' ? undefined : claudeAgent.model,
};
}

// Preserve unsupported fields as-is for potential future compatibility
// These fields are not supported by Qwen Code SubagentConfig but we keep them
if (claudeAgent.permissionMode) {
qwenAgent['permissionMode'] = claudeAgent.permissionMode;
}
if (claudeAgent.hooks) {
qwenAgent['hooks'] = claudeAgent.hooks;
}
if (claudeAgent.skills && claudeAgent.skills.length > 0) {
qwenAgent['skills'] = claudeAgent.skills;
}
if (claudeAgent.disallowedTools && claudeAgent.disallowedTools.length > 0) {
qwenAgent['disallowedTools'] = claudeAgent.disallowedTools;
}

return qwenAgent;
}

/**
* Converts all agent files in a directory from Claude format to Qwen format.
* Parses the YAML frontmatter, converts the configuration, and writes back.
* @param agentsDir Directory containing agent markdown files
*/
async function convertAgentFiles(agentsDir: string): Promise<void> {
if (!fs.existsSync(agentsDir)) {
return;
}

const files = await fs.promises.readdir(agentsDir);

for (const file of files) {
if (!file.endsWith('.md')) continue;

const filePath = path.join(agentsDir, file);

try {
const content = await fs.promises.readFile(filePath, 'utf-8');

// Parse frontmatter
const frontmatterRegex = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/;
const match = content.match(frontmatterRegex);

if (!match) {
// No frontmatter, skip this file
continue;
}

const [, frontmatterYaml, body] = match;
const frontmatter = parseYaml(frontmatterYaml) as Record<string, unknown>;

// Build Claude agent config from frontmatter
// Note: Claude tools/disallowedTools/skills can be comma-separated strings like 'Glob, Grep, Read'
const claudeAgent: ClaudeAgentConfig = {
name: String(frontmatter['name'] || ''),
description: String(frontmatter['description'] || ''),
tools: parseStringOrArray(frontmatter['tools']),
disallowedTools: parseStringOrArray(frontmatter['disallowedTools']),
model: frontmatter['model'] as string | undefined,
permissionMode: frontmatter['permissionMode'] as string | undefined,
skills: parseStringOrArray(frontmatter['skills']),
hooks: frontmatter['hooks'],
color: frontmatter['color'] as string | undefined,
systemPrompt: body.trim(),
};

// Convert to Qwen format
const qwenAgent = convertClaudeAgentConfig(claudeAgent);

// Build new frontmatter (excluding systemPrompt as it goes in body)
const newFrontmatter: Record<string, unknown> = {};
for (const [key, value] of Object.entries(qwenAgent)) {
if (key !== 'systemPrompt' && value !== undefined) {
newFrontmatter[key] = value;
}
}

// Write converted content back
const newYaml = stringifyYaml(newFrontmatter);
const systemPrompt = (qwenAgent['systemPrompt'] as string) || body.trim();
const newContent = `---
${newYaml}
---

${systemPrompt}
`;

await fs.promises.writeFile(filePath, newContent, 'utf-8');
} catch (error) {
console.warn(
`[Claude Converter] Failed to convert agent file ${filePath}: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
}
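A hedged before/after sketch of `convertClaudeAgentConfig` on a sample agent; the agent values are made up, and the expected output is inferred from the conversion logic above:

```ts
// Illustrative input/output for the conversion above (values are hypothetical).
const converted = convertClaudeAgentConfig({
  name: 'code-reviewer',
  description: 'Reviews diffs for style and correctness',
  tools: ['Read', 'Grep', 'Bash'],
  model: 'inherit',
  color: 'blue',
  systemPrompt: 'You are a meticulous reviewer.',
});
// converted ≈ {
//   name: 'code-reviewer',
//   description: 'Reviews diffs for style and correctness',
//   color: 'blue',
//   systemPrompt: 'You are a meticulous reviewer.',
//   tools: ['ReadFile', 'ReadManyFiles', 'Grep', 'Shell'],
//   modelConfig: { model: undefined },  // 'inherit' is dropped
// }
```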
/**
* Converts a Claude plugin config to Qwen Code format.
* @param claudeConfig Claude plugin configuration
* @returns Qwen ExtensionConfig
*/
export function convertClaudeToQwenConfig(
claudeConfig: ClaudePluginConfig,
): ExtensionConfig {
// Validate required fields
if (!claudeConfig.name || !claudeConfig.version) {
throw new Error('Claude plugin config must have name and version fields');
}

// Parse MCP servers
let mcpServers: Record<string, MCPServerConfig> | undefined;
if (claudeConfig.mcpServers) {
if (typeof claudeConfig.mcpServers === 'string') {
// TODO: Load from file path
console.warn(
`[Claude Converter] MCP servers path not yet supported: ${claudeConfig.mcpServers}`,
);
} else {
mcpServers = claudeConfig.mcpServers;
}
}

// Warn about unsupported fields
if (claudeConfig.hooks) {
console.warn(
`[Claude Converter] Hooks are not yet supported in ${claudeConfig.name}`,
);
}
if (claudeConfig.outputStyles) {
console.warn(
`[Claude Converter] Output styles are not yet supported in ${claudeConfig.name}`,
);
}
if (claudeConfig.lspServers) {
console.warn(
`[Claude Converter] LSP servers are not yet supported in ${claudeConfig.name}`,
);
}

// Direct field mapping - commands, skills, agents will be collected as folders
return {
name: claudeConfig.name,
version: claudeConfig.version,
mcpServers,
};
}

/**
* Converts a complete Claude plugin package to Qwen Code format.
* Creates a new temporary directory with:
* 1. Converted qwen-extension.json
* 2. Commands, skills, and agents collected to respective folders
* 3. MCP servers resolved from JSON files if needed
* 4. All other files preserved
*/
export async function convertClaudePluginPackage(
extensionDir: string,
pluginName: string,
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
// Step 1: Load marketplace.json
const marketplaceJsonPath = path.join(
extensionDir,
'.claude-plugin',
'marketplace.json',
);
if (!fs.existsSync(marketplaceJsonPath)) {
throw new Error(
`Marketplace configuration not found at ${marketplaceJsonPath}`,
);
}

const marketplaceContent = fs.readFileSync(marketplaceJsonPath, 'utf-8');
const marketplaceConfig: ClaudeMarketplaceConfig =
JSON.parse(marketplaceContent);

// Find the target plugin in marketplace
const marketplacePlugin = marketplaceConfig.plugins.find(
(p) => p.name === pluginName,
);
if (!marketplacePlugin) {
throw new Error(`Plugin ${pluginName} not found in marketplace.json`);
}

// Step 2: Resolve plugin source directory based on source field
const pluginDir = path.join(
extensionDir,
`plugin${createHash('sha256').update(`${extensionDir}/${pluginName}`).digest('hex')}`,
);
await fs.promises.mkdir(pluginDir, { recursive: true });

const pluginSource = await resolvePluginSource(
marketplacePlugin,
extensionDir,
pluginDir,
);

if (!fs.existsSync(pluginSource)) {
throw new Error(`Plugin source directory not found: ${pluginSource}`);
}

// Step 3: Load and merge plugin.json if exists (based on strict mode)
const strict = marketplacePlugin.strict ?? true;
let mergedConfig: ClaudePluginConfig;

if (strict) {
const pluginJsonPath = path.join(
pluginSource,
'.claude-plugin',
'plugin.json',
);
if (!fs.existsSync(pluginJsonPath)) {
throw new Error(`Strict mode requires plugin.json at ${pluginJsonPath}`);
}
const pluginContent = fs.readFileSync(pluginJsonPath, 'utf-8');
const pluginConfig: ClaudePluginConfig = JSON.parse(pluginContent);
mergedConfig = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
} else {
mergedConfig = marketplacePlugin as ClaudePluginConfig;
}

// Step 4: Resolve MCP servers from JSON files if needed
if (mergedConfig.mcpServers && typeof mergedConfig.mcpServers === 'string') {
const mcpServersPath = path.isAbsolute(mergedConfig.mcpServers)
? mergedConfig.mcpServers
: path.join(pluginSource, mergedConfig.mcpServers);

if (fs.existsSync(mcpServersPath)) {
try {
const mcpContent = fs.readFileSync(mcpServersPath, 'utf-8');
mergedConfig.mcpServers = JSON.parse(mcpContent) as Record<
string,
MCPServerConfig
>;
} catch (error) {
console.warn(
`Failed to parse MCP servers file ${mcpServersPath}: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
}

// Step 5: Create temporary directory for converted extension
const tmpDir = await ExtensionStorage.createTmpDir();

try {
// Step 6: Copy plugin files to temporary directory
await copyDirectory(pluginSource, tmpDir);

// Step 7: Collect commands to commands folder
if (mergedConfig.commands) {
const commandsDestDir = path.join(tmpDir, 'commands');
await collectResources(
mergedConfig.commands,
pluginSource,
commandsDestDir,
);
}

// Step 8: Collect skills to skills folder
if (mergedConfig.skills) {
const skillsDestDir = path.join(tmpDir, 'skills');
await collectResources(mergedConfig.skills, pluginSource, skillsDestDir);
}

// Step 9: Collect agents to agents folder
const agentsDestDir = path.join(tmpDir, 'agents');
if (mergedConfig.agents) {
await collectResources(mergedConfig.agents, pluginSource, agentsDestDir);
}
// Step 9.1: Convert collected agent files from Claude format to Qwen format
await convertAgentFiles(agentsDestDir);

// Step 10: Convert to Qwen format config
const qwenConfig = convertClaudeToQwenConfig(mergedConfig);

// Step 11: Write qwen-extension.json
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
fs.writeFileSync(
qwenConfigPath,
JSON.stringify(qwenConfig, null, 2),
'utf-8',
);

return {
config: qwenConfig,
convertedDir: tmpDir,
};
} catch (error) {
// Clean up temporary directory on error
try {
fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
throw error;
}
}
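A hedged usage sketch of the package conversion above; the paths and plugin name are hypothetical:

```ts
// Illustrative only; directory and plugin name are made up.
const { config, convertedDir } = await convertClaudePluginPackage(
  '/tmp/acme-marketplace', // directory containing .claude-plugin/marketplace.json
  'acme-plugin',
);
// convertedDir now holds qwen-extension.json plus the collected
// commands/, skills/, and agents/ folders ready to be installed.
console.log(config.name, config.version);
```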
/**
|
||||
* Collects resources (commands, skills, agents) to a destination folder.
|
||||
* If a resource is already in the destination folder, it will be skipped.
|
||||
* @param resourcePaths String or array of resource paths
|
||||
* @param pluginRoot Root directory of the plugin
|
||||
* @param destDir Destination directory for collected resources
|
||||
*/
|
||||
async function collectResources(
|
||||
resourcePaths: string | string[],
|
||||
pluginRoot: string,
|
||||
destDir: string,
|
||||
): Promise<void> {
|
||||
const paths = Array.isArray(resourcePaths) ? resourcePaths : [resourcePaths];
|
||||
|
||||
// Create destination directory
|
||||
if (!fs.existsSync(destDir)) {
|
||||
fs.mkdirSync(destDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Get the destination folder name (e.g., 'commands', 'skills', 'agents')
|
||||
const destFolderName = path.basename(destDir);
|
||||
|
||||
for (const resourcePath of paths) {
|
||||
const resolvedPath = path.isAbsolute(resourcePath)
|
||||
? resourcePath
|
||||
: path.join(pluginRoot, resourcePath);
|
||||
|
||||
if (!fs.existsSync(resolvedPath)) {
|
||||
console.warn(`Resource path not found: ${resolvedPath}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const stat = fs.statSync(resolvedPath);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
// If it's a directory, check if it's already the destination folder
|
||||
const dirName = path.basename(resolvedPath);
|
||||
const parentDir = path.dirname(resolvedPath);
|
||||
|
||||
// If the directory is already named as the destination folder (e.g., 'commands')
|
||||
// and it's at the plugin root level, skip it
|
||||
if (dirName === destFolderName && parentDir === pluginRoot) {
|
||||
console.log(
|
||||
`Skipping ${resolvedPath} as it's already in the correct location`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Copy all files from the directory
|
||||
const files = await glob('**/*', {
|
||||
cwd: resolvedPath,
|
||||
nodir: true,
|
||||
dot: false,
|
||||
});
|
||||
|
||||
for (const file of files) {
|
||||
const srcFile = path.join(resolvedPath, file);
|
||||
const destFile = path.join(destDir, file);
|
||||
|
||||
// Ensure parent directory exists
|
||||
const destFileDir = path.dirname(destFile);
|
||||
if (!fs.existsSync(destFileDir)) {
|
||||
fs.mkdirSync(destFileDir, { recursive: true });
|
||||
}
|
||||
|
||||
fs.copyFileSync(srcFile, destFile);
|
||||
}
|
||||
} else {
|
||||
// If it's a file, check if it's already in the destination folder
|
||||
const relativePath = path.relative(pluginRoot, resolvedPath);
|
||||
|
||||
// Check if the file path starts with the destination folder name
|
||||
// e.g., 'commands/test1.md' or 'commands/me/test.md' should be skipped
|
||||
const segments = relativePath.split(path.sep);
|
||||
if (segments.length > 0 && segments[0] === destFolderName) {
|
||||
console.log(
|
||||
`Skipping ${resolvedPath} as it's already in ${destFolderName}/`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Copy the file to destination
|
||||
const fileName = path.basename(resolvedPath);
|
||||
const destFile = path.join(destDir, fileName);
|
||||
fs.copyFileSync(resolvedPath, destFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges marketplace plugin config with the actual plugin.json config.
|
||||
* Marketplace config takes precedence for conflicting fields.
|
||||
* @param marketplacePlugin Marketplace plugin definition
|
||||
* @param pluginConfig Actual plugin.json config (optional if strict=false)
|
||||
* @returns Merged Claude plugin config
|
||||
*/
|
||||
export function mergeClaudeConfigs(
|
||||
marketplacePlugin: ClaudeMarketplacePluginConfig,
|
||||
pluginConfig?: ClaudePluginConfig,
|
||||
): ClaudePluginConfig {
|
||||
if (!pluginConfig && marketplacePlugin.strict !== false) {
|
||||
throw new Error(
|
||||
`Plugin ${marketplacePlugin.name} requires plugin.json (strict mode)`,
|
||||
);
|
||||
}
|
||||
|
||||
// Start with plugin.json config (if exists)
|
||||
const merged: ClaudePluginConfig = pluginConfig
|
||||
? { ...pluginConfig }
|
||||
: {
|
||||
name: marketplacePlugin.name,
|
||||
version: '1.0.0', // Default version if not in marketplace
|
||||
};
|
||||
|
||||
// Overlay marketplace config (takes precedence)
|
||||
if (marketplacePlugin.name) merged.name = marketplacePlugin.name;
|
||||
if (marketplacePlugin.version) merged.version = marketplacePlugin.version;
|
||||
if (marketplacePlugin.description)
|
||||
merged.description = marketplacePlugin.description;
|
||||
if (marketplacePlugin.author) merged.author = marketplacePlugin.author;
|
||||
if (marketplacePlugin.homepage) merged.homepage = marketplacePlugin.homepage;
|
||||
if (marketplacePlugin.repository)
|
||||
merged.repository = marketplacePlugin.repository;
|
||||
if (marketplacePlugin.license) merged.license = marketplacePlugin.license;
|
||||
if (marketplacePlugin.keywords) merged.keywords = marketplacePlugin.keywords;
|
||||
if (marketplacePlugin.commands) merged.commands = marketplacePlugin.commands;
|
||||
if (marketplacePlugin.agents) merged.agents = marketplacePlugin.agents;
|
||||
if (marketplacePlugin.skills) merged.skills = marketplacePlugin.skills;
|
||||
if (marketplacePlugin.hooks) merged.hooks = marketplacePlugin.hooks;
|
||||
if (marketplacePlugin.mcpServers)
|
||||
merged.mcpServers = marketplacePlugin.mcpServers;
|
||||
if (marketplacePlugin.outputStyles)
|
||||
merged.outputStyles = marketplacePlugin.outputStyles;
|
||||
if (marketplacePlugin.lspServers)
|
||||
merged.lspServers = marketplacePlugin.lspServers;
|
||||
|
||||
return merged;
|
||||
}

/**
 * Checks whether an extension directory contains a Claude marketplace
 * config that lists the given plugin.
 * @param extensionDir Extension directory to inspect
 * @param marketplace Marketplace source and plugin name to look up
 * @returns true if the directory holds a Claude marketplace config with that plugin
 */
export function isClaudePluginConfig(
  extensionDir: string,
  marketplace: { marketplaceSource: string; pluginName: string },
) {
  const marketplaceConfigFilePath = path.join(
    extensionDir,
    '.claude-plugin/marketplace.json',
  );
  if (!fs.existsSync(marketplaceConfigFilePath)) {
    return false;
  }

  const marketplaceConfigContent = fs.readFileSync(
    marketplaceConfigFilePath,
    'utf-8',
  );
  const marketplaceConfig = JSON.parse(marketplaceConfigContent);

  if (typeof marketplaceConfig !== 'object' || marketplaceConfig === null) {
    return false;
  }

  const marketplaceConfigObj = marketplaceConfig as Record<string, unknown>;

  // Must have name and owner
  if (
    typeof marketplaceConfigObj['name'] !== 'string' ||
    typeof marketplaceConfigObj['owner'] !== 'object'
  ) {
    return false;
  }

  if (!Array.isArray(marketplaceConfigObj['plugins'])) {
    return false;
  }

  const marketplacePluginObj = marketplaceConfigObj['plugins'].find(
    (plugin: ClaudeMarketplacePluginConfig) =>
      plugin.name === marketplace.pluginName,
  );

  if (!marketplacePluginObj) return false;

  return true;
}

/**
 * Resolve plugin source from marketplace plugin configuration.
 * Returns the absolute path to the plugin source directory.
 */
async function resolvePluginSource(
  pluginConfig: ClaudeMarketplacePluginConfig,
  marketplaceDir: string,
  pluginDir: string,
): Promise<string> {
  const source = pluginConfig.source;

  // Handle string source (relative path or URL)
  if (typeof source === 'string') {
    // Check if it's a URL
    if (source.startsWith('http://') || source.startsWith('https://')) {
      // Download from URL
      const installMetadata: ExtensionInstallMetadata = {
        source,
        type: 'git',
      };
      try {
        await downloadFromGitHubRelease(installMetadata, pluginDir);
      } catch {
        await cloneFromGit(installMetadata, pluginDir);
      }
      return pluginDir;
    }

    // Relative path within marketplace
    const pluginRoot = marketplaceDir;
    const sourcePath = path.join(pluginRoot, source);

    if (!fs.existsSync(sourcePath)) {
      throw new Error(`Plugin source not found at ${sourcePath}`);
    }

    // Copy to plugin directory
    await fs.promises.cp(sourcePath, pluginDir, { recursive: true });
    return pluginDir;
  }

  // Handle object source (github or url)
  if (source.source === 'github') {
    const installMetadata: ExtensionInstallMetadata = {
      source: `https://github.com/${source.repo}`,
      type: 'git',
    };
    try {
      await downloadFromGitHubRelease(installMetadata, pluginDir);
    } catch {
      await cloneFromGit(installMetadata, pluginDir);
    }
    return pluginDir;
  }

  if (source.source === 'url') {
    const installMetadata: ExtensionInstallMetadata = {
      source: source.url,
      type: 'git',
    };
    try {
      await downloadFromGitHubRelease(installMetadata, pluginDir);
    } catch {
      await cloneFromGit(installMetadata, pluginDir);
    }
    return pluginDir;
  }

  throw new Error(`Unsupported plugin source type: ${JSON.stringify(source)}`);
}
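
// For reference, the source shapes handled above (values are illustrative only):
//   source: 'plugins/my-plugin'                       -> copied from the marketplace checkout
//   source: 'https://example.com/my-plugin.git'       -> GitHub release download, falling back to a git clone
//   source: { source: 'github', repo: 'owner/repo' }  -> cloned from https://github.com/owner/repo
//   source: { source: 'url', url: 'https://example.com/repo.git' } -> treated like a git source
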
836
packages/core/src/extension/extensionManager.test.ts
Normal file
@@ -0,0 +1,836 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
  INSTALL_METADATA_FILENAME,
  EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import { QWEN_DIR } from '../config/storage.js';
import {
  ExtensionManager,
  SettingScope,
  type ExtensionManagerOptions,
  validateName,
  getExtensionId,
  hashValue,
  extensionConsentString,
  maybeRequestConsentOrFail,
  parseInstallSource,
  type ExtensionConfig,
} from './extensionManager.js';
import type { MCPServerConfig, ExtensionInstallMetadata } from '../index.js';

const mockGit = {
  clone: vi.fn(),
  getRemotes: vi.fn(),
  fetch: vi.fn(),
  checkout: vi.fn(),
  listRemote: vi.fn(),
  revparse: vi.fn(),
  path: vi.fn(),
};

vi.mock('simple-git', () => ({
  simpleGit: vi.fn((path: string) => {
    mockGit.path.mockReturnValue(path);
    return mockGit;
  }),
}));

vi.mock('./github.js', async (importOriginal) => {
  const actual = await importOriginal<typeof import('./github.js')>();
  return {
    ...actual,
    downloadFromGitHubRelease: vi
      .fn()
      .mockRejectedValue(new Error('Mocked GitHub release download failure')),
  };
});

const mockHomedir = vi.hoisted(() => vi.fn());
vi.mock('os', async (importOriginal) => {
  const mockedOs = await importOriginal<typeof os>();
  return {
    ...mockedOs,
    homedir: mockHomedir,
  };
});

const mockLogExtensionEnable = vi.hoisted(() => vi.fn());
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
const mockLogExtensionDisable = vi.hoisted(() => vi.fn());
const mockLogExtensionUpdateEvent = vi.hoisted(() => vi.fn());
vi.mock('../telemetry/loggers.js', () => ({
  logExtensionEnable: mockLogExtensionEnable,
  logExtensionUpdateEvent: mockLogExtensionUpdateEvent,
}));

vi.mock('../index.js', async (importOriginal) => {
  const actual = await importOriginal<typeof import('../index.js')>();
  return {
    ...actual,
    logExtensionEnable: mockLogExtensionEnable,
    logExtensionInstallEvent: mockLogExtensionInstallEvent,
    logExtensionUninstall: mockLogExtensionUninstall,
    logExtensionDisable: mockLogExtensionDisable,
  };
});

const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');

function createExtension({
  extensionsDir = 'extensions-dir',
  name = 'my-extension',
  version = '1.0.0',
  addContextFile = false,
  contextFileName = undefined as string | undefined,
  mcpServers = {} as Record<string, MCPServerConfig>,
  installMetadata = undefined as ExtensionInstallMetadata | undefined,
} = {}): string {
  const extDir = path.join(extensionsDir, name);
  fs.mkdirSync(extDir, { recursive: true });
  fs.writeFileSync(
    path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
    JSON.stringify({ name, version, contextFileName, mcpServers }),
  );

  if (addContextFile) {
    fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
  }

  if (contextFileName) {
    fs.writeFileSync(path.join(extDir, contextFileName), 'context');
  }

  if (installMetadata) {
    fs.writeFileSync(
      path.join(extDir, INSTALL_METADATA_FILENAME),
      JSON.stringify(installMetadata),
    );
  }
  return extDir;
}

describe('extension tests', () => {
  let tempHomeDir: string;
  let tempWorkspaceDir: string;
  let userExtensionsDir: string;

  beforeEach(() => {
    tempHomeDir = fs.mkdtempSync(
      path.join(os.tmpdir(), 'qwen-code-test-home-'),
    );
    tempWorkspaceDir = fs.mkdtempSync(
      path.join(tempHomeDir, 'qwen-code-test-workspace-'),
    );
    userExtensionsDir = path.join(tempHomeDir, EXTENSIONS_DIRECTORY_NAME);
    fs.mkdirSync(userExtensionsDir, { recursive: true });

    mockHomedir.mockReturnValue(tempHomeDir);
    vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
    Object.values(mockGit).forEach((fn) => fn.mockReset());
  });

  afterEach(() => {
    fs.rmSync(tempHomeDir, { recursive: true, force: true });
    vi.restoreAllMocks();
  });

  function createExtensionManager(
    options: Partial<ExtensionManagerOptions> = {},
  ): ExtensionManager {
    return new ExtensionManager({
      workspaceDir: tempWorkspaceDir,
      isWorkspaceTrusted: true,
      ...options,
    });
  }

  describe('loadExtension', () => {
    it('should include extension path in loaded extension', async () => {
      const extensionDir = path.join(userExtensionsDir, 'test-extension');
      fs.mkdirSync(extensionDir, { recursive: true });

      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'test-extension',
        version: '1.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions).toHaveLength(1);
      expect(extensions[0].path).toBe(extensionDir);
      expect(extensions[0].config.name).toBe('test-extension');
    });

    it('should load context file path when QWEN.md is present', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
        addContextFile: true,
      });
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext2',
        version: '2.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions).toHaveLength(2);
      const ext1 = extensions.find((e) => e.config.name === 'ext1');
      const ext2 = extensions.find((e) => e.config.name === 'ext2');
      expect(ext1?.contextFiles).toEqual([
        path.join(userExtensionsDir, 'ext1', 'QWEN.md'),
      ]);
      expect(ext2?.contextFiles).toEqual([]);
    });

    it('should load context file path from the extension config', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
        addContextFile: false,
        contextFileName: 'my-context-file.md',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions).toHaveLength(1);
      const ext1 = extensions.find((e) => e.config.name === 'ext1');
      expect(ext1?.contextFiles).toEqual([
        path.join(userExtensionsDir, 'ext1', 'my-context-file.md'),
      ]);
    });

    it('should skip extensions with invalid JSON and log a warning', async () => {
      const consoleSpy = vi
        .spyOn(console, 'error')
        .mockImplementation(() => {});

      // Good extension
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'good-ext',
        version: '1.0.0',
      });

      // Bad extension
      const badExtDir = path.join(userExtensionsDir, 'bad-ext');
      fs.mkdirSync(badExtDir);
      const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
      fs.writeFileSync(badConfigPath, '{ "name": "bad-ext"'); // Malformed

      const manager = createExtensionManager();
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions).toHaveLength(1);
      expect(extensions[0].config.name).toBe('good-ext');
      expect(consoleSpy).toHaveBeenCalledWith(
        expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
      );

      consoleSpy.mockRestore();
    });

    it('should skip extensions with missing name and log a warning', async () => {
      const consoleSpy = vi
        .spyOn(console, 'error')
        .mockImplementation(() => {});

      // Good extension
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'good-ext',
        version: '1.0.0',
      });

      // Bad extension
      const badExtDir = path.join(userExtensionsDir, 'bad-ext-no-name');
      fs.mkdirSync(badExtDir);
      const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
      fs.writeFileSync(badConfigPath, JSON.stringify({ version: '1.0.0' }));

      const manager = createExtensionManager();
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions).toHaveLength(1);
      expect(extensions[0].config.name).toBe('good-ext');
      expect(consoleSpy).toHaveBeenCalledWith(
        expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
      );

      consoleSpy.mockRestore();
    });

    it('should filter trust out of mcp servers', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'test-extension',
        version: '1.0.0',
        mcpServers: {
          'test-server': {
            command: 'node',
            args: ['server.js'],
            trust: true,
          } as MCPServerConfig,
        },
      });

      const manager = createExtensionManager();
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions).toHaveLength(1);
      // trust should be filtered from extension.mcpServers (not config.mcpServers)
      expect(extensions[0].mcpServers?.['test-server']?.trust).toBeUndefined();
      // config.mcpServers should still have trust (original config)
      expect(extensions[0].config.mcpServers?.['test-server']?.trust).toBe(
        true,
      );
    });
  });

  describe('enableExtension / disableExtension', () => {
    it('should disable an extension at the user scope', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'my-extension',
        version: '1.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();

      manager.disableExtension('my-extension', SettingScope.User);
      expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
    });

    it('should disable an extension at the workspace scope', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'my-extension',
        version: '1.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();

      manager.disableExtension(
        'my-extension',
        SettingScope.Workspace,
        tempWorkspaceDir,
      );

      expect(manager.isEnabled('my-extension', tempHomeDir)).toBe(true);
      expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
    });

    it('should handle disabling the same extension twice', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'my-extension',
        version: '1.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();

      manager.disableExtension('my-extension', SettingScope.User);
      manager.disableExtension('my-extension', SettingScope.User);
      expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
    });

    it('should throw an error if you request system scope', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'my-extension',
        version: '1.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();

      expect(() =>
        manager.disableExtension('my-extension', SettingScope.System),
      ).toThrow('System and SystemDefaults scopes are not supported.');
    });

    it('should enable an extension at the user scope', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();

      manager.disableExtension('ext1', SettingScope.User);
      expect(manager.isEnabled('ext1')).toBe(false);

      manager.enableExtension('ext1', SettingScope.User);
      expect(manager.isEnabled('ext1')).toBe(true);
    });

    it('should enable an extension at the workspace scope', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();

      manager.disableExtension('ext1', SettingScope.Workspace);
      expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(false);

      manager.enableExtension('ext1', SettingScope.Workspace);
      expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(true);
    });
  });

  describe('validateExtensionOverrides', () => {
    it('should mark all extensions as active if no enabled extensions are provided', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
      });
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext2',
        version: '1.0.0',
      });

      const manager = createExtensionManager();
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions).toHaveLength(2);
      expect(extensions.every((e) => e.isActive)).toBe(true);
    });

    it('should mark only the enabled extensions as active', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
      });
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext2',
        version: '1.0.0',
      });
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext3',
        version: '1.0.0',
      });

      const manager = createExtensionManager({
        enabledExtensionOverrides: ['ext1', 'ext3'],
      });
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
      expect(extensions.find((e) => e.name === 'ext2')?.isActive).toBe(false);
      expect(extensions.find((e) => e.name === 'ext3')?.isActive).toBe(true);
    });

    it('should mark all extensions as inactive when "none" is provided', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
      });
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext2',
        version: '1.0.0',
      });

      const manager = createExtensionManager({
        enabledExtensionOverrides: ['none'],
      });
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions.every((e) => !e.isActive)).toBe(true);
    });

    it('should handle case-insensitivity', async () => {
      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
      });

      const manager = createExtensionManager({
        enabledExtensionOverrides: ['EXT1'],
      });
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
    });

    it('should log an error for unknown extensions', async () => {
      const consoleSpy = vi
        .spyOn(console, 'error')
        .mockImplementation(() => {});

      createExtension({
        extensionsDir: userExtensionsDir,
        name: 'ext1',
        version: '1.0.0',
      });

      const manager = createExtensionManager({
        enabledExtensionOverrides: ['ext4'],
      });
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();
      manager.validateExtensionOverrides(extensions);

      expect(consoleSpy).toHaveBeenCalledWith('Extension not found: ext4');
      consoleSpy.mockRestore();
    });
  });

  describe('loadExtensionConfig', () => {
    it('should resolve environment variables in extension configuration', async () => {
      process.env['TEST_API_KEY'] = 'test-api-key-123';
      process.env['TEST_DB_URL'] = 'postgresql://localhost:5432/testdb';

      try {
        const extDir = path.join(userExtensionsDir, 'test-extension');
        fs.mkdirSync(extDir);

        const extensionConfig = {
          name: 'test-extension',
          version: '1.0.0',
          mcpServers: {
            'test-server': {
              command: 'node',
              args: ['server.js'],
              env: {
                API_KEY: '$TEST_API_KEY',
                DATABASE_URL: '${TEST_DB_URL}',
                STATIC_VALUE: 'no-substitution',
              },
            },
          },
        };
        fs.writeFileSync(
          path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
          JSON.stringify(extensionConfig),
        );

        const manager = createExtensionManager();
        await manager.refreshCache();
        const extensions = manager.getLoadedExtensions();

        expect(extensions).toHaveLength(1);
        const extension = extensions[0];
        expect(extension.config.name).toBe('test-extension');
        expect(extension.config.mcpServers).toBeDefined();

        const serverConfig = extension.config.mcpServers?.['test-server'];
        expect(serverConfig).toBeDefined();
        expect(serverConfig?.env).toBeDefined();
        expect(serverConfig?.env?.['API_KEY']).toBe('test-api-key-123');
        expect(serverConfig?.env?.['DATABASE_URL']).toBe(
          'postgresql://localhost:5432/testdb',
        );
        expect(serverConfig?.env?.['STATIC_VALUE']).toBe('no-substitution');
      } finally {
        delete process.env['TEST_API_KEY'];
        delete process.env['TEST_DB_URL'];
      }
    });

    it('should handle missing environment variables gracefully', async () => {
      const extDir = path.join(userExtensionsDir, 'test-extension');
      fs.mkdirSync(extDir);

      const extensionConfig = {
        name: 'test-extension',
        version: '1.0.0',
        mcpServers: {
          'test-server': {
            command: 'node',
            args: ['server.js'],
            env: {
              MISSING_VAR: '$UNDEFINED_ENV_VAR',
              MISSING_VAR_BRACES: '${ALSO_UNDEFINED}',
            },
          },
        },
      };

      fs.writeFileSync(
        path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
        JSON.stringify(extensionConfig),
      );

      const manager = createExtensionManager();
      await manager.refreshCache();
      const extensions = manager.getLoadedExtensions();

      expect(extensions).toHaveLength(1);
      const extension = extensions[0];
      const serverConfig = extension.config.mcpServers!['test-server'];
      expect(serverConfig.env).toBeDefined();
      expect(serverConfig.env!['MISSING_VAR']).toBe('$UNDEFINED_ENV_VAR');
      expect(serverConfig.env!['MISSING_VAR_BRACES']).toBe('${ALSO_UNDEFINED}');
    });
  });
});

describe('extensionManager utility functions', () => {
  describe('validateName', () => {
    it('should accept valid extension names', () => {
      expect(() => validateName('my-extension')).not.toThrow();
      expect(() => validateName('Extension123')).not.toThrow();
      expect(() => validateName('test-ext-1')).not.toThrow();
      expect(() => validateName('UPPERCASE')).not.toThrow();
    });

    it('should reject names with invalid characters', () => {
      expect(() => validateName('my_extension')).toThrow(
        'Invalid extension name',
      );
      expect(() => validateName('my.extension')).toThrow(
        'Invalid extension name',
      );
      expect(() => validateName('my extension')).toThrow(
        'Invalid extension name',
      );
      expect(() => validateName('my@ext')).toThrow('Invalid extension name');
    });

    it('should reject empty names', () => {
      expect(() => validateName('')).toThrow('Invalid extension name');
    });
  });

  describe('hashValue', () => {
    it('should generate consistent hash for same input', () => {
      const hash1 = hashValue('test-input');
      const hash2 = hashValue('test-input');
      expect(hash1).toBe(hash2);
    });

    it('should generate different hashes for different inputs', () => {
      const hash1 = hashValue('input-1');
      const hash2 = hashValue('input-2');
      expect(hash1).not.toBe(hash2);
    });

    it('should generate a valid SHA256 hash', () => {
      const hash = hashValue('test');
      expect(hash).toMatch(/^[a-f0-9]{64}$/);
    });
  });

  describe('getExtensionId', () => {
    it('should use hashed name when no install metadata', () => {
      const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
      const id = getExtensionId(config);
      expect(id).toBe(hashValue('test-ext'));
    });

    it('should use hashed source for local install', () => {
      const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
      const metadata = { type: 'local' as const, source: '/path/to/ext' };
      const id = getExtensionId(config, metadata);
      expect(id).toBe(hashValue('/path/to/ext'));
    });

    it('should use GitHub URL for git install', () => {
      const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
      const metadata = {
        type: 'git' as const,
        source: 'https://github.com/owner/repo',
      };
      const id = getExtensionId(config, metadata);
      expect(id).toBe(hashValue('https://github.com/owner/repo'));
    });
  });

  describe('extensionConsentString', () => {
    it('should generate basic consent string', () => {
      const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
      const consent = extensionConsentString(config);
      expect(consent).toContain('Installing extension "test-ext"');
      expect(consent).toContain('Extensions may introduce unexpected behavior');
    });

    it('should include MCP servers in consent string', () => {
      const config: ExtensionConfig = {
        name: 'test-ext',
        version: '1.0.0',
        mcpServers: {
          'my-server': { command: 'node', args: ['server.js'] },
        },
      };
      const consent = extensionConsentString(config);
      expect(consent).toContain(
        'This extension will run the following MCP servers',
      );
      expect(consent).toContain('my-server');
    });

    it('should include commands in consent string', () => {
      const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
      const consent = extensionConsentString(config, ['cmd1', 'cmd2']);
      expect(consent).toContain(
        'This extension will add the following commands',
      );
      expect(consent).toContain('cmd1, cmd2');
    });

    it('should include context file info', () => {
      const config: ExtensionConfig = {
        name: 'test-ext',
        version: '1.0.0',
        contextFileName: 'CONTEXT.md',
      };
      const consent = extensionConsentString(config);
      expect(consent).toContain('CONTEXT.md');
    });

    it('should include excluded tools', () => {
      const config: ExtensionConfig = {
        name: 'test-ext',
        version: '1.0.0',
        excludeTools: ['tool1', 'tool2'],
      };
      const consent = extensionConsentString(config);
      expect(consent).toContain('exclude the following core tools');
    });
  });

  describe('maybeRequestConsentOrFail', () => {
    it('should request consent for new installation', async () => {
      const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
      const requestConsent = vi.fn().mockResolvedValue(true);

      await maybeRequestConsentOrFail(config, requestConsent, []);

      expect(requestConsent).toHaveBeenCalledTimes(1);
    });

    it('should throw if user declines consent', async () => {
      const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
      const requestConsent = vi.fn().mockResolvedValue(false);

      await expect(
        maybeRequestConsentOrFail(config, requestConsent, []),
      ).rejects.toThrow('Installation cancelled');
    });

    it('should skip consent if config unchanged during update', async () => {
      const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
      const previousConfig: ExtensionConfig = {
        name: 'test-ext',
        version: '0.9.0',
      };
      const requestConsent = vi.fn().mockResolvedValue(true);

      await maybeRequestConsentOrFail(
        config,
        requestConsent,
        [],
        [],
        [],
        previousConfig,
      );

      expect(requestConsent).not.toHaveBeenCalled();
    });

    it('should request consent if config changed during update', async () => {
      const config: ExtensionConfig = {
        name: 'test-ext',
        version: '1.0.0',
        mcpServers: { server: { command: 'node' } },
      };
      const previousConfig: ExtensionConfig = {
        name: 'test-ext',
        version: '0.9.0',
      };
      const requestConsent = vi.fn().mockResolvedValue(true);

      await maybeRequestConsentOrFail(
        config,
        requestConsent,
        [],
        [],
        [],
        previousConfig,
      );

      expect(requestConsent).toHaveBeenCalledTimes(1);
    });
  });

  describe('parseInstallSource', () => {
    it('should parse HTTPS URL as git type', async () => {
      const result = await parseInstallSource('https://github.com/owner/repo');
      expect(result.type).toBe('git');
      expect(result.source).toBe('https://github.com/owner/repo');
    });

    it('should parse HTTP URL as git type', async () => {
      const result = await parseInstallSource('http://example.com/repo');
      expect(result.type).toBe('git');
    });

    it('should parse git@ URL as git type', async () => {
      const result = await parseInstallSource('git@github.com:owner/repo.git');
      expect(result.type).toBe('git');
    });

    it('should parse sso:// URL as git type', async () => {
      const result = await parseInstallSource('sso://some/path');
      expect(result.type).toBe('git');
    });

    it('should parse marketplace URL correctly', async () => {
      const result = await parseInstallSource(
        'https://example.com/marketplace:plugin-name',
      );
      expect(result.type).toBe('marketplace');
      expect(result.marketplace?.pluginName).toBe('plugin-name');
    });

    it('should throw for non-existent local path', async () => {
      await expect(
        parseInstallSource('/nonexistent/path/to/extension'),
      ).rejects.toThrow('Install source not found');
    });
  });
});

Some files were not shown because too many files have changed in this diff.