Mirror of https://github.com/QwenLM/qwen-code.git (synced 2026-01-19 07:16:19 +00:00)

Compare commits: feat/ide-t… → feat/exten…

21 Commits
| Author | SHA1 | Date |
| --- | --- | --- |
|  | f8e41fb7fa |  |
|  | 6e641b8def |  |
|  | a546e84887 |  |
|  | 706cdb2ac1 |  |
|  | df33029589 |  |
|  | c8b0efa4d9 |  |
|  | d0104dc487 |  |
|  | 592bf2bad1 |  |
|  | f10fcc8dc9 |  |
|  | f7fb624af9 |  |
|  | 2852f48a4a |  |
|  | f00f76456c |  |
|  | 4c7605d900 |  |
|  | b37ede07e8 |  |
|  | 0a88dd7861 |  |
|  | 70991e474f |  |
|  | 551e546974 |  |
|  | 74013bd8b2 |  |
|  | 18713ef2b0 |  |
|  | 50dac93c80 |  |
|  | 22504b0a5b |  |
.github/workflows/vscode-extension-test.yml (vendored, 279 changed lines)
@@ -1,279 +0,0 @@
name: 'VSCode Extension Tests'

on:
  push:
    branches:
      - 'main'
      - 'release/**'
      - feat/ide-test-ci
    paths:
      - 'packages/vscode-ide-companion/**'
      - '.github/workflows/vscode-extension-test.yml'
  pull_request:
    branches:
      - 'main'
      - 'release/**'
    paths:
      - 'packages/vscode-ide-companion/**'
      - '.github/workflows/vscode-extension-test.yml'
  workflow_dispatch:

concurrency:
  group: '${{ github.workflow }}-${{ github.head_ref || github.ref }}'
  cancel-in-progress: true

permissions:
  contents: 'read'
  checks: 'write'
  pull-requests: 'write' # Needed to comment on PRs

jobs:
  unit-test:
    name: 'Unit Tests'
    runs-on: '${{ matrix.os }}'
    strategy:
      fail-fast: false
      matrix:
        os:
          - 'ubuntu-latest'
        node-version:
          - '20.x'

    steps:
      - name: 'Checkout'
        uses: 'actions/checkout@v4'

      - name: 'Setup Node.js'
        uses: 'actions/setup-node@v4'
        with:
          node-version: '${{ matrix.node-version }}'
          cache: 'npm'

      - name: 'Install dependencies'
        run: 'npm ci'

      - name: 'Build project'
        run: 'npm run build'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Run unit tests'
        run: 'npm run test:ci'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Upload coverage'
        if: matrix.os == 'ubuntu-latest' && matrix.node-version == '20.x'
        uses: 'actions/upload-artifact@v4'
        with:
          name: 'coverage-unit-test'
          path: 'packages/vscode-ide-companion/coverage'

  integration-test:
    name: 'Integration Tests'
    runs-on: 'ubuntu-latest'
    needs: 'unit-test'
    if: needs.unit-test.result == 'success'

    steps:
      - name: 'Checkout'
        uses: 'actions/checkout@v4'

      - name: 'Setup Node.js'
        uses: 'actions/setup-node@v4'
        with:
          node-version: '20.x'
          cache: 'npm'

      - name: 'Install dependencies'
        run: 'npm ci'

      - name: 'Build project'
        run: 'npm run build'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Bundle CLI'
        run: 'node scripts/prepackage.js'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Run integration tests'
        run: 'xvfb-run -a npm run test:integration'
        working-directory: 'packages/vscode-ide-companion'

  e2e-test:
    name: 'E2E Tests'
    runs-on: 'ubuntu-latest'
    needs: 'integration-test'
    if: needs.integration-test.result == 'success'

    steps:
      - name: 'Checkout'
        uses: 'actions/checkout@v4'

      - name: 'Setup Node.js'
        uses: 'actions/setup-node@v4'
        with:
          node-version: '20.x'
          cache: 'npm'

      - name: 'Install dependencies'
        run: 'npm ci'

      - name: 'Install Playwright browsers'
        run: 'npx playwright install --with-deps chromium'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Build project'
        run: 'npm run build'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Bundle CLI'
        run: 'node scripts/prepackage.js'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Run E2E tests'
        run: 'xvfb-run -a npm run test:e2e'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Upload E2E test results'
        if: always()
        uses: 'actions/upload-artifact@v4'
        with:
          name: 'e2e-test-results'
          path: 'packages/vscode-ide-companion/e2e/test-results'

      - name: 'Upload Playwright report'
        if: always()
        uses: 'actions/upload-artifact@v4'
        with:
          name: 'playwright-report'
          path: 'packages/vscode-ide-companion/e2e/playwright-report'

  e2e-vscode-test:
    name: 'VSCode E2E Tests'
    runs-on: 'ubuntu-latest'
    needs: 'e2e-test'
    if: needs.e2e-test.result == 'success'

    steps:
      - name: 'Checkout'
        uses: 'actions/checkout@v4'

      - name: 'Setup Node.js'
        uses: 'actions/setup-node@v4'
        with:
          node-version: '20.x'
          cache: 'npm'

      - name: 'Install dependencies'
        run: 'npm ci'

      - name: 'Install Playwright browsers'
        run: 'npx playwright install --with-deps'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Build project'
        run: 'npm run build'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Bundle CLI'
        run: 'node scripts/prepackage.js'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Run VSCode E2E tests'
        run: 'xvfb-run -a npm run test:e2e:vscode'
        working-directory: 'packages/vscode-ide-companion'

      - name: 'Upload VSCode E2E test results'
        if: always()
        uses: 'actions/upload-artifact@v4'
        with:
          name: 'vscode-e2e-test-results'
          path: 'packages/vscode-ide-companion/e2e-vscode/test-results'

      - name: 'Upload VSCode Playwright report'
        if: always()
        uses: 'actions/upload-artifact@v4'
        with:
          name: 'vscode-playwright-report'
          path: 'packages/vscode-ide-companion/e2e-vscode/playwright-report'

  # Job to comment test results on PR if tests fail
  comment-on-pr:
    name: 'Comment PR with Test Results'
    runs-on: 'ubuntu-latest'
    needs: [unit-test, integration-test, e2e-test, e2e-vscode-test]
    if: always() && github.event_name == 'pull_request' && (needs.unit-test.result == 'failure' || needs.integration-test.result == 'failure' || needs.e2e-test.result == 'failure' || needs.e2e-vscode-test.result == 'failure')

    steps:
      - name: 'Checkout'
        uses: 'actions/checkout@v4'

      - name: 'Find Comment'
        uses: 'peter-evans/find-comment@v3'
        id: 'find-comment'
        with:
          issue-number: '${{ github.event.pull_request.number }}'
          comment-author: 'github-actions[bot]'
          body-includes: 'VSCode Extension Test Results'

      - name: 'Comment on PR'
        uses: 'peter-evans/create-or-update-comment@v4'
        with:
          comment-id: '${{ steps.find-comment.outputs.comment-id }}'
          issue-number: '${{ github.event.pull_request.number }}'
          edit-mode: 'replace'
          body: |
            ## VSCode Extension Test Results

            Tests have failed for this pull request. Please check the following jobs:

            - Unit Tests: `${{ needs.unit-test.result }}`
            - Integration Tests: `${{ needs.integration-test.result }}`
            - E2E Tests: `${{ needs.e2e-test.result }}`
            - VSCode E2E Tests: `${{ needs.e2e-vscode-test.result }}`

            [Check the workflow run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for details.

  # Job to create an issue if tests fail when not on a PR (e.g. direct push to main)
  create-issue:
    name: 'Create Issue for Failed Tests'
    runs-on: 'ubuntu-latest'
    needs: [unit-test, integration-test, e2e-test, e2e-vscode-test]
    if: always() && github.event_name == 'push' && (needs.unit-test.result == 'failure' || needs.integration-test.result == 'failure' || needs.e2e-test.result == 'failure' || needs.e2e-vscode-test.result == 'failure')

    steps:
      - name: 'Checkout'
        uses: 'actions/checkout@v4'

      - name: 'Create Issue'
        uses: 'actions/github-script@v7'
        with:
          script: |
            const { owner, repo } = context.repo;
            const result = await github.rest.issues.create({
              owner,
              repo,
              title: `VSCode Extension Tests Failed - ${context.sha.substring(0, 7)}`,
              body: `VSCode Extension Tests failed on commit ${context.sha}\n\nResults:\n- Unit Tests: ${{ needs.unit-test.result }}\n- Integration Tests: ${{ needs.integration-test.result }}\n- E2E Tests: ${{ needs.e2e-test.result }}\n- VSCode E2E Tests: ${{ needs.e2e-vscode-test.result }}\n\nWorkflow run: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}`
            });

  # Summary job to pass/fail the entire workflow based on test results
  vscode-extension-tests:
    name: 'VSCode Extension Tests Summary'
    runs-on: 'ubuntu-latest'
    needs:
      - 'unit-test'
      - 'integration-test'
      - 'e2e-test'
      - 'e2e-vscode-test'
    if: always()
    steps:
      - name: 'Check test results'
        run: |
          if [[ "${{ needs.unit-test.result }}" == "failure" ]] || \
             [[ "${{ needs.integration-test.result }}" == "failure" ]] || \
             [[ "${{ needs.e2e-test.result }}" == "failure" ]] || \
             [[ "${{ needs.e2e-vscode-test.result }}" == "failure" ]]; then
            echo "One or more test jobs failed"
            exit 1
          fi
          echo "All tests passed!"
.gitignore (vendored, 5 changed lines)
@@ -63,8 +63,3 @@ patch_output.log
docs-site/.next
# content is a symlink to ../docs
docs-site/content

# vscode-ida-companion test files
.vscode-test/
test-results/
e2e-vscode/
@@ -4,11 +4,25 @@ Qwen Code extensions package prompts, MCP servers, and custom commands into a fa

## Extension management

We offer a suite of extension management tools using `qwen extensions` commands.
We offer a suite of extension management tools using both `qwen extensions` CLI commands and `/extensions` slash commands within the interactive CLI.

Note that these commands are not supported from within the CLI, although you can list installed extensions using the `/extensions list` subcommand.
### Runtime Extension Management (Slash Commands)

Note that all of these commands will only be reflected in active CLI sessions on restart.
You can manage extensions at runtime within the interactive CLI using `/extensions` slash commands. These commands support hot-reloading, meaning changes take effect immediately without restarting the application.

| Command | Description |
| --- | --- |
| `/extensions` or `/extensions list` | List all installed extensions with their status |
| `/extensions install <source>` | Install an extension from a git URL, local path, or marketplace |
| `/extensions uninstall <name>` | Uninstall an extension |
| `/extensions enable <name> --scope <user\|workspace>` | Enable an extension |
| `/extensions disable <name> --scope <user\|workspace>` | Disable an extension |
| `/extensions update <name>` | Update a specific extension |
| `/extensions update --all` | Update all extensions with available updates |

### CLI Extension Management

You can also manage extensions using `qwen extensions` CLI commands. Note that changes made via CLI commands will be reflected in active CLI sessions on restart.

### Installing an extension
@@ -98,7 +112,18 @@ The `qwen-extension.json` file contains the configuration for the extension. The
    }
  },
  "contextFileName": "QWEN.md",
  "excludeTools": ["run_shell_command"]
  "excludeTools": ["run_shell_command"],
  "commands": "commands",
  "skills": "skills",
  "agents": "agents",
  "settings": [
    {
      "name": "API Key",
      "description": "Your API key for the service",
      "envVar": "MY_API_KEY",
      "sensitive": true
    }
  ]
}
```
@@ -108,12 +133,18 @@ The `qwen-extension.json` file contains the configuration for the extension. The
- Note that all MCP server configuration options are supported except for `trust`.
- `contextFileName`: The name of the file that contains the context for the extension. This will be used to load the context from the extension directory. If this property is not used but a `QWEN.md` file is present in your extension directory, then that file will be loaded.
- `excludeTools`: An array of tool names to exclude from the model. You can also specify command-specific restrictions for tools that support it, like the `run_shell_command` tool. For example, `"excludeTools": ["run_shell_command(rm -rf)"]` will block the `rm -rf` command. Note that this differs from the MCP server `excludeTools` functionality, which can be listed in the MCP server config. **Important:** Tools specified in `excludeTools` will be disabled for the entire conversation context and will affect all subsequent queries in the current session.
- `commands`: The directory containing custom commands (default: `commands`). Commands are `.md` files that define prompts.
- `skills`: The directory containing custom skills (default: `skills`). Skills are discovered automatically and become available via the `/skills` command.
- `agents`: The directory containing custom subagents (default: `agents`). Subagents are `.yaml` or `.md` files that define specialized AI assistants.
- `settings`: An array of settings that the extension requires. When installing, users will be prompted to provide values for these settings. The values are stored securely and passed to MCP servers as environment variables.
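For illustration only, the `settings` entries in the JSON example above map onto a shape roughly like the following. This is a hypothetical TypeScript interface written for this guide — the field names come from the JSON example, but the actual type name and exact definition in the codebase may differ:

```ts
// Hypothetical sketch of one entry in the "settings" array of qwen-extension.json.
// Field names mirror the JSON example above; this is not the codebase's own type.
interface ExtensionSettingSketch {
  name: string; // Human-readable label shown when prompting the user
  description: string; // Explains what the value will be used for
  envVar: string; // Environment variable the value is exposed to MCP servers as
  sensitive?: boolean; // If true, the value should be collected and stored as a secret
}

const apiKeySetting: ExtensionSettingSketch = {
  name: 'API Key',
  description: 'Your API key for the service',
  envVar: 'MY_API_KEY',
  sensitive: true,
};
```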
When Qwen Code starts, it loads all the extensions and merges their configurations. If there are any conflicts, the workspace configuration takes precedence.

### Custom commands

Extensions can provide [custom commands](./cli/commands.md#custom-commands) by placing TOML files in a `commands/` subdirectory within the extension directory. These commands follow the same format as user and project custom commands and use standard naming conventions.
Extensions can provide [custom commands](./cli/commands.md#custom-commands) by placing Markdown files in a `commands/` subdirectory within the extension directory. These commands follow the same format as user and project custom commands and use standard naming conventions.

> **Note:** The command format has been updated from TOML to Markdown. TOML files are deprecated but still supported. You can migrate existing TOML commands using the automatic migration prompt that appears when TOML files are detected.

**Example**
@@ -123,15 +154,46 @@ An extension named `gcp` with the following structure:
.qwen/extensions/gcp/
├── qwen-extension.json
└── commands/
    ├── deploy.toml
    ├── deploy.md
    └── gcs/
        └── sync.toml
        └── sync.md
```

Would provide these commands:

- `/deploy` - Shows as `[gcp] Custom command from deploy.toml` in help
- `/gcs:sync` - Shows as `[gcp] Custom command from sync.toml` in help
- `/deploy` - Shows as `[gcp] Custom command from deploy.md` in help
- `/gcs:sync` - Shows as `[gcp] Custom command from sync.md` in help

### Custom skills

Extensions can provide custom skills by placing skill files in a `skills/` subdirectory within the extension directory. Each skill should have a `SKILL.md` file with YAML frontmatter defining the skill's name and description.

**Example**

```
.qwen/extensions/my-extension/
├── qwen-extension.json
└── skills/
    └── pdf-processor/
        └── SKILL.md
```

The skill will be available via the `/skills` command when the extension is active.

### Custom subagents

Extensions can provide custom subagents by placing agent configuration files in an `agents/` subdirectory within the extension directory. Agents are defined using YAML or Markdown files.

**Example**

```
.qwen/extensions/my-extension/
├── qwen-extension.json
└── agents/
    └── testing-expert.yaml
```

Extension subagents appear in the subagent manager dialog under "Extension Agents" section.

### Conflict resolution
@@ -148,22 +148,119 @@ Custom commands provide a way to create shortcuts for complex prompts. Let's add
   mkdir -p commands/fs
   ```

2. Create a file named `commands/fs/grep-code.toml`:
2. Create a file named `commands/fs/grep-code.md`:

   ```markdown
   ---
   description: Search for a pattern in code and summarize findings
   ---

   ```toml
   prompt = """
   Please summarize the findings for the pattern `{{args}}`.

   Search Results:
   !{grep -r {{args}} .}
   """
   ```

This command, `/fs:grep-code`, will take an argument, run the `grep` shell command with it, and pipe the results into a prompt for summarization.

> **Note:** Commands use Markdown format with optional YAML frontmatter. TOML format is deprecated but still supported for backwards compatibility.

After saving the file, restart the Qwen Code. You can now run `/fs:grep-code "some pattern"` to use your new command.

## Step 5: Add a Custom `QWEN.md`
## Step 5: Add Custom Skills and Subagents (Optional)

Extensions can also provide custom skills and subagents to extend Qwen Code's capabilities.

### Adding a Custom Skill

Skills are model-invoked capabilities that the AI can automatically use when relevant.

1. Create a `skills` directory with a skill subdirectory:

   ```bash
   mkdir -p skills/code-analyzer
   ```

2. Create a `skills/code-analyzer/SKILL.md` file:

   ```markdown
   ---
   name: code-analyzer
   description: Analyzes code structure and provides insights about complexity, dependencies, and potential improvements
   ---

   # Code Analyzer

   ## Instructions

   When analyzing code, focus on:

   - Code complexity and maintainability
   - Dependencies and coupling
   - Potential performance issues
   - Suggestions for improvements

   ## Examples

   - "Analyze the complexity of this function"
   - "What are the dependencies of this module?"
   ```

### Adding a Custom Subagent

Subagents are specialized AI assistants for specific tasks.

1. Create an `agents` directory:

   ```bash
   mkdir -p agents
   ```

2. Create an `agents/refactoring-expert.md` file:

   ```markdown
   ---
   name: refactoring-expert
   description: Specialized in code refactoring, improving code structure and maintainability
   tools:
     - read_file
     - write_file
     - read_many_files
   ---

   You are a refactoring specialist focused on improving code quality.

   Your expertise includes:

   - Identifying code smells and anti-patterns
   - Applying SOLID principles
   - Improving code readability and maintainability
   - Safe refactoring with minimal risk

   For each refactoring task:

   1. Analyze the current code structure
   2. Identify areas for improvement
   3. Propose refactoring steps
   4. Implement changes incrementally
   5. Verify functionality is preserved
   ```

3. Update your `qwen-extension.json` to include the new directories:

   ```json
   {
     "name": "my-first-extension",
     "version": "1.0.0",
     "skills": "skills",
     "agents": "agents",
     "mcpServers": { ... }
   }
   ```

After restarting Qwen Code, your custom skills will be available via `/skills` and subagents via `/agents manage`.

## Step 6: Add a Custom `QWEN.md`

You can provide persistent context to the model by adding a `QWEN.md` file to your extension. This is useful for giving the model instructions on how to behave or information about your extension's tools. Note that you may not always need this for extensions built to expose commands and prompts.

@@ -194,7 +291,7 @@ You can provide persistent context to the model by adding a `QWEN.md` file to yo

Restart the CLI again. The model will now have the context from your `QWEN.md` file in every session where the extension is active.

## Step 6: Releasing Your Extension
## Step 7: Releasing Your Extension

Once you are happy with your extension, you can share it with others. The two primary ways of releasing extensions are via a Git repository or through GitHub Releases. Using a public Git repository is the simplest method.

@@ -207,6 +304,7 @@ You've successfully created a Qwen Code extension! You learned how to:
- Bootstrap a new extension from a template.
- Add custom tools with an MCP server.
- Create convenient custom commands.
- Add custom skills and subagents.
- Provide persistent context to the model.
- Link your extension for local development.
@@ -5,11 +5,13 @@ Qwen Code supports two authentication methods. Pick the one that matches how you
- **Qwen OAuth (recommended)**: sign in with your `qwen.ai` account in a browser.
- **OpenAI-compatible API**: use an API key (OpenAI or any OpenAI-compatible provider / endpoint).



## Option 1: Qwen OAuth (recommended & free) 👍

Use this if you want the simplest setup and you’re using Qwen models.
Use this if you want the simplest setup and you're using Qwen models.

- **How it works**: on first start, Qwen Code opens a browser login page. After you finish, credentials are cached locally so you usually won’t need to log in again.
- **How it works**: on first start, Qwen Code opens a browser login page. After you finish, credentials are cached locally so you usually won't need to log in again.
- **Requirements**: a `qwen.ai` account + internet access (at least for the first login).
- **Benefits**: no API key management, automatic credential refresh.
- **Cost & quota**: free, with a quota of **60 requests/minute** and **2,000 requests/day**.

@@ -24,15 +26,54 @@ qwen

Use this if you want to use OpenAI models or any provider that exposes an OpenAI-compatible API (e.g. OpenAI, Azure OpenAI, OpenRouter, ModelScope, Alibaba Cloud Bailian, or a self-hosted compatible endpoint).

### Quick start (interactive, recommended for local use)
### Recommended: Coding Plan (subscription-based) 🚀

When you choose the OpenAI-compatible option in the CLI, it will prompt you for:
Use this if you want predictable costs with higher usage quotas for the qwen3-coder-plus model.

- **API key**
- **Base URL** (default: `https://api.openai.com/v1`)
- **Model** (default: `gpt-4o`)
> [!IMPORTANT]
>
> Coding Plan is only available for users in China mainland (Beijing region).

> **Note:** the CLI may display the key in plain text for verification. Make sure your terminal is not being recorded or shared.
- **How it works**: subscribe to the Coding Plan with a fixed monthly fee, then configure Qwen Code to use the dedicated endpoint and your subscription API key.
- **Requirements**: an active Coding Plan subscription from [Alibaba Cloud Bailian](https://bailian.console.aliyun.com/cn-beijing/?tab=globalset#/efm/coding_plan).
- **Benefits**: higher usage quotas, predictable monthly costs, access to latest qwen3-coder-plus model.
- **Cost & quota**: varies by plan (see table below).

#### Coding Plan Pricing & Quotas

| Feature | Lite Basic Plan | Pro Advanced Plan |
| :--- | :--- | :--- |
| **Price** | ¥40/month | ¥200/month |
| **5-Hour Limit** | Up to 1,200 requests | Up to 6,000 requests |
| **Weekly Limit** | Up to 9,000 requests | Up to 45,000 requests |
| **Monthly Limit** | Up to 18,000 requests | Up to 90,000 requests |
| **Supported Model** | qwen3-coder-plus | qwen3-coder-plus |

#### Quick Setup for Coding Plan

When you select the OpenAI-compatible option in the CLI, enter these values:

- **API key**: `sk-sp-xxxxx`
- **Base URL**: `https://coding.dashscope.aliyuncs.com/v1`
- **Model**: `qwen3-coder-plus`

> **Note**: Coding Plan API keys have the format `sk-sp-xxxxx`, which is different from standard Alibaba Cloud API keys.

#### Configure via Environment Variables

Set these environment variables to use Coding Plan:

```bash
export OPENAI_API_KEY="your-coding-plan-api-key" # Format: sk-sp-xxxxx
export OPENAI_BASE_URL="https://coding.dashscope.aliyuncs.com/v1"
export OPENAI_MODEL="qwen3-coder-plus"
```

For more details about Coding Plan, including subscription options and troubleshooting, see the [full Coding Plan documentation](https://bailian.console.aliyun.com/cn-beijing/?tab=doc#/doc/?type=model&url=3005961).

### Other OpenAI-compatible Providers

If you are using other providers (OpenAI, Azure, local LLMs, etc.), use the following configuration methods.

### Configure via command-line arguments
@@ -275,7 +275,7 @@ If you are experiencing performance issues with file searching (e.g., with `@` c
| `tools.truncateToolOutputThreshold` | number | Truncate tool output if it is larger than this many characters. Applies to Shell, Grep, Glob, ReadFile and ReadManyFiles tools. | `25000` | Requires restart: Yes |
| `tools.truncateToolOutputLines` | number | Maximum lines or entries kept when truncating tool output. Applies to Shell, Grep, Glob, ReadFile and ReadManyFiles tools. | `1000` | Requires restart: Yes |
| `tools.autoAccept` | boolean | Controls whether the CLI automatically accepts and executes tool calls that are considered safe (e.g., read-only operations) without explicit user confirmation. If set to `true`, the CLI will bypass the confirmation prompt for tools deemed safe. | `false` | |
| `tools.experimental.skills` | boolean | Enable experimental Agent Skills feature | `false` | |
| `tools.experimental.skills` | boolean | Enable experimental Agent Skills feature | `false` | |

#### mcp
@@ -121,6 +121,8 @@ Environment Variables: Commands executed via `!` will set the `QWEN_CODE=1` envi

Save frequently used prompts as shortcut commands to improve work efficiency and ensure consistency.

> **Note:** Custom commands now use Markdown format with optional YAML frontmatter. TOML format is deprecated but still supported for backwards compatibility. When TOML files are detected, an automatic migration prompt will be displayed.

### Quick Overview

| Function | Description | Advantages | Priority | Applicable Scenarios |
@@ -135,14 +137,34 @@ Priority Rules: Project commands > User commands (project command used when name

#### File Path to Command Name Mapping Table

| File Location | Generated Command | Example Call |
| --- | --- | --- |
| `~/.qwen/commands/test.toml` | `/test` | `/test Parameter` |
| `<project>/git/commit.toml` | `/git:commit` | `/git:commit Message` |
| File Location | Generated Command | Example Call |
| --- | --- | --- |
| `~/.qwen/commands/test.md` | `/test` | `/test Parameter` |
| `<project>/git/commit.md` | `/git:commit` | `/git:commit Message` |

Naming Rules: Path separator (`/` or `\`) converted to colon (`:`)

### TOML File Format Specification
### Markdown File Format Specification (Recommended)

Custom commands use Markdown files with optional YAML frontmatter:

```markdown
---
description: Optional description (displayed in /help)
---

Your prompt content here.
Use {{args}} for parameter injection.
```

| Field | Required | Description | Example |
| --- | --- | --- | --- |
| `description` | Optional | Command description (displayed in /help) | `description: Code analysis tool` |
| Prompt body | Required | Prompt content sent to model | Any Markdown content after the frontmatter |

### TOML File Format (Deprecated)

> **Deprecated:** TOML format is still supported but will be removed in a future version. Please migrate to Markdown format.

| Field | Required | Description | Example |
@@ -191,15 +213,19 @@ Naming Rules: Path separator (`/` or `\`) converted to colon (`:`)

Example: Git Commit Message Generation

```
# git/commit.toml
description = "Generate Commit message based on staged changes"
prompt = """
````markdown
---
description: Generate Commit message based on staged changes
---

Please generate a Commit message based on the following diff:
diff

```diff
!{git diff --staged}
"""
```
````

````

#### 4. File Content Injection (`@{...}`)

@@ -212,36 +238,38 @@ diff

Example: Code Review Command

```
# review.toml
description = "Code review based on best practices"
prompt = """
```markdown
---
description: Code review based on best practices
---

Review {{args}}, reference standards:

@{docs/code-standards.md}
"""
```
````

### Practical Creation Example

#### "Pure Function Refactoring" Command Creation Steps Table

| Operation | Command/Code |
| --- | --- |
| 1. Create directory structure | `mkdir -p ~/.qwen/commands/refactor` |
| 2. Create command file | `touch ~/.qwen/commands/refactor/pure.toml` |
| 3. Edit command content | Refer to the complete code below. |
| 4. Test command | `@file.js` → `/refactor:pure` |
| Operation | Command/Code |
| --- | --- |
| 1. Create directory structure | `mkdir -p ~/.qwen/commands/refactor` |
| 2. Create command file | `touch ~/.qwen/commands/refactor/pure.md` |
| 3. Edit command content | Refer to the complete code below. |
| 4. Test command | `@file.js` → `/refactor:pure` |

```
# ~/.qwen/commands/refactor/pure.toml
description = "Refactor code to pure function"
prompt = """
Please analyze code in current context, refactor to pure function.
Requirements:
1. Provide refactored code
2. Explain key changes and pure function characteristic implementation
3. Maintain function unchanged
"""
```markdown
---
description: Refactor code to pure function
---

Please analyze code in current context, refactor to pure function.
Requirements:

1. Provide refactored code
2. Explain key changes and pure function characteristic implementation
3. Maintain function unchanged
```

### Custom Command Best Practices Summary
@@ -157,6 +157,18 @@ When `--experimental-skills` is enabled, Qwen Code discovers Skills from:

- Personal Skills: `~/.qwen/skills/`
- Project Skills: `.qwen/skills/`
- Extension Skills: Skills provided by installed extensions

### Extension Skills

Extensions can provide custom skills that become available when the extension is enabled. These skills are stored in the extension's `skills/` directory and follow the same format as personal and project skills.

Extension skills are automatically discovered and loaded when:

- The extension is installed and enabled
- The `--experimental-skills` flag is enabled
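In practice, that means an extension's skills only show up in sessions started with the experimental flag (for example, `qwen --experimental-skills`) or with the corresponding `tools.experimental.skills` setting turned on — see the settings table earlier in this changeset, which lists that key with a default of `false`.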
To see which extensions provide skills, check the extension's `qwen-extension.json` file for a `skills` field.

To view available Skills, ask Qwen Code directly:
@@ -6,11 +6,11 @@ Subagents are specialized AI assistants that handle specific types of tasks with

Subagents are independent AI assistants that:

- **Specialize in specific tasks** - Each Subagent is configured with a focused system prompt for particular types of work
- **Have separate context** - They maintain their own conversation history, separate from your main chat
- **Use controlled tools** - You can configure which tools each Subagent has access to
- **Work autonomously** - Once given a task, they work independently until completion or failure
- **Provide detailed feedback** - You can see their progress, tool usage, and execution statistics in real-time
- **Specialize in specific tasks** - Each Subagent is configured with a focused system prompt for particular types of work
- **Have separate context** - They maintain their own conversation history, separate from your main chat
- **Use controlled tools** - You can configure which tools each Subagent has access to
- **Work autonomously** - Once given a task, they work independently until completion or failure
- **Provide detailed feedback** - You can see their progress, tool usage, and execution statistics in real-time

## Key Benefits

@@ -59,7 +59,7 @@ AI: I'll delegate this to your testing specialist Subagents.

### CLI Commands

Subagents are managed through the `/agents` slash command and its subcommands:
Subagents are managed through the `/agents` slash command and its subcommands:

**Usage:** `/agents create`. Creates a new Subagent through a guided step wizard.

@@ -67,12 +67,26 @@ Subagents are managed through the `/agents` slash command and its subcommands:

### Storage Locations

Subagents are stored as Markdown files in two locations:
Subagents are stored as Markdown files in multiple locations:

- **Project-level**: `.qwen/agents/` (takes precedence)
- **User-level**: `~/.qwen/agents/` (fallback)
- **Project-level**: `.qwen/agents/` (highest precedence)
- **User-level**: `~/.qwen/agents/` (fallback)
- **Extension-level**: Provided by installed extensions

This allows you to have both project-specific agents and personal agents that work across all projects.
This allows you to have project-specific agents, personal agents that work across all projects, and extension-provided agents that add specialized capabilities.

### Extension Subagents

Extensions can provide custom subagents that become available when the extension is enabled. These agents are stored in the extension's `agents/` directory and follow the same format as personal and project agents.

Extension subagents:

- Are automatically discovered when the extension is enabled
- Appear in the `/agents manage` dialog under "Extension Agents" section
- Cannot be edited directly (edit the extension source instead)
- Follow the same configuration format as user-defined agents

To see which extensions provide subagents, check the extension's `qwen-extension.json` file for an `agents` field.

### File Format

@@ -398,7 +412,7 @@ description: Helps with testing, documentation, code review, and deployment
---
```

**Why:** Focused agents produce better results and are easier to maintain.
**Why:** Focused agents produce better results and are easier to maintain.

#### Clear Specialization

@@ -422,7 +436,7 @@ description: Works on frontend development tasks
---
```

**Why:** Specific expertise leads to more targeted and effective assistance.
**Why:** Specific expertise leads to more targeted and effective assistance.

#### Actionable Descriptions

@@ -440,7 +454,7 @@ description: Reviews code for security vulnerabilities, performance issues, and
description: A helpful code reviewer
```

**Why:** Clear descriptions help the main AI choose the right agent for each task.
**Why:** Clear descriptions help the main AI choose the right agent for each task.

### Configuration Best Practices
package-lock.json (generated, 1132 changed lines) — file diff suppressed because it is too large.
@@ -46,6 +46,7 @@
    "comment-json": "^4.2.5",
    "diff": "^7.0.0",
    "dotenv": "^17.1.0",
    "prompts": "^2.4.2",
    "fzf": "^0.5.2",
    "glob": "^10.5.0",
    "highlight.js": "^11.11.1",
@@ -79,6 +80,7 @@
    "@types/command-exists": "^1.2.3",
    "@types/diff": "^7.0.2",
    "@types/dotenv": "^6.1.1",
    "@types/prompts": "^2.4.9",
    "@types/node": "^20.11.24",
    "@types/react": "^19.1.8",
    "@types/react-dom": "^19.1.6",
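The new `prompts` runtime dependency (with its `@types/prompts` counterpart) is a small interactive-question library. One plausible use, given the extension `settings` feature described earlier in this changeset, is collecting setting values such as API keys at install time — that connection is an assumption, and the snippet below is purely illustrative rather than code taken from this diff:

```ts
import prompts from 'prompts';

// Illustrative only: ask for a sensitive extension setting value.
// The question shape uses the real `prompts` API; the specific setting is hypothetical.
async function askForSetting(): Promise<string | undefined> {
  const answer = await prompts({
    type: 'password', // hide input for sensitive values
    name: 'value',
    message: 'Enter a value for MY_API_KEY',
  });
  return answer.value;
}
```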
@@ -27,10 +27,8 @@ import { Readable, Writable } from 'node:stream';
import type { LoadedSettings } from '../config/settings.js';
import { SettingScope } from '../config/settings.js';
import { z } from 'zod';
import { ExtensionStorage, type Extension } from '../config/extension.js';
import type { CliArgs } from '../config/config.js';
import { loadCliConfig } from '../config/config.js';
import { ExtensionEnablementManager } from '../config/extensions/extensionEnablement.js';

// Import the modular Session class
import { Session } from './session/Session.js';
@@ -38,7 +36,6 @@ import { Session } from './session/Session.js';
export async function runAcpAgent(
  config: Config,
  settings: LoadedSettings,
  extensions: Extension[],
  argv: CliArgs,
) {
  const stdout = Writable.toWeb(process.stdout) as WritableStream;
@@ -51,8 +48,7 @@
  console.debug = console.error;

  new acp.AgentSideConnection(
    (client: acp.Client) =>
      new GeminiAgent(config, settings, extensions, argv, client),
    (client: acp.Client) => new GeminiAgent(config, settings, argv, client),
    stdout,
    stdin,
  );
@@ -65,7 +61,6 @@ class GeminiAgent {
  constructor(
    private config: Config,
    private settings: LoadedSettings,
    private extensions: Extension[],
    private argv: CliArgs,
    private client: acp.Client,
  ) {}
@@ -215,16 +210,7 @@
      continue: false,
    };

    const config = await loadCliConfig(
      settings,
      this.extensions,
      new ExtensionEnablementManager(
        ExtensionStorage.getUserExtensionsDir(),
        this.argv.extensions,
      ),
      argvForSession,
      cwd,
    );
    const config = await loadCliConfig(settings, argvForSession, cwd);

    await config.initialize();
    return config;
packages/cli/src/commands/extensions/consent.ts (new file, 218 lines)
@@ -0,0 +1,218 @@
import type {
  ExtensionConfig,
  ExtensionRequestOptions,
  SkillConfig,
  SubagentConfig,
} from '@qwen-code/qwen-code-core';
import type { ConfirmationRequest } from '../../ui/types.js';
import chalk from 'chalk';
import { t } from '../../i18n/index.js';

/**
 * Requests consent from the user to perform an action, by reading a Y/n
 * character from stdin.
 *
 * This should not be called from interactive mode as it will break the CLI.
 *
 * @param consentDescription The description of the thing they will be consenting to.
 * @returns boolean, whether they consented or not.
 */
export async function requestConsentNonInteractive(
  consentDescription: string,
): Promise<boolean> {
  console.info(consentDescription);
  const result = await promptForConsentNonInteractive(
    t('Do you want to continue? [Y/n]: '),
  );
  return result;
}

/**
 * Requests consent from the user to perform an action, in interactive mode.
 *
 * This should not be called from non-interactive mode as it will not work.
 *
 * @param consentDescription The description of the thing they will be consenting to.
 * @param addExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
 * @returns boolean, whether they consented or not.
 */
export async function requestConsentInteractive(
  consentDescription: string,
  addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
  return promptForConsentInteractive(
    consentDescription + '\n\n' + t('Do you want to continue?'),
    addExtensionUpdateConfirmationRequest,
  );
}

/**
 * Asks users a prompt and awaits for a y/n response on stdin.
 *
 * This should not be called from interactive mode as it will break the CLI.
 *
 * @param prompt A yes/no prompt to ask the user
 * @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
 */
async function promptForConsentNonInteractive(
  prompt: string,
): Promise<boolean> {
  const readline = await import('node:readline');
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  return new Promise((resolve) => {
    rl.question(prompt, (answer) => {
      rl.close();
      resolve(['y', ''].includes(answer.trim().toLowerCase()));
    });
  });
}

/**
 * Asks users an interactive yes/no prompt.
 *
 * This should not be called from non-interactive mode as it will break the CLI.
 *
 * @param prompt A markdown prompt to ask the user
 * @param addExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
 * @returns Whether or not the user answers yes.
 */
async function promptForConsentInteractive(
  prompt: string,
  addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
  return new Promise<boolean>((resolve) => {
    addExtensionUpdateConfirmationRequest({
      prompt,
      onConfirm: (resolvedConfirmed) => {
        resolve(resolvedConfirmed);
      },
    });
  });
}

/**
 * Builds a consent string for installing an extension based on it's
 * extensionConfig.
 */
export function extensionConsentString(
  extensionConfig: ExtensionConfig,
  commands: string[] = [],
  skills: SkillConfig[] = [],
  subagents: SubagentConfig[] = [],
): string {
  const output: string[] = [];
  const mcpServerEntries = Object.entries(extensionConfig.mcpServers || {});
  output.push(
    t('Installing extension "{{name}}".', { name: extensionConfig.name }),
  );
  output.push(
    t(
      '**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**',
    ),
  );

  if (mcpServerEntries.length) {
    output.push(t('This extension will run the following MCP servers:'));
    for (const [key, mcpServer] of mcpServerEntries) {
      const isLocal = !!mcpServer.command;
      const source =
        mcpServer.httpUrl ??
        `${mcpServer.command || ''}${mcpServer.args ? ' ' + mcpServer.args.join(' ') : ''}`;
      output.push(
        ` * ${key} (${isLocal ? t('local') : t('remote')}): ${source}`,
      );
    }
  }
  if (commands && commands.length > 0) {
    output.push(
      t('This extension will add the following commands: {{commands}}.', {
        commands: commands.join(', '),
      }),
    );
  }
  if (extensionConfig.contextFileName) {
    const fileName = Array.isArray(extensionConfig.contextFileName)
      ? extensionConfig.contextFileName.join(', ')
      : extensionConfig.contextFileName;
    output.push(
      t(
        'This extension will append info to your QWEN.md context using {{fileName}}',
        { fileName },
      ),
    );
  }
  if (extensionConfig.excludeTools) {
    output.push(
      t('This extension will exclude the following core tools: {{tools}}', {
        tools: extensionConfig.excludeTools.join(', '),
      }),
    );
  }
  if (skills.length > 0) {
    output.push(t('This extension will install the following skills:'));
    for (const skill of skills) {
      output.push(` * ${chalk.bold(skill.name)}: ${skill.description}`);
    }
  }
  if (subagents.length > 0) {
    output.push(t('This extension will install the following subagents:'));
    for (const subagent of subagents) {
      output.push(` * ${chalk.bold(subagent.name)}: ${subagent.description}`);
    }
  }
  return output.join('\n');
}

/**
 * Requests consent from the user to install an extension (extensionConfig), if
 * there is any difference between the consent string for `extensionConfig` and
 * `previousExtensionConfig`.
 *
 * Always requests consent if previousExtensionConfig is null.
 *
 * Throws if the user does not consent.
 */
export const requestConsentOrFail = async (
  requestConsent: (consent: string) => Promise<boolean>,
  options?: ExtensionRequestOptions,
) => {
  if (!options) return;
  const {
    extensionConfig,
    commands = [],
    skills = [],
    subagents = [],
    previousExtensionConfig,
    previousCommands = [],
    previousSkills = [],
    previousSubagents = [],
  } = options;
  const extensionConsent = extensionConsentString(
    extensionConfig,
    commands,
    skills,
    subagents,
  );
  if (previousExtensionConfig) {
    const previousExtensionConsent = extensionConsentString(
      previousExtensionConfig,
      previousCommands,
      previousSkills,
      previousSubagents,
    );
    if (previousExtensionConsent === extensionConsent) {
      return;
    }
  }
  if (!(await requestConsent(extensionConsent))) {
    throw new Error(
      t('Installation cancelled for "{{name}}".', {
        name: extensionConfig.name,
      }),
    );
  }
};
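Taken together, these helpers compose the way the updated CLI commands below use them: `requestConsentOrFail` builds the consent summary and throws if the user declines, while `requestConsentNonInteractive` performs the actual stdin prompt. A minimal sketch of that composition, mirroring the binding used by `install.ts` and `link.ts` later in this diff (the commented `options` call is a stand-in for the `ExtensionRequestOptions` the extension manager supplies):

```ts
import {
  requestConsentNonInteractive,
  requestConsentOrFail,
} from './consent.js';

// Bind the stdin prompt into requestConsentOrFail, as the install/link commands do.
const requestConsent = requestConsentOrFail.bind(
  null,
  requestConsentNonInteractive,
);

// Later, e.g. from within the extension manager, something like:
//   await requestConsent(options); // throws "Installation cancelled…" if the user answers "n"
```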
@@ -5,21 +5,22 @@
 */

import { type CommandModule } from 'yargs';
import { disableExtension } from '../../config/extension.js';
import { SettingScope } from '../../config/settings.js';
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';

interface DisableArgs {
  name: string;
  scope?: string;
}

export function handleDisable(args: DisableArgs) {
export async function handleDisable(args: DisableArgs) {
  const extensionManager = await getExtensionManager();
  try {
    if (args.scope?.toLowerCase() === 'workspace') {
      disableExtension(args.name, SettingScope.Workspace);
      extensionManager.disableExtension(args.name, SettingScope.Workspace);
    } else {
      disableExtension(args.name, SettingScope.User);
      extensionManager.disableExtension(args.name, SettingScope.User);
    }
    console.log(
      `Extension "${args.name}" successfully disabled for scope "${args.scope}".`,
@@ -61,8 +62,8 @@ export const disableCommand: CommandModule = {
    }
    return true;
  }),
  handler: (argv) => {
    handleDisable({
  handler: async (argv) => {
    await handleDisable({
      name: argv['name'] as string,
      scope: argv['scope'] as string,
    });
@@ -6,20 +6,22 @@

import { type CommandModule } from 'yargs';
import { FatalConfigError, getErrorMessage } from '@qwen-code/qwen-code-core';
import { enableExtension } from '../../config/extension.js';
import { SettingScope } from '../../config/settings.js';
import { getExtensionManager } from './utils.js';

interface EnableArgs {
  name: string;
  scope?: string;
}

export function handleEnable(args: EnableArgs) {
export async function handleEnable(args: EnableArgs) {
  const extensionManager = await getExtensionManager();

  try {
    if (args.scope?.toLowerCase() === 'workspace') {
      enableExtension(args.name, SettingScope.Workspace);
      extensionManager.enableExtension(args.name, SettingScope.Workspace);
    } else {
      enableExtension(args.name, SettingScope.User);
      extensionManager.enableExtension(args.name, SettingScope.User);
    }
    if (args.scope) {
      console.log(
@@ -66,8 +68,8 @@ export const enableCommand: CommandModule = {
    }
    return true;
  }),
  handler: (argv) => {
    handleEnable({
  handler: async (argv) => {
    await handleEnable({
      name: argv['name'] as string,
      scope: argv['scope'] as string,
    });
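The `getExtensionManager()` helper that the disable, enable, link, and list commands now import from `./utils.js` is not included in this diff. Based on how `install.ts` and `uninstall.ts` construct `ExtensionManager` inline (shown below), a plausible sketch of that helper would look like the following — an assumption about `utils.ts`, not its actual contents:

```ts
import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
import { loadSettings } from '../../config/settings.js';
import {
  requestConsentNonInteractive,
  requestConsentOrFail,
} from './consent.js';

// Hypothetical: mirrors the inline construction used by install.ts and uninstall.ts in this diff.
export async function getExtensionManager(): Promise<ExtensionManager> {
  const workspaceDir = process.cwd();
  const extensionManager = new ExtensionManager({
    workspaceDir,
    isWorkspaceTrusted: !!isWorkspaceTrusted(loadSettings(workspaceDir).merged),
    requestConsent: requestConsentOrFail.bind(null, requestConsentNonInteractive),
  });
  await extensionManager.refreshCache();
  return extensionManager;
}
```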
@@ -1,6 +1,3 @@
prompt = """
Please summarize the findings for the pattern `{{args}}`.

Search Results:
!{grep -r {{args}} .}
"""
@@ -5,58 +5,67 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
|
||||
import {
|
||||
installExtension,
|
||||
requestConsentNonInteractive,
|
||||
} from '../../config/extension.js';
|
||||
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
|
||||
ExtensionManager,
|
||||
parseInstallSource,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { stat } from 'node:fs/promises';
|
||||
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
import {
|
||||
requestConsentOrFail,
|
||||
requestConsentNonInteractive,
|
||||
} from './consent.js';
|
||||
|
||||
interface InstallArgs {
|
||||
source: string;
|
||||
ref?: string;
|
||||
autoUpdate?: boolean;
|
||||
allowPreRelease?: boolean;
|
||||
consent?: boolean;
|
||||
}
|
||||
|
||||
export async function handleInstall(args: InstallArgs) {
|
||||
try {
|
||||
let installMetadata: ExtensionInstallMetadata;
|
||||
const { source } = args;
|
||||
const installMetadata = await parseInstallSource(args.source);
|
||||
|
||||
if (
|
||||
source.startsWith('http://') ||
|
||||
source.startsWith('https://') ||
|
||||
source.startsWith('git@') ||
|
||||
source.startsWith('sso://')
|
||||
installMetadata.type !== 'git' &&
|
||||
installMetadata.type !== 'github-release'
|
||||
) {
|
||||
installMetadata = {
|
||||
source,
|
||||
type: 'git',
|
||||
ref: args.ref,
|
||||
autoUpdate: args.autoUpdate,
|
||||
};
|
||||
} else {
|
||||
if (args.ref || args.autoUpdate) {
|
||||
throw new Error(
|
||||
'--ref and --auto-update are not applicable for local extensions.',
|
||||
'--ref and --auto-update are not applicable for marketplace extensions.',
|
||||
);
|
||||
}
|
||||
try {
|
||||
await stat(source);
|
||||
installMetadata = {
|
||||
source,
|
||||
type: 'local',
|
||||
};
|
||||
} catch {
|
||||
throw new Error('Install source not found.');
|
||||
}
|
||||
}
|
||||
|
||||
const name = await installExtension(
|
||||
installMetadata,
|
||||
requestConsentNonInteractive,
|
||||
const requestConsent = args.consent
|
||||
? () => Promise.resolve()
|
||||
: requestConsentOrFail.bind(null, requestConsentNonInteractive);
|
||||
const workspaceDir = process.cwd();
|
||||
const extensionManager = new ExtensionManager({
|
||||
workspaceDir,
|
||||
isWorkspaceTrusted: !!isWorkspaceTrusted(
|
||||
loadSettings(workspaceDir).merged,
|
||||
),
|
||||
requestConsent,
|
||||
});
|
||||
await extensionManager.refreshCache();
|
||||
|
||||
const extension = await extensionManager.installExtension(
|
||||
{
|
||||
...installMetadata,
|
||||
ref: args.ref,
|
||||
autoUpdate: args.autoUpdate,
|
||||
allowPreRelease: args.allowPreRelease,
|
||||
},
|
||||
requestConsent,
|
||||
);
|
||||
console.log(
|
||||
`Extension "${extension.name}" installed successfully and enabled.`,
|
||||
);
|
||||
console.log(`Extension "${name}" installed successfully and enabled.`);
|
||||
} catch (error) {
|
||||
console.error(getErrorMessage(error));
|
||||
process.exit(1);
|
||||
@@ -65,11 +74,13 @@ export async function handleInstall(args: InstallArgs) {

export const installCommand: CommandModule = {
  command: 'install <source>',
  describe: 'Installs an extension from a git repository URL or a local path.',
  describe:
    'Installs an extension from a git repository URL, local path, or claude marketplace (marketplace-url:plugin-name).',
  builder: (yargs) =>
    yargs
      .positional('source', {
        describe: 'The github URL or local path of the extension to install.',
        describe:
          'The github URL, local path, or marketplace source (marketplace-url:plugin-name) of the extension to install.',
        type: 'string',
        demandOption: true,
      })
@@ -81,6 +92,16 @@ export const installCommand: CommandModule = {
        describe: 'Enable auto-update for this extension.',
        type: 'boolean',
      })
      .option('pre-release', {
        describe: 'Enable pre-release versions for this extension.',
        type: 'boolean',
      })
      .option('consent', {
        describe:
          'Acknowledge the security risks of installing an extension and skip the confirmation prompt.',
        type: 'boolean',
        default: false,
      })
      .check((argv) => {
        if (!argv.source) {
          throw new Error('The source argument must be provided.');
@@ -92,6 +113,8 @@ export const installCommand: CommandModule = {
      source: argv['source'] as string,
      ref: argv['ref'] as string | undefined,
      autoUpdate: argv['auto-update'] as boolean | undefined,
      allowPreRelease: argv['pre-release'] as boolean | undefined,
      consent: argv['consent'] as boolean | undefined,
    });
  },
};
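The new marketplace source form is only described by the option text above; the code that actually splits "marketplace-url:plugin-name" is not shown in this hunk. A rough sketch of how such a string could be split, purely as an assumption for illustration (the helper name and behavior are hypothetical, not taken from this change):

// Hypothetical helper, illustration only: split "marketplace-url:plugin-name"
// at the last ':' so URLs containing '://' are not broken apart.
function splitMarketplaceSource(
  source: string,
): { url: string; plugin: string } | null {
  const idx = source.lastIndexOf(':');
  // Require a ':' beyond the scheme separator, e.g. "https://host/market:my-plugin".
  if (idx <= source.indexOf('://') + 2) return null;
  return { url: source.slice(0, idx), plugin: source.slice(idx + 1) };
}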
@@ -5,13 +5,13 @@
 */

import type { CommandModule } from 'yargs';
import {
  installExtension,
  requestConsentNonInteractive,
} from '../../config/extension.js';
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';

import { type ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import {
  requestConsentNonInteractive,
  requestConsentOrFail,
} from './consent.js';
import { getExtensionManager } from './utils.js';

interface InstallArgs {
  path: string;
@@ -23,12 +23,14 @@ export async function handleLink(args: InstallArgs) {
      source: args.path,
      type: 'link',
    };
    const extensionName = await installExtension(
    const extensionManager = await getExtensionManager();

    const extension = await extensionManager.installExtension(
      installMetadata,
      requestConsentNonInteractive,
      requestConsentOrFail.bind(null, requestConsentNonInteractive),
    );
    console.log(
      `Extension "${extensionName}" linked successfully and enabled.`,
      `Extension "${extension.name}" linked successfully and enabled.`,
    );
  } catch (error) {
    console.error(getErrorMessage(error));
@@ -5,19 +5,23 @@
 */

import type { CommandModule } from 'yargs';
import { loadUserExtensions, toOutputString } from '../../config/extension.js';
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';

export async function handleList() {
  try {
    const extensions = loadUserExtensions();
    const extensionManager = await getExtensionManager();
    const extensions = extensionManager.getLoadedExtensions();

    if (extensions.length === 0) {
      console.log('No extensions installed.');
      return;
    }
    console.log(
      extensions
        .map((extension, _): string => toOutputString(extension, process.cwd()))
        .map((extension, _): string =>
          extensionManager.toOutputString(extension, process.cwd()),
        )
        .join('\n\n'),
    );
  } catch (error) {
@@ -5,8 +5,14 @@
 */

import type { CommandModule } from 'yargs';
import { uninstallExtension } from '../../config/extension.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionManager } from '@qwen-code/qwen-code-core';
import {
  requestConsentNonInteractive,
  requestConsentOrFail,
} from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
import { loadSettings } from '../../config/settings.js';

interface UninstallArgs {
  name: string; // can be extension name or source URL.
@@ -14,7 +20,19 @@ interface UninstallArgs {

export async function handleUninstall(args: UninstallArgs) {
  try {
    await uninstallExtension(args.name);
    const workspaceDir = process.cwd();
    const extensionManager = new ExtensionManager({
      workspaceDir,
      requestConsent: requestConsentOrFail.bind(
        null,
        requestConsentNonInteractive,
      ),
      isWorkspaceTrusted: !!isWorkspaceTrusted(
        loadSettings(workspaceDir).merged,
      ),
    });
    await extensionManager.refreshCache();
    await extensionManager.uninstallExtension(args.name, false);
    console.log(`Extension "${args.name}" successfully uninstalled.`);
  } catch (error) {
    console.error(getErrorMessage(error));
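The uninstall handler above builds its ExtensionManager inline with the same options (workspaceDir, requestConsent, isWorkspaceTrusted) that the new getExtensionManager() helper wraps (see the new utils.ts file further below). A condensed sketch of how the same flow could be expressed through that helper, assuming the same imports used by the other command files in this change:

// Sketch only: the uninstall flow expressed via the shared helper from utils.ts.
export async function handleUninstallViaHelper(args: UninstallArgs) {
  try {
    const extensionManager = await getExtensionManager(); // refreshCache() is awaited inside
    await extensionManager.uninstallExtension(args.name, false);
    console.log(`Extension "${args.name}" successfully uninstalled.`);
  } catch (error) {
    console.error(getErrorMessage(error));
  }
}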
@@ -5,22 +5,13 @@
 */

import type { CommandModule } from 'yargs';
import {
  loadExtensions,
  annotateActiveExtensions,
  ExtensionStorage,
  requestConsentNonInteractive,
} from '../../config/extension.js';
import {
  updateAllUpdatableExtensions,
  type ExtensionUpdateInfo,
  checkForAllExtensionUpdates,
  updateExtension,
} from '../../config/extensions/update.js';
import { checkForExtensionUpdate } from '../../config/extensions/github.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import {
  checkForExtensionUpdate,
  type ExtensionUpdateInfo,
} from '@qwen-code/qwen-code-core';
import { getExtensionManager } from './utils.js';

interface UpdateArgs {
  name?: string;
@@ -31,19 +22,9 @@ const updateOutput = (info: ExtensionUpdateInfo) =>
  `Extension "${info.name}" successfully updated: ${info.originalVersion} → ${info.updatedVersion}.`;

export async function handleUpdate(args: UpdateArgs) {
  const workingDir = process.cwd();
  const extensionEnablementManager = new ExtensionEnablementManager(
    ExtensionStorage.getUserExtensionsDir(),
    // Force enable named extensions, otherwise we will only update the enabled
    // ones.
    args.name ? [args.name] : [],
  );
  const allExtensions = loadExtensions(extensionEnablementManager);
  const extensions = annotateActiveExtensions(
    allExtensions,
    workingDir,
    extensionEnablementManager,
  );
  const extensionManager = await getExtensionManager();
  const extensions = extensionManager.getLoadedExtensions();

  if (args.name) {
    try {
      const extension = extensions.find(
@@ -53,25 +34,23 @@ export async function handleUpdate(args: UpdateArgs) {
        console.log(`Extension "${args.name}" not found.`);
        return;
      }
      let updateState: ExtensionUpdateState | undefined;
      if (!extension.installMetadata) {
        console.log(
          `Unable to install extension "${args.name}" due to missing install metadata`,
        );
        return;
      }
      await checkForExtensionUpdate(extension, (newState) => {
        updateState = newState;
      });
      const updateState = await checkForExtensionUpdate(
        extension,
        extensionManager,
      );
      if (updateState !== ExtensionUpdateState.UPDATE_AVAILABLE) {
        console.log(`Extension "${args.name}" is already up to date.`);
        return;
      }
      // TODO(chrstnb): we should list extensions if the requested extension is not installed.
      const updatedExtensionInfo = (await updateExtension(
      const updatedExtensionInfo = (await extensionManager.updateExtension(
        extension,
        workingDir,
        requestConsentNonInteractive,
        updateState,
        () => {},
      ))!;
@@ -92,18 +71,15 @@ export async function handleUpdate(args: UpdateArgs) {
  if (args.all) {
    try {
      const extensionState = new Map();
      await checkForAllExtensionUpdates(extensions, (action) => {
        if (action.type === 'SET_STATE') {
          extensionState.set(action.payload.name, {
            status: action.payload.state,
      await extensionManager.checkForAllExtensionUpdates(
        (extensionName, state) => {
          extensionState.set(extensionName, {
            status: state,
            processed: true, // No need to process as we will force the update.
          });
        }
      });
      let updateInfos = await updateAllUpdatableExtensions(
        workingDir,
        requestConsentNonInteractive,
        extensions,
        },
      );
      let updateInfos = await extensionManager.updateAllUpdatableExtensions(
        extensionState,
        () => {},
      );
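Condensed, the single-extension path after this change is: load extensions through the manager, check for an update, and only call updateExtension when an update is actually available. A sketch, assuming the imports shown above; the exact parameter list of updateExtension is abridged here, since old and new arguments interleave in the hunk:

// Sketch of the single-extension update path introduced above.
const extensionManager = await getExtensionManager();
const extension = extensionManager
  .getLoadedExtensions()
  .find((e) => e.name === args.name);
if (!extension?.installMetadata) return; // nothing to update without install metadata

const updateState = await checkForExtensionUpdate(extension, extensionManager);
if (updateState !== ExtensionUpdateState.UPDATE_AVAILABLE) return;

const info = (await extensionManager.updateExtension(
  extension,
  updateState, // argument list abridged; see the full hunk above
))!;
console.log(
  `Extension "${info.name}" successfully updated: ${info.originalVersion} → ${info.updatedVersion}.`,
);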
27
packages/cli/src/commands/extensions/utils.ts
Normal file
@@ -0,0 +1,27 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { loadSettings } from '../../config/settings.js';
import {
  requestConsentOrFail,
  requestConsentNonInteractive,
} from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';

export async function getExtensionManager(): Promise<ExtensionManager> {
  const workspaceDir = process.cwd();
  const extensionManager = new ExtensionManager({
    workspaceDir,
    requestConsent: requestConsentOrFail.bind(
      null,
      requestConsentNonInteractive,
    ),
    isWorkspaceTrusted: !!isWorkspaceTrusted(loadSettings(workspaceDir).merged),
  });
  await extensionManager.refreshCache();
  return extensionManager;
}
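The link, list, and update commands in this change obtain their manager through this helper rather than constructing one inline. For example, the list command shown earlier in this diff boils down to:

// Usage as it appears (condensed) in the list command earlier in this diff.
const extensionManager = await getExtensionManager();
const extensions = extensionManager.getLoadedExtensions();
for (const extension of extensions) {
  console.log(extensionManager.toOutputString(extension, process.cwd()));
}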
@@ -7,7 +7,8 @@
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import { listMcpServers } from './list.js';
import { loadSettings } from '../../config/settings.js';
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
import { loadExtensions } from '../../config/extension.js';
import { ExtensionStorage } from '../../config/extensions/storage.js';
import { createTransport } from '@qwen-code/qwen-code-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';

@@ -8,10 +8,13 @@
import type { CommandModule } from 'yargs';
import { loadSettings } from '../../config/settings.js';
import type { MCPServerConfig } from '@qwen-code/qwen-code-core';
import { MCPServerStatus, createTransport } from '@qwen-code/qwen-code-core';
import {
  MCPServerStatus,
  createTransport,
  ExtensionManager,
} from '@qwen-code/qwen-code-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';

const COLOR_GREEN = '\u001b[32m';
const COLOR_YELLOW = '\u001b[33m';
@@ -22,22 +25,27 @@ async function getMcpServersFromConfig(): Promise<
  Record<string, MCPServerConfig>
> {
  const settings = loadSettings();
  const extensions = loadExtensions(
    new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
  );
  const extensionManager = new ExtensionManager({
    isWorkspaceTrusted: !!isWorkspaceTrusted(settings.merged),
    telemetrySettings: settings.merged.telemetry,
  });
  await extensionManager.refreshCache();
  const extensions = extensionManager.getLoadedExtensions();
  const mcpServers = { ...(settings.merged.mcpServers || {}) };
  for (const extension of extensions) {
    Object.entries(extension.config.mcpServers || {}).forEach(
      ([key, server]) => {
        if (mcpServers[key]) {
          return;
        }
        mcpServers[key] = {
          ...server,
          extensionName: extension.config.name,
        };
      },
    );
    if (extension.isActive) {
      Object.entries(extension.config.mcpServers || {}).forEach(
        ([key, server]) => {
          if (mcpServers[key]) {
            return;
          }
          mcpServers[key] = {
            ...server,
            extensionName: extension.config.name,
          };
        },
      );
    }
  }
  return mcpServers;
}
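The merge above gives servers defined in settings precedence over extension-provided servers with the same key, and after this change only active extensions contribute at all. A small worked example with hypothetical names, not taken from this change:

// Hypothetical inputs, illustrating the precedence implemented above.
// settings.merged.mcpServers       = { search: { command: 'search-server' } }
// active extension "web-tools" has = { search: { ... }, fetch: { ... } }
//
// Result: 'search' keeps the settings entry untouched, while 'fetch' is added
// with extensionName: 'web-tools' attached.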
@@ -16,7 +16,8 @@ import {
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { loadCliConfig, parseArguments, type CliArgs } from './config.js';
|
||||
import type { Settings } from './settings.js';
|
||||
import { ExtensionStorage, type Extension } from './extension.js';
|
||||
import type { Extension } from './extension.js';
|
||||
import { ExtensionStorage } from './extensions/storage.js';
|
||||
import * as ServerConfig from '@qwen-code/qwen-code-core';
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
|
||||
|
||||
@@ -9,7 +9,6 @@ import {
|
||||
AuthType,
|
||||
Config,
|
||||
DEFAULT_QWEN_EMBEDDING_MODEL,
|
||||
DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
|
||||
FileDiscoveryService,
|
||||
getCurrentGeminiMdFilename,
|
||||
loadServerHierarchicalMemory,
|
||||
@@ -23,7 +22,6 @@ import {
|
||||
SessionService,
|
||||
type ResumedSessionData,
|
||||
type FileFilteringOptions,
|
||||
type MCPServerConfig,
|
||||
type ToolName,
|
||||
EditTool,
|
||||
ShellTool,
|
||||
@@ -43,14 +41,11 @@ import { homedir } from 'node:os';
|
||||
|
||||
import { resolvePath } from '../utils/resolvePath.js';
|
||||
import { getCliVersion } from '../utils/version.js';
|
||||
import type { Extension } from './extension.js';
|
||||
import { annotateActiveExtensions } from './extension.js';
|
||||
import { loadSandboxConfig } from './sandboxConfig.js';
|
||||
import { appEvents } from '../utils/events.js';
|
||||
import { mcpCommand } from '../commands/mcp.js';
|
||||
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import type { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
|
||||
import { buildWebSearchConfig } from './webSearch.js';
|
||||
|
||||
// Simple console logger for now - replace with actual logger if available
|
||||
@@ -560,11 +555,9 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
|
||||
}),
|
||||
)
|
||||
// Register MCP subcommands
|
||||
.command(mcpCommand);
|
||||
|
||||
if (settings?.experimental?.extensionManagement ?? true) {
|
||||
yargsInstance.command(extensionsCommand);
|
||||
}
|
||||
.command(mcpCommand)
|
||||
// Register Extension subcommands
|
||||
.command(extensionsCommand);
|
||||
|
||||
yargsInstance
|
||||
.version(await getCliVersion()) // This will enable the --version flag based on package.json
|
||||
@@ -639,11 +632,11 @@ export async function loadHierarchicalGeminiMemory(
|
||||
includeDirectoriesToReadGemini: readonly string[] = [],
|
||||
debugMode: boolean,
|
||||
fileService: FileDiscoveryService,
|
||||
settings: Settings,
|
||||
extensionContextFilePaths: string[] = [],
|
||||
folderTrust: boolean,
|
||||
memoryImportFormat: 'flat' | 'tree' = 'tree',
|
||||
fileFilteringOptions?: FileFilteringOptions,
|
||||
maxDirs: number = 200,
|
||||
): Promise<{ memoryContent: string; fileCount: number }> {
|
||||
// FIX: Use real, canonical paths for a reliable comparison to handle symlinks.
|
||||
const realCwd = fs.realpathSync(path.resolve(currentWorkingDirectory));
|
||||
@@ -670,7 +663,7 @@ export async function loadHierarchicalGeminiMemory(
|
||||
folderTrust,
|
||||
memoryImportFormat,
|
||||
fileFilteringOptions,
|
||||
settings.context?.discoveryMaxDirs,
|
||||
maxDirs,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -685,30 +678,17 @@ export function isDebugMode(argv: CliArgs): boolean {
|
||||
|
||||
export async function loadCliConfig(
|
||||
settings: Settings,
|
||||
extensions: Extension[],
|
||||
extensionEnablementManager: ExtensionEnablementManager,
|
||||
argv: CliArgs,
|
||||
cwd: string = process.cwd(),
|
||||
overrideExtensions?: string[],
|
||||
): Promise<Config> {
|
||||
const debugMode = isDebugMode(argv);
|
||||
|
||||
const memoryImportFormat = settings.context?.importFormat || 'tree';
|
||||
|
||||
const ideMode = settings.ide?.enabled ?? false;
|
||||
|
||||
const folderTrust = settings.security?.folderTrust?.enabled ?? false;
|
||||
const trustedFolder = isWorkspaceTrusted(settings)?.isTrusted ?? true;
|
||||
|
||||
const allExtensions = annotateActiveExtensions(
|
||||
extensions,
|
||||
cwd,
|
||||
extensionEnablementManager,
|
||||
);
|
||||
|
||||
const activeExtensions = extensions.filter(
|
||||
(_, i) => allExtensions[i].isActive,
|
||||
);
|
||||
|
||||
// Set the context filename in the server's memoryTool module BEFORE loading memory
|
||||
// TODO(b/343434939): This is a bit of a hack. The contextFileName should ideally be passed
|
||||
// directly to the Config constructor in core, and have core handle setGeminiMdFilename.
|
||||
@@ -720,51 +700,27 @@ export async function loadCliConfig(
|
||||
setServerGeminiMdFilename(getCurrentGeminiMdFilename());
|
||||
}
|
||||
|
||||
const extensionContextFilePaths = activeExtensions.flatMap(
|
||||
(e) => e.contextFiles,
|
||||
);
|
||||
|
||||
// Automatically load output-language.md if it exists
|
||||
const outputLanguageFilePath = path.join(
|
||||
let outputLanguageFilePath: string | undefined = path.join(
|
||||
Storage.getGlobalQwenDir(),
|
||||
'output-language.md',
|
||||
);
|
||||
if (fs.existsSync(outputLanguageFilePath)) {
|
||||
extensionContextFilePaths.push(outputLanguageFilePath);
|
||||
if (debugMode) {
|
||||
logger.debug(
|
||||
`Found output-language.md, adding to context files: ${outputLanguageFilePath}`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
outputLanguageFilePath = undefined;
|
||||
}
|
||||
|
||||
const fileService = new FileDiscoveryService(cwd);
|
||||
|
||||
const fileFiltering = {
|
||||
...DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
|
||||
...settings.context?.fileFiltering,
|
||||
};
|
||||
|
||||
const includeDirectories = (settings.context?.includeDirectories || [])
|
||||
.map(resolvePath)
|
||||
.concat((argv.includeDirectories || []).map(resolvePath));
|
||||
|
||||
// Call the (now wrapper) loadHierarchicalGeminiMemory which calls the server's version
|
||||
const { memoryContent, fileCount } = await loadHierarchicalGeminiMemory(
|
||||
cwd,
|
||||
settings.context?.loadMemoryFromIncludeDirectories
|
||||
? includeDirectories
|
||||
: [],
|
||||
debugMode,
|
||||
fileService,
|
||||
settings,
|
||||
extensionContextFilePaths,
|
||||
trustedFolder,
|
||||
memoryImportFormat,
|
||||
fileFiltering,
|
||||
);
|
||||
|
||||
let mcpServers = mergeMcpServers(settings, activeExtensions);
|
||||
const question = argv.promptInteractive || argv.prompt || '';
|
||||
const inputFormat: InputFormat =
|
||||
(argv.inputFormat as InputFormat | undefined) ?? InputFormat.TEXT;
|
||||
@@ -902,38 +858,18 @@ export async function loadCliConfig(
|
||||
|
||||
const excludeTools = mergeExcludeTools(
|
||||
settings,
|
||||
activeExtensions,
|
||||
extraExcludes.length > 0 ? extraExcludes : undefined,
|
||||
argv.excludeTools,
|
||||
);
|
||||
const blockedMcpServers: Array<{ name: string; extensionName: string }> = [];
|
||||
|
||||
if (!argv.allowedMcpServerNames) {
|
||||
if (settings.mcp?.allowed) {
|
||||
mcpServers = allowedMcpServers(
|
||||
mcpServers,
|
||||
settings.mcp.allowed,
|
||||
blockedMcpServers,
|
||||
);
|
||||
}
|
||||
|
||||
if (settings.mcp?.excluded) {
|
||||
const excludedNames = new Set(settings.mcp.excluded.filter(Boolean));
|
||||
if (excludedNames.size > 0) {
|
||||
mcpServers = Object.fromEntries(
|
||||
Object.entries(mcpServers).filter(([key]) => !excludedNames.has(key)),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (argv.allowedMcpServerNames) {
|
||||
mcpServers = allowedMcpServers(
|
||||
mcpServers,
|
||||
argv.allowedMcpServerNames,
|
||||
blockedMcpServers,
|
||||
);
|
||||
}
|
||||
const allowedMcpServers = argv.allowedMcpServerNames
|
||||
? new Set(argv.allowedMcpServerNames.filter(Boolean))
|
||||
: settings.mcp?.allowed
|
||||
? new Set(settings.mcp.allowed.filter(Boolean))
|
||||
: undefined;
|
||||
const excludedMcpServers = settings.mcp?.excluded
|
||||
? new Set(settings.mcp.excluded.filter(Boolean))
|
||||
: undefined;
|
||||
|
||||
const selectedAuthType =
|
||||
(argv.authType as AuthType | undefined) ||
|
||||
@@ -1000,6 +936,8 @@ export async function loadCliConfig(
|
||||
includeDirectories,
|
||||
loadMemoryFromIncludeDirectories:
|
||||
settings.context?.loadMemoryFromIncludeDirectories || false,
|
||||
importFormat: settings.context?.importFormat || 'tree',
|
||||
discoveryMaxDirs: settings.context?.discoveryMaxDirs || 200,
|
||||
debugMode,
|
||||
question,
|
||||
fullContext: argv.allFiles || false,
|
||||
@@ -1009,9 +947,13 @@ export async function loadCliConfig(
|
||||
toolDiscoveryCommand: settings.tools?.discoveryCommand,
|
||||
toolCallCommand: settings.tools?.callCommand,
|
||||
mcpServerCommand: settings.mcp?.serverCommand,
|
||||
mcpServers,
|
||||
userMemory: memoryContent,
|
||||
geminiMdFileCount: fileCount,
|
||||
mcpServers: settings.mcpServers || {},
|
||||
allowedMcpServers: allowedMcpServers
|
||||
? Array.from(allowedMcpServers)
|
||||
: undefined,
|
||||
excludedMcpServers: excludedMcpServers
|
||||
? Array.from(excludedMcpServers)
|
||||
: undefined,
|
||||
approvalMode,
|
||||
showMemoryUsage:
|
||||
argv.showMemoryUsage || settings.ui?.showMemoryUsage || false,
|
||||
@@ -1034,15 +976,14 @@ export async function loadCliConfig(
|
||||
fileDiscoveryService: fileService,
|
||||
bugCommand: settings.advanced?.bugCommand,
|
||||
model: resolvedModel,
|
||||
extensionContextFilePaths,
|
||||
outputLanguageFilePath,
|
||||
sessionTokenLimit: settings.model?.sessionTokenLimit ?? -1,
|
||||
maxSessionTurns:
|
||||
argv.maxSessionTurns ?? settings.model?.maxSessionTurns ?? -1,
|
||||
experimentalZedIntegration: argv.acp || argv.experimentalAcp || false,
|
||||
experimentalSkills: argv.experimentalSkills || false,
|
||||
listExtensions: argv.listExtensions || false,
|
||||
extensions: allExtensions,
|
||||
blockedMcpServers,
|
||||
overrideExtensions: overrideExtensions || argv.extensions,
|
||||
noBrowser: !!process.env['NO_BROWSER'],
|
||||
authType: selectedAuthType,
|
||||
inputFormat,
|
||||
@@ -1084,61 +1025,8 @@ export async function loadCliConfig(
|
||||
});
|
||||
}
|
||||
|
||||
function allowedMcpServers(
|
||||
mcpServers: { [x: string]: MCPServerConfig },
|
||||
allowMCPServers: string[],
|
||||
blockedMcpServers: Array<{ name: string; extensionName: string }>,
|
||||
) {
|
||||
const allowedNames = new Set(allowMCPServers.filter(Boolean));
|
||||
if (allowedNames.size > 0) {
|
||||
mcpServers = Object.fromEntries(
|
||||
Object.entries(mcpServers).filter(([key, server]) => {
|
||||
const isAllowed = allowedNames.has(key);
|
||||
if (!isAllowed) {
|
||||
blockedMcpServers.push({
|
||||
name: key,
|
||||
extensionName: server.extensionName || '',
|
||||
});
|
||||
}
|
||||
return isAllowed;
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
blockedMcpServers.push(
|
||||
...Object.entries(mcpServers).map(([key, server]) => ({
|
||||
name: key,
|
||||
extensionName: server.extensionName || '',
|
||||
})),
|
||||
);
|
||||
mcpServers = {};
|
||||
}
|
||||
return mcpServers;
|
||||
}
|
||||
|
||||
function mergeMcpServers(settings: Settings, extensions: Extension[]) {
|
||||
const mcpServers = { ...(settings.mcpServers || {}) };
|
||||
for (const extension of extensions) {
|
||||
Object.entries(extension.config.mcpServers || {}).forEach(
|
||||
([key, server]) => {
|
||||
if (mcpServers[key]) {
|
||||
logger.warn(
|
||||
`Skipping extension MCP config for server with key "${key}" as it already exists.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
mcpServers[key] = {
|
||||
...server,
|
||||
extensionName: extension.config.name,
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
return mcpServers;
|
||||
}
|
||||
|
||||
function mergeExcludeTools(
|
||||
settings: Settings,
|
||||
extensions: Extension[],
|
||||
extraExcludes?: string[] | undefined,
|
||||
cliExcludeTools?: string[] | undefined,
|
||||
): string[] {
|
||||
@@ -1147,10 +1035,5 @@ function mergeExcludeTools(
|
||||
...(settings.tools?.exclude || []),
|
||||
...(extraExcludes || []),
|
||||
]);
|
||||
for (const extension of extensions) {
|
||||
for (const tool of extension.config.excludeTools || []) {
|
||||
allExcludeTools.add(tool);
|
||||
}
|
||||
}
|
||||
return [...allExcludeTools];
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,786 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import type {
|
||||
MCPServerConfig,
|
||||
GeminiCLIExtension,
|
||||
ExtensionInstallMetadata,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
QWEN_DIR,
|
||||
Storage,
|
||||
Config,
|
||||
ExtensionInstallEvent,
|
||||
ExtensionUninstallEvent,
|
||||
ExtensionDisableEvent,
|
||||
ExtensionEnableEvent,
|
||||
logExtensionEnable,
|
||||
logExtensionInstallEvent,
|
||||
logExtensionUninstall,
|
||||
logExtensionDisable,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import * as os from 'node:os';
|
||||
import { SettingScope, loadSettings } from '../config/settings.js';
|
||||
import { getErrorMessage } from '../utils/errors.js';
|
||||
import { recursivelyHydrateStrings } from './extensions/variables.js';
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
|
||||
import {
|
||||
cloneFromGit,
|
||||
downloadFromGitHubRelease,
|
||||
} from './extensions/github.js';
|
||||
import type { LoadExtensionContext } from './extensions/variableSchema.js';
|
||||
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
|
||||
import chalk from 'chalk';
|
||||
import type { ConfirmationRequest } from '../ui/types.js';
|
||||
|
||||
export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
|
||||
|
||||
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
|
||||
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';
|
||||
|
||||
export interface Extension {
|
||||
path: string;
|
||||
config: ExtensionConfig;
|
||||
contextFiles: string[];
|
||||
installMetadata?: ExtensionInstallMetadata | undefined;
|
||||
}
|
||||
|
||||
export interface ExtensionConfig {
|
||||
name: string;
|
||||
version: string;
|
||||
mcpServers?: Record<string, MCPServerConfig>;
|
||||
contextFileName?: string | string[];
|
||||
excludeTools?: string[];
|
||||
}
|
||||
|
||||
export interface ExtensionUpdateInfo {
|
||||
name: string;
|
||||
originalVersion: string;
|
||||
updatedVersion: string;
|
||||
}
|
||||
|
||||
export class ExtensionStorage {
|
||||
private readonly extensionName: string;
|
||||
|
||||
constructor(extensionName: string) {
|
||||
this.extensionName = extensionName;
|
||||
}
|
||||
|
||||
getExtensionDir(): string {
|
||||
return path.join(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
this.extensionName,
|
||||
);
|
||||
}
|
||||
|
||||
getConfigPath(): string {
|
||||
return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
|
||||
}
|
||||
|
||||
static getUserExtensionsDir(): string {
|
||||
const storage = new Storage(os.homedir());
|
||||
return storage.getExtensionsDir();
|
||||
}
|
||||
|
||||
static async createTmpDir(): Promise<string> {
|
||||
return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
|
||||
}
|
||||
}
|
||||
|
||||
export function getWorkspaceExtensions(workspaceDir: string): Extension[] {
|
||||
// If the workspace dir is the user extensions dir, there are no workspace extensions.
|
||||
if (path.resolve(workspaceDir) === path.resolve(os.homedir())) {
|
||||
return [];
|
||||
}
|
||||
return loadExtensionsFromDir(workspaceDir);
|
||||
}
|
||||
|
||||
export async function copyExtension(
|
||||
source: string,
|
||||
destination: string,
|
||||
): Promise<void> {
|
||||
await fs.promises.cp(source, destination, { recursive: true });
|
||||
}
|
||||
|
||||
export async function performWorkspaceExtensionMigration(
|
||||
extensions: Extension[],
|
||||
requestConsent: (consent: string) => Promise<boolean>,
|
||||
): Promise<string[]> {
|
||||
const failedInstallNames: string[] = [];
|
||||
|
||||
for (const extension of extensions) {
|
||||
try {
|
||||
const installMetadata: ExtensionInstallMetadata = {
|
||||
source: extension.path,
|
||||
type: 'local',
|
||||
};
|
||||
await installExtension(installMetadata, requestConsent);
|
||||
} catch (_) {
|
||||
failedInstallNames.push(extension.config.name);
|
||||
}
|
||||
}
|
||||
return failedInstallNames;
|
||||
}
|
||||
|
||||
function getTelemetryConfig(cwd: string) {
|
||||
const settings = loadSettings(cwd);
|
||||
const config = new Config({
|
||||
telemetry: settings.merged.telemetry,
|
||||
interactive: false,
|
||||
targetDir: cwd,
|
||||
cwd,
|
||||
model: '',
|
||||
debugMode: false,
|
||||
});
|
||||
return config;
|
||||
}
|
||||
|
||||
export function loadExtensions(
|
||||
extensionEnablementManager: ExtensionEnablementManager,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): Extension[] {
|
||||
const settings = loadSettings(workspaceDir).merged;
|
||||
const allExtensions = [...loadUserExtensions()];
|
||||
|
||||
if (
|
||||
(isWorkspaceTrusted(settings) ?? true) &&
|
||||
// Default management setting to true
|
||||
!(settings.experimental?.extensionManagement ?? true)
|
||||
) {
|
||||
allExtensions.push(...getWorkspaceExtensions(workspaceDir));
|
||||
}
|
||||
|
||||
const uniqueExtensions = new Map<string, Extension>();
|
||||
|
||||
for (const extension of allExtensions) {
|
||||
if (
|
||||
!uniqueExtensions.has(extension.config.name) &&
|
||||
extensionEnablementManager.isEnabled(extension.config.name, workspaceDir)
|
||||
) {
|
||||
uniqueExtensions.set(extension.config.name, extension);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(uniqueExtensions.values());
|
||||
}
|
||||
|
||||
export function loadUserExtensions(): Extension[] {
|
||||
const userExtensions = loadExtensionsFromDir(os.homedir());
|
||||
|
||||
const uniqueExtensions = new Map<string, Extension>();
|
||||
for (const extension of userExtensions) {
|
||||
if (!uniqueExtensions.has(extension.config.name)) {
|
||||
uniqueExtensions.set(extension.config.name, extension);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(uniqueExtensions.values());
|
||||
}
|
||||
|
||||
export function loadExtensionsFromDir(dir: string): Extension[] {
|
||||
const storage = new Storage(dir);
|
||||
const extensionsDir = storage.getExtensionsDir();
|
||||
if (!fs.existsSync(extensionsDir)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const extensions: Extension[] = [];
|
||||
for (const subdir of fs.readdirSync(extensionsDir)) {
|
||||
const extensionDir = path.join(extensionsDir, subdir);
|
||||
|
||||
const extension = loadExtension({ extensionDir, workspaceDir: dir });
|
||||
if (extension != null) {
|
||||
extensions.push(extension);
|
||||
}
|
||||
}
|
||||
return extensions;
|
||||
}
|
||||
|
||||
export function loadExtension(context: LoadExtensionContext): Extension | null {
|
||||
const { extensionDir, workspaceDir } = context;
|
||||
if (!fs.statSync(extensionDir).isDirectory()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const installMetadata = loadInstallMetadata(extensionDir);
|
||||
let effectiveExtensionPath = extensionDir;
|
||||
|
||||
if (installMetadata?.type === 'link') {
|
||||
effectiveExtensionPath = installMetadata.source;
|
||||
}
|
||||
|
||||
try {
|
||||
let config = loadExtensionConfig({
|
||||
extensionDir: effectiveExtensionPath,
|
||||
workspaceDir,
|
||||
});
|
||||
|
||||
config = resolveEnvVarsInObject(config);
|
||||
|
||||
if (config.mcpServers) {
|
||||
config.mcpServers = Object.fromEntries(
|
||||
Object.entries(config.mcpServers).map(([key, value]) => [
|
||||
key,
|
||||
filterMcpConfig(value),
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
const contextFiles = getContextFileNames(config)
|
||||
.map((contextFileName) =>
|
||||
path.join(effectiveExtensionPath, contextFileName),
|
||||
)
|
||||
.filter((contextFilePath) => fs.existsSync(contextFilePath));
|
||||
|
||||
return {
|
||||
path: effectiveExtensionPath,
|
||||
config,
|
||||
contextFiles,
|
||||
installMetadata,
|
||||
};
|
||||
} catch (e) {
|
||||
console.error(
|
||||
`Warning: Skipping extension in ${effectiveExtensionPath}: ${getErrorMessage(
|
||||
e,
|
||||
)}`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function loadExtensionByName(
|
||||
name: string,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): Extension | null {
|
||||
const userExtensionsDir = ExtensionStorage.getUserExtensionsDir();
|
||||
if (!fs.existsSync(userExtensionsDir)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
for (const subdir of fs.readdirSync(userExtensionsDir)) {
|
||||
const extensionDir = path.join(userExtensionsDir, subdir);
|
||||
if (!fs.statSync(extensionDir).isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
const extension = loadExtension({ extensionDir, workspaceDir });
|
||||
if (
|
||||
extension &&
|
||||
extension.config.name.toLowerCase() === name.toLowerCase()
|
||||
) {
|
||||
return extension;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function filterMcpConfig(original: MCPServerConfig): MCPServerConfig {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { trust, ...rest } = original;
|
||||
return Object.freeze(rest);
|
||||
}
|
||||
|
||||
export function loadInstallMetadata(
|
||||
extensionDir: string,
|
||||
): ExtensionInstallMetadata | undefined {
|
||||
const metadataFilePath = path.join(extensionDir, INSTALL_METADATA_FILENAME);
|
||||
try {
|
||||
const configContent = fs.readFileSync(metadataFilePath, 'utf-8');
|
||||
const metadata = JSON.parse(configContent) as ExtensionInstallMetadata;
|
||||
return metadata;
|
||||
} catch (_e) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function getContextFileNames(config: ExtensionConfig): string[] {
|
||||
if (!config.contextFileName) {
|
||||
return ['QWEN.md'];
|
||||
} else if (!Array.isArray(config.contextFileName)) {
|
||||
return [config.contextFileName];
|
||||
}
|
||||
return config.contextFileName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an annotated list of extensions. If an extension is listed in enabledExtensionNames, it will be active.
|
||||
* If enabledExtensionNames is empty, an extension is active unless it is disabled.
|
||||
* @param extensions The base list of extensions.
|
||||
* @param enabledExtensionNames The names of explicitly enabled extensions.
|
||||
* @param workspaceDir The current workspace directory.
|
||||
*/
|
||||
export function annotateActiveExtensions(
|
||||
extensions: Extension[],
|
||||
workspaceDir: string,
|
||||
manager: ExtensionEnablementManager,
|
||||
): GeminiCLIExtension[] {
|
||||
manager.validateExtensionOverrides(extensions);
|
||||
return extensions.map((extension) => ({
|
||||
name: extension.config.name,
|
||||
version: extension.config.version,
|
||||
isActive: manager.isEnabled(extension.config.name, workspaceDir),
|
||||
path: extension.path,
|
||||
installMetadata: extension.installMetadata,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to perform an action, by reading a Y/n
|
||||
* character from stdin.
|
||||
*
|
||||
* This should not be called from interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param consentDescription The description of the thing they will be consenting to.
|
||||
* @returns boolean, whether they consented or not.
|
||||
*/
|
||||
export async function requestConsentNonInteractive(
|
||||
consentDescription: string,
|
||||
): Promise<boolean> {
|
||||
console.info(consentDescription);
|
||||
const result = await promptForConsentNonInteractive(
|
||||
'Do you want to continue? [Y/n]: ',
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to perform an action, in interactive mode.
|
||||
*
|
||||
* This should not be called from non-interactive mode as it will not work.
|
||||
*
|
||||
* @param consentDescription The description of the thing they will be consenting to.
|
||||
* @param setExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
|
||||
* @returns boolean, whether they consented or not.
|
||||
*/
|
||||
export async function requestConsentInteractive(
|
||||
consentDescription: string,
|
||||
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
|
||||
): Promise<boolean> {
|
||||
return await promptForConsentInteractive(
|
||||
consentDescription + '\n\nDo you want to continue?',
|
||||
addExtensionUpdateConfirmationRequest,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks users a prompt and awaits for a y/n response on stdin.
|
||||
*
|
||||
* This should not be called from interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param prompt A yes/no prompt to ask the user
|
||||
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
|
||||
*/
|
||||
async function promptForConsentNonInteractive(
|
||||
prompt: string,
|
||||
): Promise<boolean> {
|
||||
const readline = await import('node:readline');
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
});
|
||||
|
||||
return new Promise((resolve) => {
|
||||
rl.question(prompt, (answer) => {
|
||||
rl.close();
|
||||
resolve(['y', ''].includes(answer.trim().toLowerCase()));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks users an interactive yes/no prompt.
|
||||
*
|
||||
* This should not be called from non-interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param prompt A markdown prompt to ask the user
|
||||
* @param setExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
|
||||
* @returns Whether or not the user answers yes.
|
||||
*/
|
||||
async function promptForConsentInteractive(
|
||||
prompt: string,
|
||||
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
|
||||
): Promise<boolean> {
|
||||
return await new Promise<boolean>((resolve) => {
|
||||
addExtensionUpdateConfirmationRequest({
|
||||
prompt,
|
||||
onConfirm: (resolvedConfirmed) => {
|
||||
resolve(resolvedConfirmed);
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function installExtension(
|
||||
installMetadata: ExtensionInstallMetadata,
|
||||
requestConsent: (consent: string) => Promise<boolean>,
|
||||
cwd: string = process.cwd(),
|
||||
previousExtensionConfig?: ExtensionConfig,
|
||||
): Promise<string> {
|
||||
const telemetryConfig = getTelemetryConfig(cwd);
|
||||
let newExtensionConfig: ExtensionConfig | null = null;
|
||||
let localSourcePath: string | undefined;
|
||||
|
||||
try {
|
||||
const settings = loadSettings(cwd).merged;
|
||||
if (!isWorkspaceTrusted(settings)) {
|
||||
throw new Error(
|
||||
`Could not install extension from untrusted folder at ${installMetadata.source}`,
|
||||
);
|
||||
}
|
||||
|
||||
const extensionsDir = ExtensionStorage.getUserExtensionsDir();
|
||||
await fs.promises.mkdir(extensionsDir, { recursive: true });
|
||||
|
||||
if (
|
||||
!path.isAbsolute(installMetadata.source) &&
|
||||
(installMetadata.type === 'local' || installMetadata.type === 'link')
|
||||
) {
|
||||
installMetadata.source = path.resolve(cwd, installMetadata.source);
|
||||
}
|
||||
|
||||
let tempDir: string | undefined;
|
||||
|
||||
if (
|
||||
installMetadata.type === 'git' ||
|
||||
installMetadata.type === 'github-release'
|
||||
) {
|
||||
tempDir = await ExtensionStorage.createTmpDir();
|
||||
try {
|
||||
const result = await downloadFromGitHubRelease(
|
||||
installMetadata,
|
||||
tempDir,
|
||||
);
|
||||
installMetadata.type = result.type;
|
||||
installMetadata.releaseTag = result.tagName;
|
||||
} catch (_error) {
|
||||
await cloneFromGit(installMetadata, tempDir);
|
||||
installMetadata.type = 'git';
|
||||
}
|
||||
localSourcePath = tempDir;
|
||||
} else if (
|
||||
installMetadata.type === 'local' ||
|
||||
installMetadata.type === 'link'
|
||||
) {
|
||||
localSourcePath = installMetadata.source;
|
||||
} else {
|
||||
throw new Error(`Unsupported install type: ${installMetadata.type}`);
|
||||
}
|
||||
|
||||
try {
|
||||
newExtensionConfig = loadExtensionConfig({
|
||||
extensionDir: localSourcePath,
|
||||
workspaceDir: cwd,
|
||||
});
|
||||
|
||||
const newExtensionName = newExtensionConfig.name;
|
||||
const extensionStorage = new ExtensionStorage(newExtensionName);
|
||||
const destinationPath = extensionStorage.getExtensionDir();
|
||||
|
||||
const installedExtensions = loadUserExtensions();
|
||||
if (
|
||||
installedExtensions.some(
|
||||
(installed) => installed.config.name === newExtensionName,
|
||||
)
|
||||
) {
|
||||
throw new Error(
|
||||
`Extension "${newExtensionName}" is already installed. Please uninstall it first.`,
|
||||
);
|
||||
}
|
||||
await maybeRequestConsentOrFail(
|
||||
newExtensionConfig,
|
||||
requestConsent,
|
||||
previousExtensionConfig,
|
||||
);
|
||||
await fs.promises.mkdir(destinationPath, { recursive: true });
|
||||
|
||||
if (
|
||||
installMetadata.type === 'local' ||
|
||||
installMetadata.type === 'git' ||
|
||||
installMetadata.type === 'github-release'
|
||||
) {
|
||||
await copyExtension(localSourcePath, destinationPath);
|
||||
}
|
||||
|
||||
const metadataString = JSON.stringify(installMetadata, null, 2);
|
||||
const metadataPath = path.join(
|
||||
destinationPath,
|
||||
INSTALL_METADATA_FILENAME,
|
||||
);
|
||||
await fs.promises.writeFile(metadataPath, metadataString);
|
||||
} finally {
|
||||
if (tempDir) {
|
||||
await fs.promises.rm(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
logExtensionInstallEvent(
|
||||
telemetryConfig,
|
||||
new ExtensionInstallEvent(
|
||||
newExtensionConfig!.name,
|
||||
newExtensionConfig!.version,
|
||||
installMetadata.source,
|
||||
'success',
|
||||
),
|
||||
);
|
||||
|
||||
enableExtension(newExtensionConfig!.name, SettingScope.User);
|
||||
return newExtensionConfig!.name;
|
||||
} catch (error) {
|
||||
// Attempt to load config from the source path even if installation fails
|
||||
// to get the name and version for logging.
|
||||
if (!newExtensionConfig && localSourcePath) {
|
||||
try {
|
||||
newExtensionConfig = loadExtensionConfig({
|
||||
extensionDir: localSourcePath,
|
||||
workspaceDir: cwd,
|
||||
});
|
||||
} catch {
|
||||
// Ignore error, this is just for logging.
|
||||
}
|
||||
}
|
||||
logExtensionInstallEvent(
|
||||
telemetryConfig,
|
||||
new ExtensionInstallEvent(
|
||||
newExtensionConfig?.name ?? '',
|
||||
newExtensionConfig?.version ?? '',
|
||||
installMetadata.source,
|
||||
'error',
|
||||
),
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a consent string for installing an extension based on its
|
||||
* extensionConfig.
|
||||
*/
|
||||
function extensionConsentString(extensionConfig: ExtensionConfig): string {
|
||||
const output: string[] = [];
|
||||
const mcpServerEntries = Object.entries(extensionConfig.mcpServers || {});
|
||||
output.push(`Installing extension "${extensionConfig.name}".`);
|
||||
output.push(
|
||||
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**',
|
||||
);
|
||||
|
||||
if (mcpServerEntries.length) {
|
||||
output.push('This extension will run the following MCP servers:');
|
||||
for (const [key, mcpServer] of mcpServerEntries) {
|
||||
const isLocal = !!mcpServer.command;
|
||||
const source =
|
||||
mcpServer.httpUrl ??
|
||||
`${mcpServer.command || ''}${mcpServer.args ? ' ' + mcpServer.args.join(' ') : ''}`;
|
||||
output.push(` * ${key} (${isLocal ? 'local' : 'remote'}): ${source}`);
|
||||
}
|
||||
}
|
||||
if (extensionConfig.contextFileName) {
|
||||
output.push(
|
||||
`This extension will append info to your QWEN.md context using ${extensionConfig.contextFileName}`,
|
||||
);
|
||||
}
|
||||
if (extensionConfig.excludeTools) {
|
||||
output.push(
|
||||
`This extension will exclude the following core tools: ${extensionConfig.excludeTools}`,
|
||||
);
|
||||
}
|
||||
return output.join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to install an extension (extensionConfig), if
|
||||
* there is any difference between the consent string for `extensionConfig` and
|
||||
* `previousExtensionConfig`.
|
||||
*
|
||||
* Always requests consent if previousExtensionConfig is null.
|
||||
*
|
||||
* Throws if the user does not consent.
|
||||
*/
|
||||
async function maybeRequestConsentOrFail(
|
||||
extensionConfig: ExtensionConfig,
|
||||
requestConsent: (consent: string) => Promise<boolean>,
|
||||
previousExtensionConfig?: ExtensionConfig,
|
||||
) {
|
||||
const extensionConsent = extensionConsentString(extensionConfig);
|
||||
if (previousExtensionConfig) {
|
||||
const previousExtensionConsent = extensionConsentString(
|
||||
previousExtensionConfig,
|
||||
);
|
||||
if (previousExtensionConsent === extensionConsent) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (!(await requestConsent(extensionConsent))) {
|
||||
throw new Error(`Installation cancelled for "${extensionConfig.name}".`);
|
||||
}
|
||||
}
|
||||
|
||||
export function validateName(name: string) {
|
||||
if (!/^[a-zA-Z0-9-]+$/.test(name)) {
|
||||
throw new Error(
|
||||
`Invalid extension name: "${name}". Only letters (a-z, A-Z), numbers (0-9), and dashes (-) are allowed.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function loadExtensionConfig(
|
||||
context: LoadExtensionContext,
|
||||
): ExtensionConfig {
|
||||
const { extensionDir, workspaceDir } = context;
|
||||
const configFilePath = path.join(extensionDir, EXTENSIONS_CONFIG_FILENAME);
|
||||
if (!fs.existsSync(configFilePath)) {
|
||||
throw new Error(`Configuration file not found at ${configFilePath}`);
|
||||
}
|
||||
try {
|
||||
const configContent = fs.readFileSync(configFilePath, 'utf-8');
|
||||
const config = recursivelyHydrateStrings(JSON.parse(configContent), {
|
||||
extensionPath: extensionDir,
|
||||
workspacePath: workspaceDir,
|
||||
'/': path.sep,
|
||||
pathSeparator: path.sep,
|
||||
}) as unknown as ExtensionConfig;
|
||||
if (!config.name || !config.version) {
|
||||
throw new Error(
|
||||
`Invalid configuration in ${configFilePath}: missing ${!config.name ? '"name"' : '"version"'}`,
|
||||
);
|
||||
}
|
||||
validateName(config.name);
|
||||
return config;
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Failed to load extension config from ${configFilePath}: ${getErrorMessage(
|
||||
e,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function uninstallExtension(
|
||||
extensionIdentifier: string,
|
||||
cwd: string = process.cwd(),
|
||||
): Promise<void> {
|
||||
const telemetryConfig = getTelemetryConfig(cwd);
|
||||
const installedExtensions = loadUserExtensions();
|
||||
const extensionName = installedExtensions.find(
|
||||
(installed) =>
|
||||
installed.config.name.toLowerCase() ===
|
||||
extensionIdentifier.toLowerCase() ||
|
||||
installed.installMetadata?.source.toLowerCase() ===
|
||||
extensionIdentifier.toLowerCase(),
|
||||
)?.config.name;
|
||||
if (!extensionName) {
|
||||
throw new Error(`Extension not found.`);
|
||||
}
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
[extensionName],
|
||||
);
|
||||
manager.remove(extensionName);
|
||||
const storage = new ExtensionStorage(extensionName);
|
||||
|
||||
await fs.promises.rm(storage.getExtensionDir(), {
|
||||
recursive: true,
|
||||
force: true,
|
||||
});
|
||||
logExtensionUninstall(
|
||||
telemetryConfig,
|
||||
new ExtensionUninstallEvent(extensionName, 'success'),
|
||||
);
|
||||
}
|
||||
|
||||
export function toOutputString(
|
||||
extension: Extension,
|
||||
workspaceDir: string,
|
||||
): string {
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
);
|
||||
const userEnabled = manager.isEnabled(extension.config.name, os.homedir());
|
||||
const workspaceEnabled = manager.isEnabled(
|
||||
extension.config.name,
|
||||
workspaceDir,
|
||||
);
|
||||
|
||||
const status = workspaceEnabled ? chalk.green('✓') : chalk.red('✗');
|
||||
let output = `${status} ${extension.config.name} (${extension.config.version})`;
|
||||
output += `\n Path: ${extension.path}`;
|
||||
if (extension.installMetadata) {
|
||||
output += `\n Source: ${extension.installMetadata.source} (Type: ${extension.installMetadata.type})`;
|
||||
if (extension.installMetadata.ref) {
|
||||
output += `\n Ref: ${extension.installMetadata.ref}`;
|
||||
}
|
||||
if (extension.installMetadata.releaseTag) {
|
||||
output += `\n Release tag: ${extension.installMetadata.releaseTag}`;
|
||||
}
|
||||
}
|
||||
output += `\n Enabled (User): ${userEnabled}`;
|
||||
output += `\n Enabled (Workspace): ${workspaceEnabled}`;
|
||||
if (extension.contextFiles.length > 0) {
|
||||
output += `\n Context files:`;
|
||||
extension.contextFiles.forEach((contextFile) => {
|
||||
output += `\n ${contextFile}`;
|
||||
});
|
||||
}
|
||||
if (extension.config.mcpServers) {
|
||||
output += `\n MCP servers:`;
|
||||
Object.keys(extension.config.mcpServers).forEach((key) => {
|
||||
output += `\n ${key}`;
|
||||
});
|
||||
}
|
||||
if (extension.config.excludeTools) {
|
||||
output += `\n Excluded tools:`;
|
||||
extension.config.excludeTools.forEach((tool) => {
|
||||
output += `\n ${tool}`;
|
||||
});
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
export function disableExtension(
|
||||
name: string,
|
||||
scope: SettingScope,
|
||||
cwd: string = process.cwd(),
|
||||
) {
|
||||
const config = getTelemetryConfig(cwd);
|
||||
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
|
||||
throw new Error('System and SystemDefaults scopes are not supported.');
|
||||
}
|
||||
const extension = loadExtensionByName(name, cwd);
|
||||
if (!extension) {
|
||||
throw new Error(`Extension with name ${name} does not exist.`);
|
||||
}
|
||||
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
[name],
|
||||
);
|
||||
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
|
||||
manager.disable(name, true, scopePath);
|
||||
logExtensionDisable(config, new ExtensionDisableEvent(name, scope));
|
||||
}
|
||||
|
||||
export function enableExtension(
|
||||
name: string,
|
||||
scope: SettingScope,
|
||||
cwd: string = process.cwd(),
|
||||
) {
|
||||
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
|
||||
throw new Error('System and SystemDefaults scopes are not supported.');
|
||||
}
|
||||
const extension = loadExtensionByName(name, cwd);
|
||||
if (!extension) {
|
||||
throw new Error(`Extension with name ${name} does not exist.`);
|
||||
}
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
);
|
||||
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
|
||||
manager.enable(name, true, scopePath);
|
||||
const config = getTelemetryConfig(cwd);
|
||||
logExtensionEnable(config, new ExtensionEnableEvent(name, scope));
|
||||
}
|
||||
@@ -1,424 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as path from 'node:path';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
import { ExtensionEnablementManager, Override } from './extensionEnablement.js';
|
||||
import type { Extension } from '../extension.js';
|
||||
|
||||
// Helper to create a temporary directory for testing
|
||||
function createTestDir() {
|
||||
const dirPath = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-test-'));
|
||||
return {
|
||||
path: dirPath,
|
||||
cleanup: () => fs.rmSync(dirPath, { recursive: true, force: true }),
|
||||
};
|
||||
}
|
||||
|
||||
let testDir: { path: string; cleanup: () => void };
|
||||
let configDir: string;
|
||||
let manager: ExtensionEnablementManager;
|
||||
|
||||
describe('ExtensionEnablementManager', () => {
|
||||
beforeEach(() => {
|
||||
testDir = createTestDir();
|
||||
configDir = path.join(testDir.path, '.gemini');
|
||||
manager = new ExtensionEnablementManager(configDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
testDir.cleanup();
|
||||
// Reset the singleton instance for test isolation
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(ExtensionEnablementManager as any).instance = undefined;
|
||||
});
|
||||
|
||||
describe('isEnabled', () => {
|
||||
it('should return true if extension is not configured', () => {
|
||||
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true if no overrides match', () => {
|
||||
manager.disable('ext-test', false, '/another/path');
|
||||
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
|
||||
});
|
||||
|
||||
it('should enable a path based on an override rule', () => {
|
||||
manager.disable('ext-test', true, '/');
|
||||
manager.enable('ext-test', true, '/home/user/projects/');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it('should disable a path based on a disable override rule', () => {
|
||||
manager.enable('ext-test', true, '/');
|
||||
manager.disable('ext-test', true, '/home/user/projects/');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
it('should respect the last matching rule (enable wins)', () => {
|
||||
manager.disable('ext-test', true, '/home/user/projects/');
|
||||
manager.enable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it('should respect the last matching rule (disable wins)', () => {
|
||||
manager.enable('ext-test', true, '/home/user/projects/');
|
||||
manager.disable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle', () => {
|
||||
manager.enable('ext-test', true, '/home/user/projects');
|
||||
manager.disable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
expect(
|
||||
manager.isEnabled('ext-test', '/home/user/projects/something-else'),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
  describe('includeSubdirs', () => {
    it('should add a glob when enabling with includeSubdirs', () => {
      manager.enable('ext-test', true, '/path/to/dir');
      const config = manager.readConfig();
      expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
    });

    it('should not add a glob when enabling without includeSubdirs', () => {
      manager.enable('ext-test', false, '/path/to/dir');
      const config = manager.readConfig();
      expect(config['ext-test'].overrides).toContain('/path/to/dir/');
      expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
    });

    it('should add a glob when disabling with includeSubdirs', () => {
      manager.disable('ext-test', true, '/path/to/dir');
      const config = manager.readConfig();
      expect(config['ext-test'].overrides).toContain('!/path/to/dir/*');
    });

    it('should remove conflicting glob rule when enabling without subdirs', () => {
      manager.enable('ext-test', true, '/path/to/dir'); // Adds /path/to/dir/*
      manager.enable('ext-test', false, '/path/to/dir'); // Should remove the glob
      const config = manager.readConfig();
      expect(config['ext-test'].overrides).toContain('/path/to/dir/');
      expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
    });

    it('should remove conflicting non-glob rule when enabling with subdirs', () => {
      manager.enable('ext-test', false, '/path/to/dir'); // Adds /path/to/dir/
      manager.enable('ext-test', true, '/path/to/dir'); // Should remove the non-glob
      const config = manager.readConfig();
      expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
      expect(config['ext-test'].overrides).not.toContain('/path/to/dir/');
    });

    it('should remove conflicting rules when disabling', () => {
      manager.enable('ext-test', true, '/path/to/dir'); // enabled with glob
      manager.disable('ext-test', false, '/path/to/dir'); // disabled without
      const config = manager.readConfig();
      expect(config['ext-test'].overrides).toContain('!/path/to/dir/');
      expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
    });

    it('should correctly evaluate isEnabled with subdirs', () => {
      manager.disable('ext-test', true, '/');
      manager.enable('ext-test', true, '/path/to/dir');
      expect(manager.isEnabled('ext-test', '/path/to/dir/')).toBe(true);
      expect(manager.isEnabled('ext-test', '/path/to/dir/sub/')).toBe(true);
      expect(manager.isEnabled('ext-test', '/path/to/another/')).toBe(false);
    });

    it('should correctly evaluate isEnabled without subdirs', () => {
      manager.disable('ext-test', true, '/*');
      manager.enable('ext-test', false, '/path/to/dir');
      expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(true);
      expect(manager.isEnabled('ext-test', '/path/to/dir/sub')).toBe(false);
    });
  });

  describe('pruning child rules', () => {
    it('should remove child rules when enabling a parent with subdirs', () => {
      // Pre-existing rules for children
      manager.enable('ext-test', false, '/path/to/dir/subdir1');
      manager.disable('ext-test', true, '/path/to/dir/subdir2');
      manager.enable('ext-test', false, '/path/to/another/dir');

      // Enable the parent directory
      manager.enable('ext-test', true, '/path/to/dir');

      const config = manager.readConfig();
      const overrides = config['ext-test'].overrides;

      // The new parent rule should be present
      expect(overrides).toContain(`/path/to/dir/*`);

      // Child rules should be removed
      expect(overrides).not.toContain('/path/to/dir/subdir1/');
      expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);

      // Unrelated rules should remain
      expect(overrides).toContain('/path/to/another/dir/');
    });

    it('should remove child rules when disabling a parent with subdirs', () => {
      // Pre-existing rules for children
      manager.enable('ext-test', false, '/path/to/dir/subdir1');
      manager.disable('ext-test', true, '/path/to/dir/subdir2');
      manager.enable('ext-test', false, '/path/to/another/dir');

      // Disable the parent directory
      manager.disable('ext-test', true, '/path/to/dir');

      const config = manager.readConfig();
      const overrides = config['ext-test'].overrides;

      // The new parent rule should be present
      expect(overrides).toContain(`!/path/to/dir/*`);

      // Child rules should be removed
      expect(overrides).not.toContain('/path/to/dir/subdir1/');
      expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);

      // Unrelated rules should remain
      expect(overrides).toContain('/path/to/another/dir/');
    });

    it('should not remove child rules if includeSubdirs is false', () => {
      manager.enable('ext-test', false, '/path/to/dir/subdir1');
      manager.enable('ext-test', false, '/path/to/dir'); // Not including subdirs

      const config = manager.readConfig();
      const overrides = config['ext-test'].overrides;

      expect(overrides).toContain('/path/to/dir/subdir1/');
      expect(overrides).toContain('/path/to/dir/');
    });
  });

  it('should enable a path based on an enable override', () => {
    manager.disable('ext-test', true, '/Users/chrstn');
    manager.enable('ext-test', true, '/Users/chrstn/gemini-cli');

    expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
      true,
    );
  });

  it('should not apply a rule without includeSubdirs to subdirectories', () => {
    manager.disable('ext-test', false, '/Users/chrstn');
    expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
      true,
    );
  });

  describe('extension overrides (-e <name>)', () => {
    beforeEach(() => {
      manager = new ExtensionEnablementManager(configDir, ['ext-test']);
    });

    it('can enable extensions, case-insensitive', () => {
      manager.disable('ext-test', true, '/');
      expect(manager.isEnabled('ext-test', '/')).toBe(true);
      expect(manager.isEnabled('Ext-Test', '/')).toBe(true);
      // Double check that it would have been disabled otherwise
      expect(
        new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
      ).toBe(false);
    });

    it('disables all other extensions', () => {
      manager = new ExtensionEnablementManager(configDir, ['ext-test']);
      manager.enable('ext-test-2', true, '/');
      expect(manager.isEnabled('ext-test-2', '/')).toBe(false);
      // Double check that it would have been enabled otherwise
      expect(
        new ExtensionEnablementManager(configDir).isEnabled('ext-test-2', '/'),
      ).toBe(true);
    });

    it('none disables all extensions', () => {
      manager = new ExtensionEnablementManager(configDir, ['none']);
      manager.enable('ext-test', true, '/');
      expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(false);
      // Double check that it would have been enabled otherwise
      expect(
        new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
      ).toBe(true);
    });
  });

  describe('validateExtensionOverrides', () => {
    let consoleErrorSpy: ReturnType<typeof vi.spyOn>;

    beforeEach(() => {
      consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
    });

    afterEach(() => {
      consoleErrorSpy.mockRestore();
    });

    it('should not log an error if enabledExtensionNamesOverride is empty', () => {
      const manager = new ExtensionEnablementManager(configDir, []);
      manager.validateExtensionOverrides([]);
      expect(consoleErrorSpy).not.toHaveBeenCalled();
    });

    it('should not log an error if all enabledExtensionNamesOverride are valid', () => {
      const manager = new ExtensionEnablementManager(configDir, [
        'ext-one',
        'ext-two',
      ]);
      const extensions = [
        { config: { name: 'ext-one' } },
        { config: { name: 'ext-two' } },
      ] as Extension[];
      manager.validateExtensionOverrides(extensions);
      expect(consoleErrorSpy).not.toHaveBeenCalled();
    });

    it('should log an error for each invalid extension name in enabledExtensionNamesOverride', () => {
      const manager = new ExtensionEnablementManager(configDir, [
        'ext-one',
        'ext-invalid',
        'ext-another-invalid',
      ]);
      const extensions = [
        { config: { name: 'ext-one' } },
        { config: { name: 'ext-two' } },
      ] as Extension[];
      manager.validateExtensionOverrides(extensions);
      expect(consoleErrorSpy).toHaveBeenCalledTimes(2);
      expect(consoleErrorSpy).toHaveBeenCalledWith(
        'Extension not found: ext-invalid',
      );
      expect(consoleErrorSpy).toHaveBeenCalledWith(
        'Extension not found: ext-another-invalid',
      );
    });

    it('should not log an error if "none" is in enabledExtensionNamesOverride', () => {
      const manager = new ExtensionEnablementManager(configDir, ['none']);
      manager.validateExtensionOverrides([]);
      expect(consoleErrorSpy).not.toHaveBeenCalled();
    });
  });
});

describe('Override', () => {
  it('should create an override from input', () => {
    const override = Override.fromInput('/path/to/dir', true);
    expect(override.baseRule).toBe(`/path/to/dir/`);
    expect(override.isDisable).toBe(false);
    expect(override.includeSubdirs).toBe(true);
  });

  it('should create a disable override from input', () => {
    const override = Override.fromInput('!/path/to/dir', false);
    expect(override.baseRule).toBe(`/path/to/dir/`);
    expect(override.isDisable).toBe(true);
    expect(override.includeSubdirs).toBe(false);
  });

  it('should create an override from a file rule', () => {
    const override = Override.fromFileRule('/path/to/dir');
    expect(override.baseRule).toBe('/path/to/dir');
    expect(override.isDisable).toBe(false);
    expect(override.includeSubdirs).toBe(false);
  });

  it('should create a disable override from a file rule', () => {
    const override = Override.fromFileRule('!/path/to/dir/');
    expect(override.isDisable).toBe(true);
    expect(override.baseRule).toBe('/path/to/dir/');
    expect(override.includeSubdirs).toBe(false);
  });

  it('should create an override with subdirs from a file rule', () => {
    const override = Override.fromFileRule('/path/to/dir/*');
    expect(override.baseRule).toBe('/path/to/dir/');
    expect(override.isDisable).toBe(false);
    expect(override.includeSubdirs).toBe(true);
  });

  it('should correctly identify conflicting overrides', () => {
    const override1 = Override.fromInput('/path/to/dir', true);
    const override2 = Override.fromInput('/path/to/dir', false);
    expect(override1.conflictsWith(override2)).toBe(true);
  });

  it('should correctly identify non-conflicting overrides', () => {
    const override1 = Override.fromInput('/path/to/dir', true);
    const override2 = Override.fromInput('/path/to/another/dir', true);
    expect(override1.conflictsWith(override2)).toBe(false);
  });

  it('should correctly identify equal overrides', () => {
    const override1 = Override.fromInput('/path/to/dir', true);
    const override2 = Override.fromInput('/path/to/dir', true);
    expect(override1.isEqualTo(override2)).toBe(true);
  });

  it('should correctly identify unequal overrides', () => {
    const override1 = Override.fromInput('/path/to/dir', true);
    const override2 = Override.fromInput('!/path/to/dir', true);
    expect(override1.isEqualTo(override2)).toBe(false);
  });

  it('should generate the correct regex', () => {
    const override = Override.fromInput('/path/to/dir', true);
    const regex = override.asRegex();
    expect(regex.test('/path/to/dir/')).toBe(true);
    expect(regex.test('/path/to/dir/subdir')).toBe(true);
    expect(regex.test('/path/to/another/dir')).toBe(false);
  });

  it('should correctly identify child overrides', () => {
    const parent = Override.fromInput('/path/to/dir', true);
    const child = Override.fromInput('/path/to/dir/subdir', false);
    expect(child.isChildOf(parent)).toBe(true);
  });

  it('should correctly identify child overrides with glob', () => {
    const parent = Override.fromInput('/path/to/dir/*', true);
    const child = Override.fromInput('/path/to/dir/subdir', false);
    expect(child.isChildOf(parent)).toBe(true);
  });

  it('should correctly identify non-child overrides', () => {
    const parent = Override.fromInput('/path/to/dir', true);
    const other = Override.fromInput('/path/to/another/dir', false);
    expect(other.isChildOf(parent)).toBe(false);
  });

  it('should generate the correct output string', () => {
    const override = Override.fromInput('/path/to/dir', true);
    expect(override.output()).toBe(`/path/to/dir/*`);
  });

  it('should generate the correct output string for a disable override', () => {
    const override = Override.fromInput('!/path/to/dir', false);
    expect(override.output()).toBe(`!/path/to/dir/`);
  });

  it('should disable a path based on a disable override rule', () => {
    const override = Override.fromInput('!/path/to/dir', false);
    expect(override.output()).toBe(`!/path/to/dir/`);
  });
});
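
For orientation, a minimal sketch of the Override round-trip these tests exercise (fromInput, output, fromFileRule); the values shown simply mirror the expectations above and are not additional test cases.

const enableRule = Override.fromInput('/path/to/dir', true);
enableRule.output(); // '/path/to/dir/*'

const disableRule = Override.fromFileRule('!/path/to/dir/*');
disableRule.isDisable; // true
disableRule.includeSubdirs; // true
disableRule.conflictsWith(enableRule); // true: same base rule, opposite polarity
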
@@ -1,239 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import fs from 'node:fs';
import path from 'node:path';
import { type Extension } from '../extension.js';

export interface ExtensionEnablementConfig {
  overrides: string[];
}

export interface AllExtensionsEnablementConfig {
  [extensionName: string]: ExtensionEnablementConfig;
}

export class Override {
  constructor(
    public baseRule: string,
    public isDisable: boolean,
    public includeSubdirs: boolean,
  ) {}

  static fromInput(inputRule: string, includeSubdirs: boolean): Override {
    const isDisable = inputRule.startsWith('!');
    let baseRule = isDisable ? inputRule.substring(1) : inputRule;
    baseRule = ensureLeadingAndTrailingSlash(baseRule);
    return new Override(baseRule, isDisable, includeSubdirs);
  }

  static fromFileRule(fileRule: string): Override {
    const isDisable = fileRule.startsWith('!');
    let baseRule = isDisable ? fileRule.substring(1) : fileRule;
    const includeSubdirs = baseRule.endsWith('*');
    baseRule = includeSubdirs
      ? baseRule.substring(0, baseRule.length - 1)
      : baseRule;
    return new Override(baseRule, isDisable, includeSubdirs);
  }

  conflictsWith(other: Override): boolean {
    if (this.baseRule === other.baseRule) {
      return (
        this.includeSubdirs !== other.includeSubdirs ||
        this.isDisable !== other.isDisable
      );
    }
    return false;
  }

  isEqualTo(other: Override): boolean {
    return (
      this.baseRule === other.baseRule &&
      this.includeSubdirs === other.includeSubdirs &&
      this.isDisable === other.isDisable
    );
  }

  asRegex(): RegExp {
    return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
  }

  isChildOf(parent: Override) {
    if (!parent.includeSubdirs) {
      return false;
    }
    return parent.asRegex().test(this.baseRule);
  }

  output(): string {
    return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
  }

  matchesPath(path: string) {
    return this.asRegex().test(path);
  }
}

const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
  // Normalize separators to forward slashes for consistent matching across platforms.
  let result = dirPath.replace(/\\/g, '/');
  if (result.charAt(0) !== '/') {
    result = '/' + result;
  }
  if (result.charAt(result.length - 1) !== '/') {
    result = result + '/';
  }
  return result;
};

/**
 * Converts a glob pattern to a RegExp object.
 * This is a simplified implementation that supports `*`.
 *
 * @param glob The glob pattern to convert.
 * @returns A RegExp object.
 */
function globToRegex(glob: string): RegExp {
  const regexString = glob
    .replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
    .replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group

  return new RegExp(`^${regexString}$`);
}

export class ExtensionEnablementManager {
  private configFilePath: string;
  private configDir: string;
  // If non-empty, this overrides all other extension configuration and enables
  // only the ones in this list.
  private enabledExtensionNamesOverride: string[];

  constructor(configDir: string, enabledExtensionNames?: string[]) {
    this.configDir = configDir;
    this.configFilePath = path.join(configDir, 'extension-enablement.json');
    this.enabledExtensionNamesOverride =
      enabledExtensionNames?.map((name) => name.toLowerCase()) ?? [];
  }

  validateExtensionOverrides(extensions: Extension[]) {
    for (const name of this.enabledExtensionNamesOverride) {
      if (name === 'none') continue;
      if (
        !extensions.some(
          (ext) => ext.config.name.toLowerCase() === name.toLowerCase(),
        )
      ) {
        console.error(`Extension not found: ${name}`);
      }
    }
  }

  /**
   * Determines if an extension is enabled based on its name and the current
   * path. The last matching rule in the overrides list wins.
   *
   * @param extensionName The name of the extension.
   * @param currentPath The absolute path of the current working directory.
   * @returns True if the extension is enabled, false otherwise.
   */
  isEnabled(extensionName: string, currentPath: string): boolean {
    // If we have a single override called 'none', this disables all extensions.
    // Typically, this comes from the user passing `-e none`.
    if (
      this.enabledExtensionNamesOverride.length === 1 &&
      this.enabledExtensionNamesOverride[0] === 'none'
    ) {
      return false;
    }

    // If we have explicit overrides, only enable those extensions.
    if (this.enabledExtensionNamesOverride.length > 0) {
      // When checking against overrides ONLY, we use a case insensitive match.
      // The override names are already lowercased in the constructor.
      return this.enabledExtensionNamesOverride.includes(
        extensionName.toLocaleLowerCase(),
      );
    }

    // Otherwise, we use the configuration settings
    const config = this.readConfig();
    const extensionConfig = config[extensionName];
    // Extensions are enabled by default.
    let enabled = true;
    const allOverrides = extensionConfig?.overrides ?? [];
    for (const rule of allOverrides) {
      const override = Override.fromFileRule(rule);
      if (override.matchesPath(ensureLeadingAndTrailingSlash(currentPath))) {
        enabled = !override.isDisable;
      }
    }
    return enabled;
  }

  readConfig(): AllExtensionsEnablementConfig {
    try {
      const content = fs.readFileSync(this.configFilePath, 'utf-8');
      return JSON.parse(content);
    } catch (error) {
      if (
        error instanceof Error &&
        'code' in error &&
        error.code === 'ENOENT'
      ) {
        return {};
      }
      console.error('Error reading extension enablement config:', error);
      return {};
    }
  }

  writeConfig(config: AllExtensionsEnablementConfig): void {
    fs.mkdirSync(this.configDir, { recursive: true });
    fs.writeFileSync(this.configFilePath, JSON.stringify(config, null, 2));
  }

  enable(
    extensionName: string,
    includeSubdirs: boolean,
    scopePath: string,
  ): void {
    const config = this.readConfig();
    if (!config[extensionName]) {
      config[extensionName] = { overrides: [] };
    }
    const override = Override.fromInput(scopePath, includeSubdirs);
    const overrides = config[extensionName].overrides.filter((rule) => {
      const fileOverride = Override.fromFileRule(rule);
      if (
        fileOverride.conflictsWith(override) ||
        fileOverride.isEqualTo(override)
      ) {
        return false; // Remove conflicts and equivalent values.
      }
      return !fileOverride.isChildOf(override);
    });
    overrides.push(override.output());
    config[extensionName].overrides = overrides;
    this.writeConfig(config);
  }

  disable(
    extensionName: string,
    includeSubdirs: boolean,
    scopePath: string,
  ): void {
    this.enable(extensionName, includeSubdirs, `!${scopePath}`);
  }

  remove(extensionName: string): void {
    const config = this.readConfig();
    if (config[extensionName]) {
      delete config[extensionName];
      this.writeConfig(config);
    }
  }
}
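
A minimal usage sketch of how the rules above compose (the last matching override wins; a trailing `*` opts a rule into subdirectories). The extension name and config directory here are hypothetical placeholders.

const manager = new ExtensionEnablementManager('/home/user/.qwen');

// Disable everywhere, then re-enable one project tree.
manager.disable('my-ext', true, '/'); // stores '!/*'
manager.enable('my-ext', true, '/home/user/projects'); // stores '/home/user/projects/*'

manager.isEnabled('my-ext', '/home/user/projects/app'); // true: the later enable rule also matches, so it wins
manager.isEnabled('my-ext', '/home/user/other'); // false: only the disable rule matches
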
@@ -1,468 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { vi } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
EXTENSIONS_CONFIG_FILENAME,
|
||||
ExtensionStorage,
|
||||
INSTALL_METADATA_FILENAME,
|
||||
annotateActiveExtensions,
|
||||
loadExtension,
|
||||
} from '../extension.js';
|
||||
import { checkForAllExtensionUpdates, updateExtension } from './update.js';
|
||||
import { QWEN_DIR } from '@qwen-code/qwen-code-core';
|
||||
import { isWorkspaceTrusted } from '../trustedFolders.js';
|
||||
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
|
||||
import { createExtension } from '../../test-utils/createExtension.js';
|
||||
import { ExtensionEnablementManager } from './extensionEnablement.js';
|
||||
|
||||
const mockGit = {
|
||||
clone: vi.fn(),
|
||||
getRemotes: vi.fn(),
|
||||
fetch: vi.fn(),
|
||||
checkout: vi.fn(),
|
||||
listRemote: vi.fn(),
|
||||
revparse: vi.fn(),
|
||||
// Not a part of the actual API, but we need to use this to do the correct
|
||||
// file system interactions.
|
||||
path: vi.fn(),
|
||||
};
|
||||
|
||||
vi.mock('simple-git', () => ({
|
||||
simpleGit: vi.fn((path: string) => {
|
||||
mockGit.path.mockReturnValue(path);
|
||||
return mockGit;
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock('../extensions/github.js', async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import('../extensions/github.js')>();
|
||||
return {
|
||||
...actual,
|
||||
downloadFromGitHubRelease: vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const mockedOs = await importOriginal<typeof os>();
|
||||
return {
|
||||
...mockedOs,
|
||||
homedir: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../trustedFolders.js', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../trustedFolders.js')>();
|
||||
return {
|
||||
...actual,
|
||||
isWorkspaceTrusted: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
|
||||
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
|
||||
return {
|
||||
...actual,
|
||||
logExtensionInstallEvent: mockLogExtensionInstallEvent,
|
||||
logExtensionUninstall: mockLogExtensionUninstall,
|
||||
ExtensionInstallEvent: vi.fn(),
|
||||
ExtensionUninstallEvent: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
describe('update tests', () => {
|
||||
let tempHomeDir: string;
|
||||
let tempWorkspaceDir: string;
|
||||
let userExtensionsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'qwen-code-test-home-'),
|
||||
);
|
||||
tempWorkspaceDir = fs.mkdtempSync(
|
||||
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
|
||||
);
|
||||
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
|
||||
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
|
||||
// Clean up before each test
|
||||
fs.rmSync(userExtensionsDir, { recursive: true, force: true });
|
||||
fs.mkdirSync(userExtensionsDir, { recursive: true });
|
||||
vi.mocked(isWorkspaceTrusted).mockReturnValue({
|
||||
isTrusted: true,
|
||||
source: 'file',
|
||||
});
|
||||
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
|
||||
Object.values(mockGit).forEach((fn) => fn.mockReset());
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('updateExtension', () => {
|
||||
it('should update a git-installed extension', async () => {
|
||||
const gitUrl = 'https://github.com/google/gemini-extensions.git';
|
||||
const extensionName = 'qwen-extensions';
|
||||
const targetExtDir = path.join(userExtensionsDir, extensionName);
|
||||
const metadataPath = path.join(targetExtDir, INSTALL_METADATA_FILENAME);
|
||||
|
||||
fs.mkdirSync(targetExtDir, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.0.0' }),
|
||||
);
|
||||
fs.writeFileSync(
|
||||
metadataPath,
|
||||
JSON.stringify({ source: gitUrl, type: 'git' }),
|
||||
);
|
||||
|
||||
mockGit.clone.mockImplementation(async (_, destination) => {
|
||||
fs.mkdirSync(path.join(mockGit.path(), destination), {
|
||||
recursive: true,
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.1.0' }),
|
||||
);
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: targetExtDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
const updateInfo = await updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
() => {},
|
||||
);
|
||||
|
||||
expect(updateInfo).toEqual({
|
||||
name: 'qwen-extensions',
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
});
|
||||
|
||||
const updatedConfig = JSON.parse(
|
||||
fs.readFileSync(
|
||||
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
'utf-8',
|
||||
),
|
||||
);
|
||||
expect(updatedConfig.version).toBe('1.1.0');
|
||||
});
|
||||
|
||||
it('should call setExtensionUpdateState with UPDATING and then UPDATED_NEEDS_RESTART on success', async () => {
|
||||
const extensionName = 'test-extension';
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: extensionName,
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
|
||||
mockGit.clone.mockImplementation(async (_, destination) => {
|
||||
fs.mkdirSync(path.join(mockGit.path(), destination), {
|
||||
recursive: true,
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.1.0' }),
|
||||
);
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
|
||||
const dispatch = vi.fn();
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
await updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
dispatch,
|
||||
);
|
||||
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATING,
|
||||
},
|
||||
});
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should call setExtensionUpdateState with ERROR on failure', async () => {
|
||||
const extensionName = 'test-extension';
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: extensionName,
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
|
||||
mockGit.clone.mockRejectedValue(new Error('Git clone failed'));
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
|
||||
const dispatch = vi.fn();
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
await expect(
|
||||
updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
dispatch,
|
||||
),
|
||||
).rejects.toThrow();
|
||||
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATING,
|
||||
},
|
||||
});
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.ERROR,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkForAllExtensionUpdates', () => {
|
||||
it('should return UpdateAvailable for a git extension with updates', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
mockGit.getRemotes.mockResolvedValue([
|
||||
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
|
||||
]);
|
||||
mockGit.listRemote.mockResolvedValue('remoteHash HEAD');
|
||||
mockGit.revparse.mockResolvedValue('localHash');
|
||||
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return UpToDate for a git extension with no updates', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
mockGit.getRemotes.mockResolvedValue([
|
||||
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
|
||||
]);
|
||||
mockGit.listRemote.mockResolvedValue('sameHash HEAD');
|
||||
mockGit.revparse.mockResolvedValue('sameHash');
|
||||
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UP_TO_DATE,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return UpToDate for a local extension with no updates', async () => {
|
||||
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
|
||||
const sourceExtensionDir = createExtension({
|
||||
extensionsDir: localExtensionSourcePath,
|
||||
name: 'my-local-ext',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const installedExtensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'local-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: { source: sourceExtensionDir, type: 'local' },
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: installedExtensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'local-extension',
|
||||
state: ExtensionUpdateState.UP_TO_DATE,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return UpdateAvailable for a local extension with updates', async () => {
|
||||
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
|
||||
const sourceExtensionDir = createExtension({
|
||||
extensionsDir: localExtensionSourcePath,
|
||||
name: 'my-local-ext',
|
||||
version: '1.1.0',
|
||||
});
|
||||
|
||||
const installedExtensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'local-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: { source: sourceExtensionDir, type: 'local' },
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: installedExtensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'local-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return Error when git check fails', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'error-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
mockGit.getRemotes.mockRejectedValue(new Error('Git error'));
|
||||
|
||||
const dispatch = vi.fn();
|
||||
await checkForAllExtensionUpdates([extension], dispatch);
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'error-extension',
|
||||
state: ExtensionUpdateState.ERROR,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,182 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import {
  type ExtensionUpdateAction,
  ExtensionUpdateState,
  type ExtensionUpdateStatus,
} from '../../ui/state/extensions.js';
import {
  copyExtension,
  installExtension,
  uninstallExtension,
  loadExtension,
  loadInstallMetadata,
  ExtensionStorage,
  loadExtensionConfig,
} from '../extension.js';
import { checkForExtensionUpdate } from './github.js';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import { getErrorMessage } from '../../utils/errors.js';

export interface ExtensionUpdateInfo {
  name: string;
  originalVersion: string;
  updatedVersion: string;
}

export async function updateExtension(
  extension: GeminiCLIExtension,
  cwd: string = process.cwd(),
  requestConsent: (consent: string) => Promise<boolean>,
  currentState: ExtensionUpdateState,
  dispatchExtensionStateUpdate: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo | undefined> {
  if (currentState === ExtensionUpdateState.UPDATING) {
    return undefined;
  }
  dispatchExtensionStateUpdate({
    type: 'SET_STATE',
    payload: { name: extension.name, state: ExtensionUpdateState.UPDATING },
  });
  const installMetadata = loadInstallMetadata(extension.path);

  if (!installMetadata?.type) {
    dispatchExtensionStateUpdate({
      type: 'SET_STATE',
      payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
    });
    throw new Error(
      `Extension ${extension.name} cannot be updated, type is unknown.`,
    );
  }
  if (installMetadata?.type === 'link') {
    dispatchExtensionStateUpdate({
      type: 'SET_STATE',
      payload: { name: extension.name, state: ExtensionUpdateState.UP_TO_DATE },
    });
    throw new Error(`Extension is linked so does not need to be updated`);
  }
  const originalVersion = extension.version;

  const tempDir = await ExtensionStorage.createTmpDir();
  try {
    await copyExtension(extension.path, tempDir);
    const previousExtensionConfig = await loadExtensionConfig({
      extensionDir: extension.path,
      workspaceDir: cwd,
    });
    await uninstallExtension(extension.name, cwd);
    await installExtension(
      installMetadata,
      requestConsent,
      cwd,
      previousExtensionConfig,
    );

    const updatedExtensionStorage = new ExtensionStorage(extension.name);
    const updatedExtension = loadExtension({
      extensionDir: updatedExtensionStorage.getExtensionDir(),
      workspaceDir: cwd,
    });
    if (!updatedExtension) {
      dispatchExtensionStateUpdate({
        type: 'SET_STATE',
        payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
      });
      throw new Error('Updated extension not found after installation.');
    }
    const updatedVersion = updatedExtension.config.version;
    dispatchExtensionStateUpdate({
      type: 'SET_STATE',
      payload: {
        name: extension.name,
        state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
      },
    });
    return {
      name: extension.name,
      originalVersion,
      updatedVersion,
    };
  } catch (e) {
    console.error(
      `Error updating extension, rolling back. ${getErrorMessage(e)}`,
    );
    dispatchExtensionStateUpdate({
      type: 'SET_STATE',
      payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
    });
    await copyExtension(tempDir, extension.path);
    throw e;
  } finally {
    await fs.promises.rm(tempDir, { recursive: true, force: true });
  }
}

export async function updateAllUpdatableExtensions(
  cwd: string = process.cwd(),
  requestConsent: (consent: string) => Promise<boolean>,
  extensions: GeminiCLIExtension[],
  extensionsState: Map<string, ExtensionUpdateStatus>,
  dispatch: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo[]> {
  return (
    await Promise.all(
      extensions
        .filter(
          (extension) =>
            extensionsState.get(extension.name)?.status ===
            ExtensionUpdateState.UPDATE_AVAILABLE,
        )
        .map((extension) =>
          updateExtension(
            extension,
            cwd,
            requestConsent,
            extensionsState.get(extension.name)!.status,
            dispatch,
          ),
        ),
    )
  ).filter((updateInfo) => !!updateInfo);
}

export interface ExtensionUpdateCheckResult {
  state: ExtensionUpdateState;
  error?: string;
}

export async function checkForAllExtensionUpdates(
  extensions: GeminiCLIExtension[],
  dispatch: (action: ExtensionUpdateAction) => void,
): Promise<void> {
  dispatch({ type: 'BATCH_CHECK_START' });
  const promises: Array<Promise<void>> = [];
  for (const extension of extensions) {
    if (!extension.installMetadata) {
      dispatch({
        type: 'SET_STATE',
        payload: {
          name: extension.name,
          state: ExtensionUpdateState.NOT_UPDATABLE,
        },
      });
      continue;
    }
    promises.push(
      checkForExtensionUpdate(extension, (updatedState) => {
        dispatch({
          type: 'SET_STATE',
          payload: { name: extension.name, state: updatedState },
        });
      }),
    );
  }
  await Promise.all(promises);
  dispatch({ type: 'BATCH_CHECK_END' });
}
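
A rough sketch of a consumer for the actions dispatched by checkForAllExtensionUpdates above. The Map-based store and the `extensions` array are illustrative stand-ins, not the CLI's actual reducer, and this assumes ExtensionUpdateAction is the discriminated union used in the dispatches above.

// Illustrative only: collect per-extension update states from dispatched actions.
const states = new Map<string, ExtensionUpdateState>();
const dispatch = (action: ExtensionUpdateAction): void => {
  if (action.type === 'SET_STATE') {
    states.set(action.payload.name, action.payload.state);
  }
};

await checkForAllExtensionUpdates(extensions, dispatch); // `extensions` assumed loaded elsewhere
const updatable = [...states.entries()]
  .filter(([, state]) => state === ExtensionUpdateState.UPDATE_AVAILABLE)
  .map(([name]) => name);
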
@@ -51,7 +51,6 @@ import {
|
||||
import * as fs from 'node:fs'; // fs will be mocked separately
|
||||
import stripJsonComments from 'strip-json-comments'; // Will be mocked separately
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import { disableExtension } from './extension.js';
|
||||
|
||||
// These imports will get the versions from the vi.mock('./settings.js', ...) factory.
|
||||
import {
|
||||
@@ -65,8 +64,6 @@ import {
|
||||
needsMigration,
|
||||
type Settings,
|
||||
loadEnvironment,
|
||||
migrateDeprecatedSettings,
|
||||
SettingScope,
|
||||
SETTINGS_VERSION,
|
||||
SETTINGS_VERSION_KEY,
|
||||
} from './settings.js';
|
||||
@@ -2730,122 +2727,4 @@ describe('Settings Loading and Merging', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('migrateDeprecatedSettings', () => {
|
||||
let mockFsExistsSync: Mocked<typeof fs.existsSync>;
|
||||
let mockFsReadFileSync: Mocked<typeof fs.readFileSync>;
|
||||
let mockDisableExtension: Mocked<typeof disableExtension>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
|
||||
mockFsExistsSync = vi.mocked(fs.existsSync);
|
||||
mockFsReadFileSync = vi.mocked(fs.readFileSync);
|
||||
mockDisableExtension = vi.mocked(disableExtension);
|
||||
|
||||
(mockFsExistsSync as Mock).mockReturnValue(true);
|
||||
vi.mocked(isWorkspaceTrusted).mockReturnValue(true);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should migrate disabled extensions from user and workspace settings', () => {
|
||||
const userSettingsContent = {
|
||||
extensions: {
|
||||
disabled: ['user-ext-1', 'shared-ext'],
|
||||
},
|
||||
};
|
||||
const workspaceSettingsContent = {
|
||||
extensions: {
|
||||
disabled: ['workspace-ext-1', 'shared-ext'],
|
||||
},
|
||||
};
|
||||
|
||||
(mockFsReadFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
|
||||
|
||||
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
|
||||
|
||||
// Check user settings migration
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'user-ext-1',
|
||||
SettingScope.User,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'shared-ext',
|
||||
SettingScope.User,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
|
||||
// Check workspace settings migration
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'workspace-ext-1',
|
||||
SettingScope.Workspace,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'shared-ext',
|
||||
SettingScope.Workspace,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
|
||||
// Check that setValue was called to remove the deprecated setting
|
||||
expect(setValueSpy).toHaveBeenCalledWith(
|
||||
SettingScope.User,
|
||||
'extensions',
|
||||
{
|
||||
disabled: undefined,
|
||||
},
|
||||
);
|
||||
expect(setValueSpy).toHaveBeenCalledWith(
|
||||
SettingScope.Workspace,
|
||||
'extensions',
|
||||
{
|
||||
disabled: undefined,
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('should not do anything if there are no deprecated settings', () => {
|
||||
const userSettingsContent = {
|
||||
extensions: {
|
||||
enabled: ['user-ext-1'],
|
||||
},
|
||||
};
|
||||
const workspaceSettingsContent = {
|
||||
someOtherSetting: 'value',
|
||||
};
|
||||
|
||||
(mockFsReadFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
|
||||
|
||||
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(mockDisableExtension).not.toHaveBeenCalled();
|
||||
expect(setValueSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -30,7 +30,6 @@ import {
|
||||
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
|
||||
import { customDeepMerge, type MergeableObject } from '../utils/deepMerge.js';
|
||||
import { updateSettingsFilePreservingFormat } from '../utils/commentJson.js';
|
||||
import { disableExtension } from './extension.js';
|
||||
|
||||
function getMergeStrategyForPath(path: string[]): MergeStrategy | undefined {
|
||||
let current: SettingDefinition | undefined = undefined;
|
||||
@@ -81,7 +80,6 @@ const MIGRATION_MAP: Record<string, string> = {
|
||||
excludeTools: 'tools.exclude',
|
||||
excludeMCPServers: 'mcp.excluded',
|
||||
excludedProjectEnvVars: 'advanced.excludedEnvVars',
|
||||
extensionManagement: 'experimental.extensionManagement',
|
||||
extensions: 'extensions',
|
||||
fileFiltering: 'context.fileFiltering',
|
||||
folderTrustFeature: 'security.folderTrust.featureEnabled',
|
||||
@@ -903,31 +901,6 @@ export function loadSettings(
|
||||
);
|
||||
}
|
||||
|
||||
export function migrateDeprecatedSettings(
|
||||
loadedSettings: LoadedSettings,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): void {
|
||||
const processScope = (scope: SettingScope) => {
|
||||
const settings = loadedSettings.forScope(scope).settings;
|
||||
if (settings.extensions?.disabled) {
|
||||
console.log(
|
||||
`Migrating deprecated extensions.disabled settings from ${scope} settings...`,
|
||||
);
|
||||
for (const extension of settings.extensions.disabled ?? []) {
|
||||
disableExtension(extension, scope, workspaceDir);
|
||||
}
|
||||
|
||||
const newExtensionsValue = { ...settings.extensions };
|
||||
newExtensionsValue.disabled = undefined;
|
||||
|
||||
loadedSettings.setValue(scope, 'extensions', newExtensionsValue);
|
||||
}
|
||||
};
|
||||
|
||||
processScope(SettingScope.User);
|
||||
processScope(SettingScope.Workspace);
|
||||
}
|
||||
|
||||
export function saveSettings(settingsFile: SettingsFile): void {
|
||||
try {
|
||||
// Ensure the directory exists
|
||||
|
||||
@@ -1228,15 +1228,6 @@ const SETTINGS_SCHEMA = {
|
||||
description: 'Setting to enable experimental features',
|
||||
showInDialog: false,
|
||||
properties: {
|
||||
extensionManagement: {
|
||||
type: 'boolean',
|
||||
label: 'Extension Management',
|
||||
category: 'Experimental',
|
||||
requiresRestart: true,
|
||||
default: true,
|
||||
description: 'Enable extension management features.',
|
||||
showInDialog: false,
|
||||
},
|
||||
visionModelPreview: {
|
||||
type: 'boolean',
|
||||
label: 'Vision Model Preview',
|
||||
@@ -1259,39 +1250,6 @@ const SETTINGS_SCHEMA = {
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
extensions: {
|
||||
type: 'object',
|
||||
label: 'Extensions',
|
||||
category: 'Extensions',
|
||||
requiresRestart: true,
|
||||
default: {},
|
||||
description: 'Settings for extensions.',
|
||||
showInDialog: false,
|
||||
properties: {
|
||||
disabled: {
|
||||
type: 'array',
|
||||
label: 'Disabled Extensions',
|
||||
category: 'Extensions',
|
||||
requiresRestart: true,
|
||||
default: [] as string[],
|
||||
description: 'List of disabled extensions.',
|
||||
showInDialog: false,
|
||||
mergeStrategy: MergeStrategy.UNION,
|
||||
},
|
||||
workspacesWithMigrationNudge: {
|
||||
type: 'array',
|
||||
label: 'Workspaces with Migration Nudge',
|
||||
category: 'Extensions',
|
||||
requiresRestart: false,
|
||||
default: [] as string[],
|
||||
description:
|
||||
'List of workspaces for which the migration nudge has been shown.',
|
||||
showInDialog: false,
|
||||
mergeStrategy: MergeStrategy.UNION,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as const satisfies SettingsSchema;
|
||||
|
||||
export type SettingsSchemaType = typeof SETTINGS_SCHEMA;
|
||||
|
||||
@@ -271,7 +271,6 @@ describe('gemini.tsx main function', () => {
|
||||
);
|
||||
const { loadSettings } = await import('./config/settings.js');
|
||||
const cleanupModule = await import('./utils/cleanup.js');
|
||||
const extensionModule = await import('./config/extension.js');
|
||||
const validatorModule = await import('./validateNonInterActiveAuth.js');
|
||||
const streamJsonModule = await import('./nonInteractive/session.js');
|
||||
const initializerModule = await import('./core/initializer.js');
|
||||
@@ -284,11 +283,6 @@ describe('gemini.tsx main function', () => {
|
||||
vi.mocked(cleanupModule.registerCleanup).mockImplementation(() => {});
|
||||
const runExitCleanupMock = vi.mocked(cleanupModule.runExitCleanup);
|
||||
runExitCleanupMock.mockResolvedValue(undefined);
|
||||
vi.spyOn(extensionModule, 'loadExtensions').mockReturnValue([]);
|
||||
vi.spyOn(
|
||||
extensionModule.ExtensionStorage,
|
||||
'getUserExtensionsDir',
|
||||
).mockReturnValue('/tmp/extensions');
|
||||
vi.spyOn(initializerModule, 'initializeApp').mockResolvedValue({
|
||||
authError: null,
|
||||
themeError: null,
|
||||
|
||||
@@ -15,13 +15,8 @@ import React from 'react';
|
||||
import { validateAuthMethod } from './config/auth.js';
|
||||
import * as cliConfig from './config/config.js';
|
||||
import { loadCliConfig, parseArguments } from './config/config.js';
|
||||
import { ExtensionStorage, loadExtensions } from './config/extension.js';
|
||||
import type { DnsResolutionOrder, LoadedSettings } from './config/settings.js';
|
||||
import {
|
||||
getSettingsWarnings,
|
||||
loadSettings,
|
||||
migrateDeprecatedSettings,
|
||||
} from './config/settings.js';
|
||||
import { getSettingsWarnings, loadSettings } from './config/settings.js';
|
||||
import {
|
||||
initializeApp,
|
||||
type InitializationResult,
|
||||
@@ -107,7 +102,6 @@ function getNodeMemoryArgs(isDebugMode: boolean): string[] {
|
||||
return [];
|
||||
}
|
||||
|
||||
import { ExtensionEnablementManager } from './config/extensions/extensionEnablement.js';
|
||||
import { loadSandboxConfig } from './config/sandboxConfig.js';
|
||||
import { runAcpAgent } from './acp-integration/acpAgent.js';
|
||||
|
||||
@@ -206,7 +200,6 @@ export async function startInteractiveUI(
|
||||
export async function main() {
|
||||
setupUnhandledRejectionHandler();
|
||||
const settings = loadSettings();
|
||||
migrateDeprecatedSettings(settings);
|
||||
await cleanupCheckpoints();
|
||||
|
||||
let argv = await parseArguments(settings.merged);
|
||||
@@ -251,9 +244,9 @@ export async function main() {
|
||||
if (sandboxConfig) {
|
||||
const partialConfig = await loadCliConfig(
|
||||
settings.merged,
|
||||
[],
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
argv,
|
||||
undefined,
|
||||
[],
|
||||
);
|
||||
|
||||
if (!settings.merged.security?.auth?.useExternal) {
|
||||
@@ -335,26 +328,22 @@ export async function main() {
|
||||
// to run Gemini CLI. It is now safe to perform expensive initialization that
|
||||
// may have side effects.
|
||||
{
|
||||
const extensionEnablementManager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
argv.extensions,
|
||||
);
|
||||
const extensions = loadExtensions(extensionEnablementManager);
|
||||
const config = await loadCliConfig(
|
||||
settings.merged,
|
||||
extensions,
|
||||
extensionEnablementManager,
|
||||
argv,
|
||||
process.cwd(),
|
||||
argv.extensions,
|
||||
);
|
||||
registerCleanup(() => config.shutdown());
|
||||
|
||||
if (config.getListExtensions()) {
|
||||
console.log('Installed extensions:');
|
||||
for (const extension of extensions) {
|
||||
console.log(`- ${extension.config.name}`);
|
||||
}
|
||||
process.exit(0);
|
||||
}
|
||||
// FIXME: list extensions after the config initialize
|
||||
// if (config.getListExtensions()) {
|
||||
// console.log('Installed extensions:');
|
||||
// for (const extension of extensions) {
|
||||
// console.log(`- ${extension.config.name}`);
|
||||
// }
|
||||
// process.exit(0);
|
||||
// }
|
||||
|
||||
// Setup unified ConsolePatcher based on interactive mode
|
||||
const isInteractive = config.isInteractive();
|
||||
@@ -400,7 +389,7 @@ export async function main() {
|
||||
}
|
||||
|
||||
if (config.getExperimentalZedIntegration()) {
|
||||
return runAcpAgent(config, settings, extensions, argv);
|
||||
return runAcpAgent(config, settings, argv);
|
||||
}
|
||||
|
||||
let input = config.getQuestion();
|
||||
|
||||
@@ -151,6 +151,7 @@ export default {
|
||||
'Project Level ({{path}})': 'Projektebene ({{path}})',
|
||||
'User Level ({{path}})': 'Benutzerebene ({{path}})',
|
||||
'Built-in Agents': 'Integrierte Agenten',
|
||||
'Extension Agents': 'Erweiterungs-Agenten',
|
||||
'Using: {{count}} agents': 'Verwendet: {{count}} Agenten',
|
||||
'View Agent': 'Agent anzeigen',
|
||||
'Edit Agent': 'Agent bearbeiten',
|
||||
@@ -348,6 +349,62 @@ export default {
|
||||
'List active extensions': 'Aktive Erweiterungen auflisten',
|
||||
'Update extensions. Usage: update <extension-names>|--all':
|
||||
'Erweiterungen aktualisieren. Verwendung: update <Erweiterungsnamen>|--all',
|
||||
'Disable an extension': 'Erweiterung deaktivieren',
|
||||
'Enable an extension': 'Erweiterung aktivieren',
|
||||
'Install an extension from a git repo or local path':
|
||||
'Erweiterung aus Git-Repository oder lokalem Pfad installieren',
|
||||
'Uninstall an extension': 'Erweiterung deinstallieren',
|
||||
'No extensions installed.': 'Keine Erweiterungen installiert.',
|
||||
'Usage: /extensions update <extension-names>|--all':
|
||||
'Verwendung: /extensions update <Erweiterungsnamen>|--all',
|
||||
'Extension "{{name}}" not found.': 'Erweiterung "{{name}}" nicht gefunden.',
|
||||
'No extensions to update.': 'Keine Erweiterungen zum Aktualisieren.',
|
||||
'Usage: /extensions install <source>':
|
||||
'Verwendung: /extensions install <Quelle>',
|
||||
'Installing extension from "{{source}}"...':
|
||||
'Installiere Erweiterung von "{{source}}"...',
|
||||
'Extension "{{name}}" installed successfully.':
|
||||
'Erweiterung "{{name}}" erfolgreich installiert.',
|
||||
'Failed to install extension from "{{source}}": {{error}}':
|
||||
'Fehler beim Installieren der Erweiterung von "{{source}}": {{error}}',
|
||||
'Usage: /extensions uninstall <extension-name>':
|
||||
'Verwendung: /extensions uninstall <Erweiterungsname>',
|
||||
'Uninstalling extension "{{name}}"...':
|
||||
'Deinstalliere Erweiterung "{{name}}"...',
|
||||
'Extension "{{name}}" uninstalled successfully.':
|
||||
'Erweiterung "{{name}}" erfolgreich deinstalliert.',
|
||||
'Failed to uninstall extension "{{name}}": {{error}}':
|
||||
'Fehler beim Deinstallieren der Erweiterung "{{name}}": {{error}}',
|
||||
'Usage: /extensions {{command}} <extension> [--scope=<user|workspace>]':
|
||||
'Verwendung: /extensions {{command}} <Erweiterung> [--scope=<user|workspace>]',
|
||||
'Unsupported scope "{{scope}}", should be one of "user" or "workspace"':
|
||||
'Nicht unterstützter Bereich "{{scope}}", sollte "user" oder "workspace" sein',
|
||||
'Extension "{{name}}" disabled for scope "{{scope}}"':
|
||||
'Erweiterung "{{name}}" für Bereich "{{scope}}" deaktiviert',
|
||||
'Extension "{{name}}" enabled for scope "{{scope}}"':
|
||||
'Erweiterung "{{name}}" für Bereich "{{scope}}" aktiviert',
|
||||
'Do you want to continue? [Y/n]: ': 'Möchten Sie fortfahren? [Y/n]: ',
|
||||
'Do you want to continue?': 'Möchten Sie fortfahren?',
|
||||
'Installing extension "{{name}}".':
|
||||
'Erweiterung "{{name}}" wird installiert.',
|
||||
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**':
|
||||
'**Erweiterungen können unerwartetes Verhalten verursachen. Stellen Sie sicher, dass Sie die Erweiterungsquelle untersucht haben und dem Autor vertrauen.**',
|
||||
'This extension will run the following MCP servers:':
|
||||
'Diese Erweiterung wird folgende MCP-Server ausführen:',
|
||||
local: 'lokal',
|
||||
remote: 'remote',
|
||||
'This extension will add the following commands: {{commands}}.':
|
||||
'Diese Erweiterung wird folgende Befehle hinzufügen: {{commands}}.',
|
||||
'This extension will append info to your QWEN.md context using {{fileName}}':
|
||||
'Diese Erweiterung wird Informationen zu Ihrem QWEN.md-Kontext mit {{fileName}} hinzufügen',
|
||||
'This extension will exclude the following core tools: {{tools}}':
|
||||
'Diese Erweiterung wird folgende Kernwerkzeuge ausschließen: {{tools}}',
|
||||
'This extension will install the following skills:':
|
||||
'Diese Erweiterung wird folgende Fähigkeiten installieren:',
|
||||
'This extension will install the following subagents:':
|
||||
'Diese Erweiterung wird folgende Unteragenten installieren:',
|
||||
'Installation cancelled for "{{name}}".':
|
||||
'Installation von "{{name}}" abgebrochen.',
|
||||
'manage IDE integration': 'IDE-Integration verwalten',
|
||||
'check status of IDE integration': 'Status der IDE-Integration prüfen',
|
||||
'install required IDE companion for {{ideName}}':
|
||||
@@ -985,6 +1042,19 @@ export default {
|
||||
'Session start time is unavailable, cannot calculate stats.':
|
||||
'Sitzungsstartzeit nicht verfügbar, Statistiken können nicht berechnet werden.',
|
||||
|
||||
// ============================================================================
|
||||
// Command Format Migration
|
||||
// ============================================================================
|
||||
'Command Format Migration': 'Befehlsformat-Migration',
|
||||
'Found {{count}} TOML command file:': '{{count}} TOML-Befehlsdatei gefunden:',
|
||||
'Found {{count}} TOML command files:':
|
||||
'{{count}} TOML-Befehlsdateien gefunden:',
|
||||
'... and {{count}} more': '... und {{count}} weitere',
|
||||
'The TOML format is deprecated. Would you like to migrate them to Markdown format?':
|
||||
'Das TOML-Format ist veraltet. Möchten Sie sie ins Markdown-Format migrieren?',
|
||||
'(Backups will be created and original files will be preserved)':
|
||||
'(Backups werden erstellt und Originaldateien werden beibehalten)',
|
||||
|
||||
// ============================================================================
|
||||
// Loading Phrases
|
||||
// ============================================================================
|
||||
|
||||
@@ -152,6 +152,7 @@ export default {
|
||||
'Project Level ({{path}})': 'Project Level ({{path}})',
|
||||
'User Level ({{path}})': 'User Level ({{path}})',
|
||||
'Built-in Agents': 'Built-in Agents',
|
||||
'Extension Agents': 'Extension Agents',
|
||||
'Using: {{count}} agents': 'Using: {{count}} agents',
|
||||
'View Agent': 'View Agent',
|
||||
'Edit Agent': 'Edit Agent',
|
||||
@@ -344,6 +345,60 @@ export default {
|
||||
'List active extensions': 'List active extensions',
|
||||
'Update extensions. Usage: update <extension-names>|--all':
|
||||
'Update extensions. Usage: update <extension-names>|--all',
|
||||
'Disable an extension': 'Disable an extension',
|
||||
'Enable an extension': 'Enable an extension',
|
||||
'Install an extension from a git repo or local path':
|
||||
'Install an extension from a git repo or local path',
|
||||
'Uninstall an extension': 'Uninstall an extension',
|
||||
'No extensions installed.': 'No extensions installed.',
|
||||
'Usage: /extensions update <extension-names>|--all':
|
||||
'Usage: /extensions update <extension-names>|--all',
|
||||
'Extension "{{name}}" not found.': 'Extension "{{name}}" not found.',
|
||||
'No extensions to update.': 'No extensions to update.',
|
||||
'Usage: /extensions install <source>': 'Usage: /extensions install <source>',
|
||||
'Installing extension from "{{source}}"...':
|
||||
'Installing extension from "{{source}}"...',
|
||||
'Extension "{{name}}" installed successfully.':
|
||||
'Extension "{{name}}" installed successfully.',
|
||||
'Failed to install extension from "{{source}}": {{error}}':
|
||||
'Failed to install extension from "{{source}}": {{error}}',
|
||||
'Usage: /extensions uninstall <extension-name>':
|
||||
'Usage: /extensions uninstall <extension-name>',
|
||||
'Uninstalling extension "{{name}}"...':
|
||||
'Uninstalling extension "{{name}}"...',
|
||||
'Extension "{{name}}" uninstalled successfully.':
|
||||
'Extension "{{name}}" uninstalled successfully.',
|
||||
'Failed to uninstall extension "{{name}}": {{error}}':
|
||||
'Failed to uninstall extension "{{name}}": {{error}}',
|
||||
'Usage: /extensions {{command}} <extension> [--scope=<user|workspace>]':
|
||||
'Usage: /extensions {{command}} <extension> [--scope=<user|workspace>]',
|
||||
'Unsupported scope "{{scope}}", should be one of "user" or "workspace"':
|
||||
'Unsupported scope "{{scope}}", should be one of "user" or "workspace"',
|
||||
'Extension "{{name}}" disabled for scope "{{scope}}"':
|
||||
'Extension "{{name}}" disabled for scope "{{scope}}"',
|
||||
'Extension "{{name}}" enabled for scope "{{scope}}"':
|
||||
'Extension "{{name}}" enabled for scope "{{scope}}"',
|
||||
'Do you want to continue? [Y/n]: ': 'Do you want to continue? [Y/n]: ',
|
||||
'Do you want to continue?': 'Do you want to continue?',
|
||||
'Installing extension "{{name}}".': 'Installing extension "{{name}}".',
|
||||
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**':
|
||||
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**',
|
||||
'This extension will run the following MCP servers:':
|
||||
'This extension will run the following MCP servers:',
|
||||
local: 'local',
|
||||
remote: 'remote',
|
||||
'This extension will add the following commands: {{commands}}.':
|
||||
'This extension will add the following commands: {{commands}}.',
|
||||
'This extension will append info to your QWEN.md context using {{fileName}}':
|
||||
'This extension will append info to your QWEN.md context using {{fileName}}',
|
||||
'This extension will exclude the following core tools: {{tools}}':
|
||||
'This extension will exclude the following core tools: {{tools}}',
|
||||
'This extension will install the following skills:':
|
||||
'This extension will install the following skills:',
|
||||
'This extension will install the following subagents:':
|
||||
'This extension will install the following subagents:',
|
||||
'Installation cancelled for "{{name}}".':
|
||||
'Installation cancelled for "{{name}}".',
|
||||
'manage IDE integration': 'manage IDE integration',
|
||||
'check status of IDE integration': 'check status of IDE integration',
|
||||
'install required IDE companion for {{ideName}}':
|
||||
@@ -958,6 +1013,18 @@ export default {
|
||||
'Session start time is unavailable, cannot calculate stats.':
|
||||
'Session start time is unavailable, cannot calculate stats.',
|
||||
|
||||
// ============================================================================
|
||||
// Command Format Migration
|
||||
// ============================================================================
|
||||
'Command Format Migration': 'Command Format Migration',
|
||||
'Found {{count}} TOML command file:': 'Found {{count}} TOML command file:',
|
||||
'Found {{count}} TOML command files:': 'Found {{count}} TOML command files:',
|
||||
'... and {{count}} more': '... and {{count}} more',
|
||||
'The TOML format is deprecated. Would you like to migrate them to Markdown format?':
|
||||
'The TOML format is deprecated. Would you like to migrate them to Markdown format?',
|
||||
'(Backups will be created and original files will be preserved)':
|
||||
'(Backups will be created and original files will be preserved)',
|
||||
|
||||
// ============================================================================
|
||||
// Loading Phrases
|
||||
// ============================================================================
|
||||
|
||||
@@ -155,6 +155,7 @@ export default {
|
||||
'Project Level ({{path}})': 'Уровень проекта ({{path}})',
|
||||
'User Level ({{path}})': 'Уровень пользователя ({{path}})',
|
||||
'Built-in Agents': 'Встроенные агенты',
|
||||
'Extension Agents': 'Агенты расширений',
|
||||
'Using: {{count}} agents': 'Используется: {{count}} агент(ов)',
|
||||
'View Agent': 'Просмотреть агента',
|
||||
'Edit Agent': 'Редактировать агента',
|
||||
@@ -349,6 +350,59 @@ export default {
|
||||
'List active extensions': 'Показать активные расширения',
|
||||
'Update extensions. Usage: update <extension-names>|--all':
|
||||
'Обновить расширения. Использование: update <extension-names>|--all',
|
||||
'Disable an extension': 'Отключить расширение',
|
||||
'Enable an extension': 'Включить расширение',
|
||||
'Install an extension from a git repo or local path':
|
||||
'Установить расширение из Git-репозитория или локального пути',
|
||||
'Uninstall an extension': 'Удалить расширение',
|
||||
'No extensions installed.': 'Расширения не установлены.',
|
||||
'Usage: /extensions update <extension-names>|--all':
|
||||
'Использование: /extensions update <имена-расширений>|--all',
|
||||
'Extension "{{name}}" not found.': 'Расширение "{{name}}" не найдено.',
|
||||
'No extensions to update.': 'Нет расширений для обновления.',
|
||||
'Usage: /extensions install <source>':
|
||||
'Использование: /extensions install <источник>',
|
||||
'Installing extension from "{{source}}"...':
|
||||
'Установка расширения из "{{source}}"...',
|
||||
'Extension "{{name}}" installed successfully.':
|
||||
'Расширение "{{name}}" успешно установлено.',
|
||||
'Failed to install extension from "{{source}}": {{error}}':
|
||||
'Не удалось установить расширение из "{{source}}": {{error}}',
|
||||
'Usage: /extensions uninstall <extension-name>':
|
||||
'Использование: /extensions uninstall <имя-расширения>',
|
||||
'Uninstalling extension "{{name}}"...': 'Удаление расширения "{{name}}"...',
|
||||
'Extension "{{name}}" uninstalled successfully.':
|
||||
'Расширение "{{name}}" успешно удалено.',
|
||||
'Failed to uninstall extension "{{name}}": {{error}}':
|
||||
'Не удалось удалить расширение "{{name}}": {{error}}',
|
||||
'Usage: /extensions {{command}} <extension> [--scope=<user|workspace>]':
|
||||
'Использование: /extensions {{command}} <расширение> [--scope=<user|workspace>]',
|
||||
'Unsupported scope "{{scope}}", should be one of "user" or "workspace"':
|
||||
'Неподдерживаемая область "{{scope}}", должна быть "user" или "workspace"',
|
||||
'Extension "{{name}}" disabled for scope "{{scope}}"':
|
||||
'Расширение "{{name}}" отключено для области "{{scope}}"',
|
||||
'Extension "{{name}}" enabled for scope "{{scope}}"':
|
||||
'Расширение "{{name}}" включено для области "{{scope}}"',
|
||||
'Do you want to continue? [Y/n]: ': 'Хотите продолжить? [Y/n]: ',
|
||||
'Do you want to continue?': 'Хотите продолжить?',
|
||||
'Installing extension "{{name}}".': 'Установка расширения "{{name}}".',
|
||||
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**':
|
||||
'**Расширения могут вызывать неожиданное поведение. Убедитесь, что вы изучили источник расширения и доверяете автору.**',
|
||||
'This extension will run the following MCP servers:':
|
||||
'Это расширение запустит следующие MCP-серверы:',
|
||||
local: 'локальный',
|
||||
remote: 'удалённый',
|
||||
'This extension will add the following commands: {{commands}}.':
|
||||
'Это расширение добавит следующие команды: {{commands}}.',
|
||||
'This extension will append info to your QWEN.md context using {{fileName}}':
|
||||
'Это расширение добавит информацию в ваш контекст QWEN.md с помощью {{fileName}}',
|
||||
'This extension will exclude the following core tools: {{tools}}':
|
||||
'Это расширение исключит следующие основные инструменты: {{tools}}',
|
||||
'This extension will install the following skills:':
|
||||
'Это расширение установит следующие навыки:',
|
||||
'This extension will install the following subagents:':
|
||||
'Это расширение установит следующие подагенты:',
|
||||
'Installation cancelled for "{{name}}".': 'Установка "{{name}}" отменена.',
|
||||
'manage IDE integration': 'Управление интеграцией с IDE',
|
||||
'check status of IDE integration': 'Проверить статус интеграции с IDE',
|
||||
'install required IDE companion for {{ideName}}':
|
||||
@@ -975,6 +1029,19 @@ export default {
|
||||
'Session start time is unavailable, cannot calculate stats.':
|
||||
'Время начала сессии недоступно, невозможно рассчитать статистику.',
|
||||
|
||||
// ============================================================================
|
||||
// Command Format Migration
|
||||
// ============================================================================
|
||||
'Command Format Migration': 'Миграция формата команд',
|
||||
'Found {{count}} TOML command file:': 'Найден {{count}} файл команд TOML:',
|
||||
'Found {{count}} TOML command files:':
|
||||
'Найдено {{count}} файлов команд TOML:',
|
||||
'... and {{count}} more': '... и ещё {{count}}',
|
||||
'The TOML format is deprecated. Would you like to migrate them to Markdown format?':
|
||||
'Формат TOML устарел. Хотите перенести их в формат Markdown?',
|
||||
'(Backups will be created and original files will be preserved)':
|
||||
'(Будут созданы резервные копии, исходные файлы будут сохранены)',
|
||||
|
||||
// ============================================================================
|
||||
// Loading Phrases
|
||||
// ============================================================================
|
||||
|
||||
@@ -149,6 +149,7 @@ export default {
|
||||
'Project Level ({{path}})': '项目级 ({{path}})',
|
||||
'User Level ({{path}})': '用户级 ({{path}})',
|
||||
'Built-in Agents': '内置代理',
|
||||
'Extension Agents': '扩展代理',
|
||||
'Using: {{count}} agents': '使用中: {{count}} 个代理',
|
||||
'View Agent': '查看代理',
|
||||
'Edit Agent': '编辑代理',
|
||||
@@ -331,6 +332,56 @@ export default {
|
||||
'List active extensions': '列出活动扩展',
|
||||
'Update extensions. Usage: update <extension-names>|--all':
|
||||
'更新扩展。用法:update <extension-names>|--all',
|
||||
'Disable an extension': '禁用扩展',
|
||||
'Enable an extension': '启用扩展',
|
||||
'Install an extension from a git repo or local path':
|
||||
'从 Git 仓库或本地路径安装扩展',
|
||||
'Uninstall an extension': '卸载扩展',
|
||||
'No extensions installed.': '未安装扩展。',
|
||||
'Usage: /extensions update <extension-names>|--all':
|
||||
'用法:/extensions update <扩展名>|--all',
|
||||
'Extension "{{name}}" not found.': '未找到扩展 "{{name}}"。',
|
||||
'No extensions to update.': '没有可更新的扩展。',
|
||||
'Usage: /extensions install <source>': '用法:/extensions install <来源>',
|
||||
'Installing extension from "{{source}}"...':
|
||||
'正在从 "{{source}}" 安装扩展...',
|
||||
'Extension "{{name}}" installed successfully.': '扩展 "{{name}}" 安装成功。',
|
||||
'Failed to install extension from "{{source}}": {{error}}':
|
||||
'从 "{{source}}" 安装扩展失败:{{error}}',
|
||||
'Usage: /extensions uninstall <extension-name>':
|
||||
'用法:/extensions uninstall <扩展名>',
|
||||
'Uninstalling extension "{{name}}"...': '正在卸载扩展 "{{name}}"...',
|
||||
'Extension "{{name}}" uninstalled successfully.':
|
||||
'扩展 "{{name}}" 卸载成功。',
|
||||
'Failed to uninstall extension "{{name}}": {{error}}':
|
||||
'卸载扩展 "{{name}}" 失败:{{error}}',
|
||||
'Usage: /extensions {{command}} <extension> [--scope=<user|workspace>]':
|
||||
'用法:/extensions {{command}} <扩展> [--scope=<user|workspace>]',
|
||||
'Unsupported scope "{{scope}}", should be one of "user" or "workspace"':
|
||||
'不支持的作用域 "{{scope}}",应为 "user" 或 "workspace"',
|
||||
'Extension "{{name}}" disabled for scope "{{scope}}"':
|
||||
'扩展 "{{name}}" 已在作用域 "{{scope}}" 中禁用',
|
||||
'Extension "{{name}}" enabled for scope "{{scope}}"':
|
||||
'扩展 "{{name}}" 已在作用域 "{{scope}}" 中启用',
|
||||
'Do you want to continue? [Y/n]: ': '是否继续?[Y/n]:',
|
||||
'Do you want to continue?': '是否继续?',
|
||||
'Installing extension "{{name}}".': '正在安装扩展 "{{name}}"。',
|
||||
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**':
|
||||
'**扩展可能会引入意外行为。请确保您已调查过扩展源并信任作者。**',
|
||||
'This extension will run the following MCP servers:':
|
||||
'此扩展将运行以下 MCP 服务器:',
|
||||
local: '本地',
|
||||
remote: '远程',
|
||||
'This extension will add the following commands: {{commands}}.':
|
||||
'此扩展将添加以下命令:{{commands}}。',
|
||||
'This extension will append info to your QWEN.md context using {{fileName}}':
|
||||
'此扩展将使用 {{fileName}} 向您的 QWEN.md 上下文追加信息',
|
||||
'This extension will exclude the following core tools: {{tools}}':
|
||||
'此扩展将排除以下核心工具:{{tools}}',
|
||||
'This extension will install the following skills:': '此扩展将安装以下技能:',
|
||||
'This extension will install the following subagents:':
|
||||
'此扩展将安装以下子代理:',
|
||||
'Installation cancelled for "{{name}}".': '已取消安装 "{{name}}"。',
|
||||
'manage IDE integration': '管理 IDE 集成',
|
||||
'check status of IDE integration': '检查 IDE 集成状态',
|
||||
'install required IDE companion for {{ideName}}':
|
||||
@@ -911,6 +962,18 @@ export default {
|
||||
'Session start time is unavailable, cannot calculate stats.':
|
||||
'会话开始时间不可用,无法计算统计信息',
|
||||
|
||||
// ============================================================================
|
||||
// Command Format Migration
|
||||
// ============================================================================
|
||||
'Command Format Migration': '命令格式迁移',
|
||||
'Found {{count}} TOML command file:': '发现 {{count}} 个 TOML 命令文件:',
|
||||
'Found {{count}} TOML command files:': '发现 {{count}} 个 TOML 命令文件:',
|
||||
'... and {{count}} more': '... 以及其他 {{count}} 个',
|
||||
'The TOML format is deprecated. Would you like to migrate them to Markdown format?':
|
||||
'TOML 格式已弃用。是否将它们迁移到 Markdown 格式?',
|
||||
'(Backups will be created and original files will be preserved)':
|
||||
'(将创建备份,原始文件将保留)',
|
||||
|
||||
// ============================================================================
|
||||
// Loading Phrases
|
||||
// ============================================================================
|
||||
|
||||
327 packages/cli/src/services/FileCommandLoader-extension.test.ts Normal file
@@ -0,0 +1,327 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import * as os from 'node:os';
|
||||
import { FileCommandLoader } from './FileCommandLoader.js';
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import { Storage } from '@qwen-code/qwen-code-core';
|
||||
|
||||
describe('FileCommandLoader - Extension Commands Support', () => {
|
||||
let tempDir: string;
|
||||
let mockConfig: Partial<Config>;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await fs.promises.mkdtemp(
|
||||
path.join(os.tmpdir(), 'file-command-loader-ext-test-'),
|
||||
);
|
||||
|
||||
mockConfig = {
|
||||
getFolderTrustFeature: () => false,
|
||||
getFolderTrust: () => true,
|
||||
getProjectRoot: () => tempDir,
|
||||
storage: new Storage(tempDir),
|
||||
getExtensions: () => [],
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.promises.rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should load commands from extension with config.commands path', async () => {
|
||||
// Setup extension structure
|
||||
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'test-ext');
|
||||
const customCommandsDir = path.join(extensionDir, 'custom-cmds');
|
||||
await fs.promises.mkdir(customCommandsDir, { recursive: true });
|
||||
|
||||
// Create extension config with custom commands path
|
||||
const extensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
commands: 'custom-cmds',
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
// Create a test command in custom directory
|
||||
const commandContent =
|
||||
'---\ndescription: Test command from extension\n---\nDo something';
|
||||
await fs.promises.writeFile(
|
||||
path.join(customCommandsDir, 'test.md'),
|
||||
commandContent,
|
||||
);
|
||||
|
||||
// Mock config to return the extension
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'test-ext',
|
||||
config: extensionConfig,
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
contextFiles: [],
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('test-ext:test');
|
||||
expect(commands[0].description).toBe(
|
||||
'[test-ext] Test command from extension',
|
||||
);
|
||||
});
|
||||
|
||||
it('should load commands from extension with multiple commands paths', async () => {
|
||||
// Setup extension structure
|
||||
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'multi-ext');
|
||||
const cmdsDir1 = path.join(extensionDir, 'commands1');
|
||||
const cmdsDir2 = path.join(extensionDir, 'commands2');
|
||||
await fs.promises.mkdir(cmdsDir1, { recursive: true });
|
||||
await fs.promises.mkdir(cmdsDir2, { recursive: true });
|
||||
|
||||
// Create extension config with multiple commands paths
|
||||
const extensionConfig = {
|
||||
name: 'multi-ext',
|
||||
version: '1.0.0',
|
||||
commands: ['commands1', 'commands2'],
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
// Create test commands in both directories
|
||||
await fs.promises.writeFile(
|
||||
path.join(cmdsDir1, 'cmd1.md'),
|
||||
'---\n---\nCommand 1',
|
||||
);
|
||||
await fs.promises.writeFile(
|
||||
path.join(cmdsDir2, 'cmd2.md'),
|
||||
'---\n---\nCommand 2',
|
||||
);
|
||||
|
||||
// Mock config to return the extension
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'multi-ext',
|
||||
config: extensionConfig,
|
||||
contextFiles: [],
|
||||
name: 'multi-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(2);
|
||||
const commandNames = commands.map((c) => c.name).sort();
|
||||
expect(commandNames).toEqual(['multi-ext:cmd1', 'multi-ext:cmd2']);
|
||||
});
|
||||
|
||||
it('should fallback to default "commands" directory when config.commands not specified', async () => {
|
||||
// Setup extension structure with default commands directory
|
||||
const extensionDir = path.join(
|
||||
tempDir,
|
||||
'.qwen',
|
||||
'extensions',
|
||||
'default-ext',
|
||||
);
|
||||
const defaultCommandsDir = path.join(extensionDir, 'commands');
|
||||
await fs.promises.mkdir(defaultCommandsDir, { recursive: true });
|
||||
|
||||
// Create extension config without commands field
|
||||
const extensionConfig = {
|
||||
name: 'default-ext',
|
||||
version: '1.0.0',
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
// Create a test command in default directory
|
||||
await fs.promises.writeFile(
|
||||
path.join(defaultCommandsDir, 'default.md'),
|
||||
'---\n---\nDefault command',
|
||||
);
|
||||
|
||||
// Mock config to return the extension
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'default-ext',
|
||||
config: extensionConfig,
|
||||
contextFiles: [],
|
||||
name: 'default-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('default-ext:default');
|
||||
});
|
||||
|
||||
it('should handle extension without commands directory gracefully', async () => {
|
||||
// Setup extension structure without commands directory
|
||||
const extensionDir = path.join(
|
||||
tempDir,
|
||||
'.qwen',
|
||||
'extensions',
|
||||
'no-cmds-ext',
|
||||
);
|
||||
await fs.promises.mkdir(extensionDir, { recursive: true });
|
||||
|
||||
// Create extension config
|
||||
const extensionConfig = {
|
||||
name: 'no-cmds-ext',
|
||||
version: '1.0.0',
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
// Mock config to return the extension
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'no-cmds-ext',
|
||||
config: extensionConfig,
|
||||
contextFiles: [],
|
||||
name: 'no-cmds-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
// Should not throw and return empty array
|
||||
expect(commands).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should prefix extension commands with extension name', async () => {
|
||||
// Setup extension
|
||||
const extensionDir = path.join(
|
||||
tempDir,
|
||||
'.qwen',
|
||||
'extensions',
|
||||
'prefix-ext',
|
||||
);
|
||||
const commandsDir = path.join(extensionDir, 'commands');
|
||||
await fs.promises.mkdir(commandsDir, { recursive: true });
|
||||
|
||||
const extensionConfig = {
|
||||
name: 'prefix-ext',
|
||||
version: '1.0.0',
|
||||
};
|
||||
await fs.promises.writeFile(
|
||||
path.join(extensionDir, 'qwen-extension.json'),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
await fs.promises.writeFile(
|
||||
path.join(commandsDir, 'mycommand.md'),
|
||||
'---\n---\nMy command',
|
||||
);
|
||||
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'prefix-ext',
|
||||
config: extensionConfig,
|
||||
contextFiles: [],
|
||||
name: 'prefix-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: extensionDir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('prefix-ext:mycommand');
|
||||
});
|
||||
|
||||
it('should load commands from multiple extensions in alphabetical order', async () => {
|
||||
// Setup two extensions
|
||||
const ext1Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-b');
|
||||
const ext2Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-a');
|
||||
|
||||
await fs.promises.mkdir(path.join(ext1Dir, 'commands'), {
|
||||
recursive: true,
|
||||
});
|
||||
await fs.promises.mkdir(path.join(ext2Dir, 'commands'), {
|
||||
recursive: true,
|
||||
});
|
||||
|
||||
// Extension B
|
||||
await fs.promises.writeFile(
|
||||
path.join(ext1Dir, 'qwen-extension.json'),
|
||||
JSON.stringify({ name: 'ext-b', version: '1.0.0' }),
|
||||
);
|
||||
await fs.promises.writeFile(
|
||||
path.join(ext1Dir, 'commands', 'cmd.md'),
|
||||
'---\n---\nCommand B',
|
||||
);
|
||||
|
||||
// Extension A
|
||||
await fs.promises.writeFile(
|
||||
path.join(ext2Dir, 'qwen-extension.json'),
|
||||
JSON.stringify({ name: 'ext-a', version: '1.0.0' }),
|
||||
);
|
||||
await fs.promises.writeFile(
|
||||
path.join(ext2Dir, 'commands', 'cmd.md'),
|
||||
'---\n---\nCommand A',
|
||||
);
|
||||
|
||||
mockConfig.getExtensions = () => [
|
||||
{
|
||||
id: 'ext-b',
|
||||
config: { name: 'ext-b', version: '1.0.0' },
|
||||
contextFiles: [],
|
||||
name: 'ext-b',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: ext1Dir,
|
||||
},
|
||||
{
|
||||
id: 'ext-a',
|
||||
config: { name: 'ext-a', version: '1.0.0' },
|
||||
contextFiles: [],
|
||||
name: 'ext-a',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: ext2Dir,
|
||||
},
|
||||
];
|
||||
|
||||
const loader = new FileCommandLoader(mockConfig as Config);
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(2);
|
||||
// Extensions are sorted alphabetically, so ext-a comes before ext-b
|
||||
expect(commands[0].name).toBe('ext-a:cmd');
|
||||
expect(commands[1].name).toBe('ext-b:cmd');
|
||||
});
|
||||
});
|
||||
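The extension layout these tests exercise can be summarized as follows. This is a sketch inferred from the fixtures above; only the name, version, and commands fields are asserted anywhere in this diff.

// Sketch of the qwen-extension.json shape used by the fixtures above.
// 'commands' may be a single path or an array of paths relative to the
// extension root; when omitted, the loader falls back to the default
// 'commands' directory.
const exampleExtensionConfig = {
  name: 'test-ext',
  version: '1.0.0',
  commands: 'custom-cmds', // or ['commands1', 'commands2']
};
// A file at custom-cmds/test.md with a 'description' frontmatter field is then
// exposed as the slash command 'test-ext:test', and its description is shown
// as '[test-ext] Test command from extension'.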
117 packages/cli/src/services/FileCommandLoader-markdown.test.ts Normal file
@@ -0,0 +1,117 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import os from 'node:os';
|
||||
import { FileCommandLoader } from './FileCommandLoader.js';
|
||||
|
||||
describe('FileCommandLoader - Markdown support', () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create a temporary directory for test commands
|
||||
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-md-test-'));
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean up
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should load markdown commands with frontmatter', async () => {
|
||||
// Create a test markdown command file
|
||||
const mdContent = `---
|
||||
description: Test markdown command
|
||||
---
|
||||
|
||||
This is a test prompt from markdown.`;
|
||||
|
||||
const commandPath = path.join(tempDir, 'test-command.md');
|
||||
await fs.writeFile(commandPath, mdContent, 'utf-8');
|
||||
|
||||
// Create loader with temp dir as command source
|
||||
const loader = new FileCommandLoader(null);
|
||||
|
||||
// Mock the getCommandDirectories to return our temp dir
|
||||
const originalMethod = loader['getCommandDirectories'];
|
||||
loader['getCommandDirectories'] = () => [{ path: tempDir }];
|
||||
|
||||
try {
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
expect(commands).toHaveLength(1);
|
||||
expect(commands[0].name).toBe('test-command');
|
||||
expect(commands[0].description).toBe('Test markdown command');
|
||||
} finally {
|
||||
// Restore original method
|
||||
loader['getCommandDirectories'] = originalMethod;
|
||||
}
|
||||
});
|
||||
|
||||
it('should load markdown commands without frontmatter', async () => {
|
||||
// Create a test markdown command file without frontmatter
|
||||
const mdContent = 'This is a simple prompt without frontmatter.';
|
||||
|
||||
const commandPath = path.join(tempDir, 'simple-command.md');
|
||||
await fs.writeFile(commandPath, mdContent, 'utf-8');
|
||||
|
||||
const loader = new FileCommandLoader(null);
|
||||
const originalMethod = loader['getCommandDirectories'];
|
||||
loader['getCommandDirectories'] = () => [{ path: tempDir }];
|
||||
|
||||
try {
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
const simpleCommand = commands.find(
|
||||
(cmd) => cmd.name === 'simple-command',
|
||||
);
|
||||
expect(simpleCommand).toBeDefined();
|
||||
expect(simpleCommand?.description).toContain('Custom command from');
|
||||
} finally {
|
||||
loader['getCommandDirectories'] = originalMethod;
|
||||
}
|
||||
});
|
||||
|
||||
it('should load both toml and markdown commands', async () => {
|
||||
// Create both TOML and Markdown files
|
||||
const tomlContent = `prompt = "TOML prompt"
|
||||
description = "TOML command"`;
|
||||
|
||||
const mdContent = `---
|
||||
description: Markdown command
|
||||
---
|
||||
|
||||
Markdown prompt`;
|
||||
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'toml-cmd.toml'),
|
||||
tomlContent,
|
||||
'utf-8',
|
||||
);
|
||||
await fs.writeFile(path.join(tempDir, 'md-cmd.md'), mdContent, 'utf-8');
|
||||
|
||||
const loader = new FileCommandLoader(null);
|
||||
const originalMethod = loader['getCommandDirectories'];
|
||||
loader['getCommandDirectories'] = () => [{ path: tempDir }];
|
||||
|
||||
try {
|
||||
const commands = await loader.loadCommands(new AbortController().signal);
|
||||
|
||||
const tomlCommand = commands.find((cmd) => cmd.name === 'toml-cmd');
|
||||
const mdCommand = commands.find((cmd) => cmd.name === 'md-cmd');
|
||||
|
||||
expect(tomlCommand).toBeDefined();
|
||||
expect(tomlCommand?.description).toBe('TOML command');
|
||||
|
||||
expect(mdCommand).toBeDefined();
|
||||
expect(mdCommand?.description).toBe('Markdown command');
|
||||
} finally {
|
||||
loader['getCommandDirectories'] = originalMethod;
|
||||
}
|
||||
});
|
||||
});
|
||||
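For reference, a complete Markdown command file in the format parsed above looks like the sketch below; the file name (minus .md) becomes the command name, and the prompt is everything after the frontmatter. The command body here is purely illustrative.

// An on-disk Markdown command, mirroring the fixture format used in these tests:
const exampleMarkdownCommand = `---
description: Summarize staged changes
---

Summarize the currently staged changes and call out anything that still needs tests.`;
// Without a frontmatter 'description', the loader falls back to a description
// of the form 'Custom command from <file name>'.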
@@ -568,9 +568,9 @@ describe('FileCommandLoader', () => {

expect(commands).toHaveLength(3);
const commandNames = commands.map((cmd) => cmd.name);
expect(commandNames).toEqual(['user', 'project', 'ext']);
expect(commandNames).toEqual(['user', 'project', 'test-ext:ext']);

const extCommand = commands.find((cmd) => cmd.name === 'ext');
const extCommand = commands.find((cmd) => cmd.name === 'test-ext:ext');
expect(extCommand?.extensionName).toBe('test-ext');
expect(extCommand?.description).toMatch(/^\[test-ext\]/);
});
@@ -656,14 +656,14 @@ describe('FileCommandLoader', () => {
expect(result1.content).toEqual([{ text: 'Project deploy command' }]);
}

expect(commands[2].name).toBe('deploy');
expect(commands[2].name).toBe('test-ext:deploy');
expect(commands[2].extensionName).toBe('test-ext');
expect(commands[2].description).toMatch(/^\[test-ext\]/);
const result2 = await commands[2].action?.(
createMockCommandContext({
invocation: {
raw: '/deploy',
name: 'deploy',
raw: '/test-ext:deploy',
name: 'test-ext:deploy',
args: '',
},
}),
@@ -729,7 +729,7 @@ describe('FileCommandLoader', () => {
const commands = await loader.loadCommands(signal);

expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('active');
expect(commands[0].name).toBe('active-ext:active');
expect(commands[0].extensionName).toBe('active-ext');
expect(commands[0].description).toMatch(/^\[active-ext\]/);
});
@@ -803,17 +803,17 @@ describe('FileCommandLoader', () => {
expect(commands).toHaveLength(3);

const commandNames = commands.map((cmd) => cmd.name).sort();
expect(commandNames).toEqual(['b:c', 'b:d:e', 'simple']);
expect(commandNames).toEqual(['a:b:c', 'a:b:d:e', 'a:simple']);

const nestedCmd = commands.find((cmd) => cmd.name === 'b:c');
const nestedCmd = commands.find((cmd) => cmd.name === 'a:b:c');
expect(nestedCmd?.extensionName).toBe('a');
expect(nestedCmd?.description).toMatch(/^\[a\]/);
expect(nestedCmd).toBeDefined();
const result = await nestedCmd!.action?.(
createMockCommandContext({
invocation: {
raw: '/b:c',
name: 'b:c',
raw: '/a:b:c',
name: 'a:b:c',
args: '',
},
}),

@@ -5,34 +5,23 @@
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'node:fs';
|
||||
import * as fsSync from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import toml from '@iarna/toml';
|
||||
import { glob } from 'glob';
|
||||
import { z } from 'zod';
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import { Storage } from '@qwen-code/qwen-code-core';
|
||||
import { EXTENSIONS_CONFIG_FILENAME, Storage } from '@qwen-code/qwen-code-core';
|
||||
import type { ICommandLoader } from './types.js';
|
||||
import type {
|
||||
CommandContext,
|
||||
SlashCommand,
|
||||
SlashCommandActionReturn,
|
||||
} from '../ui/commands/types.js';
|
||||
import { CommandKind } from '../ui/commands/types.js';
|
||||
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
|
||||
import type {
|
||||
IPromptProcessor,
|
||||
PromptPipelineContent,
|
||||
} from './prompt-processors/types.js';
|
||||
import {
|
||||
SHORTHAND_ARGS_PLACEHOLDER,
|
||||
SHELL_INJECTION_TRIGGER,
|
||||
AT_FILE_INJECTION_TRIGGER,
|
||||
} from './prompt-processors/types.js';
|
||||
parseMarkdownCommand,
|
||||
MarkdownCommandDefSchema,
|
||||
} from './markdown-command-parser.js';
|
||||
import {
|
||||
ConfirmationRequiredError,
|
||||
ShellProcessor,
|
||||
} from './prompt-processors/shellProcessor.js';
|
||||
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
|
||||
createSlashCommandFromDefinition,
|
||||
type CommandDefinition,
|
||||
} from './command-factory.js';
|
||||
import type { SlashCommand } from '../ui/commands/types.js';
|
||||
|
||||
interface CommandDirectory {
|
||||
path: string;
|
||||
@@ -96,7 +85,12 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
const commandDirs = this.getCommandDirectories();
|
||||
for (const dirInfo of commandDirs) {
|
||||
try {
|
||||
const files = await glob('**/*.toml', {
|
||||
// Scan both .toml and .md files
|
||||
const tomlFiles = await glob('**/*.toml', {
|
||||
...globOptions,
|
||||
cwd: dirInfo.path,
|
||||
});
|
||||
const mdFiles = await glob('**/*.md', {
|
||||
...globOptions,
|
||||
cwd: dirInfo.path,
|
||||
});
|
||||
@@ -105,18 +99,28 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
return [];
|
||||
}
|
||||
|
||||
const commandPromises = files.map((file) =>
|
||||
this.parseAndAdaptFile(
|
||||
// Process TOML files
|
||||
const tomlCommandPromises = tomlFiles.map((file) =>
|
||||
this.parseAndAdaptTomlFile(
|
||||
path.join(dirInfo.path, file),
|
||||
dirInfo.path,
|
||||
dirInfo.extensionName,
|
||||
),
|
||||
);
|
||||
|
||||
const commands = (await Promise.all(commandPromises)).filter(
|
||||
(cmd): cmd is SlashCommand => cmd !== null,
|
||||
// Process Markdown files
|
||||
const mdCommandPromises = mdFiles.map((file) =>
|
||||
this.parseAndAdaptMarkdownFile(
|
||||
path.join(dirInfo.path, file),
|
||||
dirInfo.path,
|
||||
dirInfo.extensionName,
|
||||
),
|
||||
);
|
||||
|
||||
const commands = (
|
||||
await Promise.all([...tomlCommandPromises, ...mdCommandPromises])
|
||||
).filter((cmd): cmd is SlashCommand => cmd !== null);
|
||||
|
||||
// Add all commands without deduplication
|
||||
allCommands.push(...commands);
|
||||
} catch (error) {
|
||||
@@ -159,17 +163,73 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
.filter((ext) => ext.isActive)
|
||||
.sort((a, b) => a.name.localeCompare(b.name)); // Sort alphabetically for deterministic loading
|
||||
|
||||
const extensionCommandDirs = activeExtensions.map((ext) => ({
|
||||
path: path.join(ext.path, 'commands'),
|
||||
extensionName: ext.name,
|
||||
}));
|
||||
// Collect command directories from each extension
|
||||
for (const ext of activeExtensions) {
|
||||
// Get commands paths from extension config
|
||||
const commandsPaths = this.getExtensionCommandsPaths(ext);
|
||||
|
||||
dirs.push(...extensionCommandDirs);
|
||||
for (const cmdPath of commandsPaths) {
|
||||
dirs.push({
|
||||
path: cmdPath,
|
||||
extensionName: ext.name,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return dirs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get commands paths from an extension.
|
||||
* Returns paths from config.commands if specified, otherwise defaults to 'commands' directory.
|
||||
*/
|
||||
private getExtensionCommandsPaths(ext: {
|
||||
path: string;
|
||||
name: string;
|
||||
}): string[] {
|
||||
// Try to get extension config
|
||||
try {
|
||||
const configPath = path.join(ext.path, EXTENSIONS_CONFIG_FILENAME);
|
||||
if (fsSync.existsSync(configPath)) {
|
||||
const configContent = fsSync.readFileSync(configPath, 'utf-8');
|
||||
const config = JSON.parse(configContent);
|
||||
|
||||
if (config.commands) {
|
||||
const commandsArray = Array.isArray(config.commands)
|
||||
? config.commands
|
||||
: [config.commands];
|
||||
|
||||
return commandsArray
|
||||
.map((cmdPath: string) =>
|
||||
path.isAbsolute(cmdPath) ? cmdPath : path.join(ext.path, cmdPath),
|
||||
)
|
||||
.filter((cmdPath: string) => {
|
||||
try {
|
||||
return fsSync.existsSync(cmdPath);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`Failed to read extension config for ${ext.name}:`, error);
|
||||
}
|
||||
|
||||
// Default fallback: use 'commands' directory
|
||||
const defaultPath = path.join(ext.path, 'commands');
|
||||
try {
|
||||
if (fsSync.existsSync(defaultPath)) {
|
||||
return [defaultPath];
|
||||
}
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a single .toml file and transforms it into a SlashCommand object.
|
||||
* @param filePath The absolute path to the .toml file.
|
||||
@@ -177,7 +237,7 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
* @param extensionName Optional extension name to prefix commands with.
|
||||
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
|
||||
*/
|
||||
private async parseAndAdaptFile(
|
||||
private async parseAndAdaptTomlFile(
|
||||
filePath: string,
|
||||
baseDir: string,
|
||||
extensionName?: string,
|
||||
@@ -216,104 +276,79 @@ export class FileCommandLoader implements ICommandLoader {
|
||||
|
||||
const validDef = validationResult.data;
|
||||
|
||||
const relativePathWithExt = path.relative(baseDir, filePath);
|
||||
const relativePath = relativePathWithExt.substring(
|
||||
0,
|
||||
relativePathWithExt.length - 5, // length of '.toml'
|
||||
);
|
||||
const baseCommandName = relativePath
|
||||
.split(path.sep)
|
||||
// Sanitize each path segment to prevent ambiguity. Since ':' is our
|
||||
// namespace separator, we replace any literal colons in filenames
|
||||
// with underscores to avoid naming conflicts.
|
||||
.map((segment) => segment.replaceAll(':', '_'))
|
||||
.join(':');
|
||||
|
||||
// Add extension name tag for extension commands
|
||||
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
|
||||
let description = validDef.description || defaultDescription;
|
||||
if (extensionName) {
|
||||
description = `[${extensionName}] ${description}`;
|
||||
}
|
||||
|
||||
const processors: IPromptProcessor[] = [];
|
||||
const usesArgs = validDef.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
|
||||
const usesShellInjection = validDef.prompt.includes(
|
||||
SHELL_INJECTION_TRIGGER,
|
||||
);
|
||||
const usesAtFileInjection = validDef.prompt.includes(
|
||||
AT_FILE_INJECTION_TRIGGER,
|
||||
);
|
||||
|
||||
// 1. @-File Injection (Security First).
|
||||
// This runs first to ensure we're not executing shell commands that
|
||||
// could dynamically generate malicious @-paths.
|
||||
if (usesAtFileInjection) {
|
||||
processors.push(new AtFileProcessor(baseCommandName));
|
||||
}
|
||||
|
||||
// 2. Argument and Shell Injection.
|
||||
// This runs after file content has been safely injected.
|
||||
if (usesShellInjection || usesArgs) {
|
||||
processors.push(new ShellProcessor(baseCommandName));
|
||||
}
|
||||
|
||||
// 3. Default Argument Handling.
|
||||
// Appends the raw invocation if no explicit {{args}} are used.
|
||||
if (!usesArgs) {
|
||||
processors.push(new DefaultArgumentProcessor());
|
||||
}
|
||||
|
||||
return {
|
||||
name: baseCommandName,
|
||||
description,
|
||||
kind: CommandKind.FILE,
|
||||
// Use factory to create command
|
||||
return createSlashCommandFromDefinition(
|
||||
filePath,
|
||||
baseDir,
|
||||
validDef,
|
||||
extensionName,
|
||||
action: async (
|
||||
context: CommandContext,
|
||||
_args: string,
|
||||
): Promise<SlashCommandActionReturn> => {
|
||||
if (!context.invocation) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Critical error: Command '${baseCommandName}' was executed without invocation context.`,
|
||||
);
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: [{ text: validDef.prompt }], // Fallback to unprocessed prompt
|
||||
};
|
||||
}
|
||||
'.toml',
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
let processedContent: PromptPipelineContent = [
|
||||
{ text: validDef.prompt },
|
||||
];
|
||||
for (const processor of processors) {
|
||||
processedContent = await processor.process(
|
||||
processedContent,
|
||||
context,
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Parses a single .md file and transforms it into a SlashCommand object.
|
||||
* @param filePath The absolute path to the .md file.
|
||||
* @param baseDir The root command directory for name calculation.
|
||||
* @param extensionName Optional extension name to prefix commands with.
|
||||
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
|
||||
*/
|
||||
private async parseAndAdaptMarkdownFile(
|
||||
filePath: string,
|
||||
baseDir: string,
|
||||
extensionName?: string,
|
||||
): Promise<SlashCommand | null> {
|
||||
let fileContent: string;
|
||||
try {
|
||||
fileContent = await fs.readFile(filePath, 'utf-8');
|
||||
} catch (error: unknown) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Failed to read file ${filePath}:`,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: processedContent,
|
||||
};
|
||||
} catch (e) {
|
||||
// Check if it's our specific error type
|
||||
if (e instanceof ConfirmationRequiredError) {
|
||||
// Halt and request confirmation from the UI layer.
|
||||
return {
|
||||
type: 'confirm_shell_commands',
|
||||
commandsToConfirm: e.commandsToConfirm,
|
||||
originalInvocation: {
|
||||
raw: context.invocation.raw,
|
||||
},
|
||||
};
|
||||
}
|
||||
// Re-throw other errors to be handled by the global error handler.
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
let parsed: ReturnType<typeof parseMarkdownCommand>;
|
||||
try {
|
||||
parsed = parseMarkdownCommand(fileContent);
|
||||
} catch (error: unknown) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Failed to parse Markdown file ${filePath}:`,
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const validationResult = MarkdownCommandDefSchema.safeParse(parsed);
|
||||
|
||||
if (!validationResult.success) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Skipping invalid command file: ${filePath}. Validation errors:`,
|
||||
validationResult.error.flatten(),
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const validDef = validationResult.data;
|
||||
|
||||
// Convert to CommandDefinition format
|
||||
const definition: CommandDefinition = {
|
||||
prompt: validDef.prompt,
|
||||
description:
|
||||
validDef.frontmatter?.description &&
|
||||
typeof validDef.frontmatter.description === 'string'
|
||||
? validDef.frontmatter.description
|
||||
: undefined,
|
||||
};
|
||||
|
||||
// Use factory to create command
|
||||
return createSlashCommandFromDefinition(
|
||||
filePath,
|
||||
baseDir,
|
||||
definition,
|
||||
extensionName,
|
||||
'.md',
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
154 packages/cli/src/services/command-factory.ts Normal file
@@ -0,0 +1,154 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* This file contains helper functions for FileCommandLoader to create SlashCommand
|
||||
* objects from parsed command definitions (TOML or Markdown).
|
||||
*/
|
||||
|
||||
import path from 'node:path';
|
||||
import type {
|
||||
CommandContext,
|
||||
SlashCommand,
|
||||
SlashCommandActionReturn,
|
||||
} from '../ui/commands/types.js';
|
||||
import { CommandKind } from '../ui/commands/types.js';
|
||||
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
|
||||
import type {
|
||||
IPromptProcessor,
|
||||
PromptPipelineContent,
|
||||
} from './prompt-processors/types.js';
|
||||
import {
|
||||
SHORTHAND_ARGS_PLACEHOLDER,
|
||||
SHELL_INJECTION_TRIGGER,
|
||||
AT_FILE_INJECTION_TRIGGER,
|
||||
} from './prompt-processors/types.js';
|
||||
import {
|
||||
ConfirmationRequiredError,
|
||||
ShellProcessor,
|
||||
} from './prompt-processors/shellProcessor.js';
|
||||
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
|
||||
|
||||
export interface CommandDefinition {
|
||||
prompt: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a SlashCommand from a parsed command definition.
|
||||
* This function is used by both TOML and Markdown command loaders.
|
||||
*
|
||||
* @param filePath The absolute path to the command file
|
||||
* @param baseDir The root command directory for name calculation
|
||||
* @param definition The parsed command definition (prompt and optional description)
|
||||
* @param extensionName Optional extension name to prefix commands with
|
||||
* @param fileExtension The file extension (e.g., '.toml' or '.md')
|
||||
* @returns A SlashCommand object
|
||||
*/
|
||||
export function createSlashCommandFromDefinition(
|
||||
filePath: string,
|
||||
baseDir: string,
|
||||
definition: CommandDefinition,
|
||||
extensionName: string | undefined,
|
||||
fileExtension: string,
|
||||
): SlashCommand {
|
||||
const relativePathWithExt = path.relative(baseDir, filePath);
|
||||
const relativePath = relativePathWithExt.substring(
|
||||
0,
|
||||
relativePathWithExt.length - fileExtension.length,
|
||||
);
|
||||
const baseCommandName = relativePath
|
||||
.split(path.sep)
|
||||
// Sanitize each path segment to prevent ambiguity. Since ':' is our
|
||||
// namespace separator, we replace any literal colons in filenames
|
||||
// with underscores to avoid naming conflicts.
|
||||
.map((segment) => segment.replaceAll(':', '_'))
|
||||
.join(':');
|
||||
|
||||
// Add extension name tag for extension commands
|
||||
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
|
||||
let description = definition.description || defaultDescription;
|
||||
if (extensionName) {
|
||||
description = `[${extensionName}] ${description}`;
|
||||
}
|
||||
|
||||
const processors: IPromptProcessor[] = [];
|
||||
const usesArgs = definition.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
|
||||
const usesShellInjection = definition.prompt.includes(
|
||||
SHELL_INJECTION_TRIGGER,
|
||||
);
|
||||
const usesAtFileInjection = definition.prompt.includes(
|
||||
AT_FILE_INJECTION_TRIGGER,
|
||||
);
|
||||
|
||||
// 1. @-File Injection (Security First).
|
||||
// This runs first to ensure we're not executing shell commands that
|
||||
// could dynamically generate malicious @-paths.
|
||||
if (usesAtFileInjection) {
|
||||
processors.push(new AtFileProcessor(baseCommandName));
|
||||
}
|
||||
|
||||
// 2. Argument and Shell Injection.
|
||||
// This runs after file content has been safely injected.
|
||||
if (usesShellInjection || usesArgs) {
|
||||
processors.push(new ShellProcessor(baseCommandName));
|
||||
}
|
||||
|
||||
// 3. Default Argument Handling.
|
||||
// Appends the raw invocation if no explicit {{args}} are used.
|
||||
if (!usesArgs) {
|
||||
processors.push(new DefaultArgumentProcessor());
|
||||
}
|
||||
|
||||
return {
|
||||
name: baseCommandName,
|
||||
description,
|
||||
kind: CommandKind.FILE,
|
||||
extensionName,
|
||||
action: async (
|
||||
context: CommandContext,
|
||||
_args: string,
|
||||
): Promise<SlashCommandActionReturn> => {
|
||||
if (!context.invocation) {
|
||||
console.error(
|
||||
`[FileCommandLoader] Critical error: Command '${baseCommandName}' was executed without invocation context.`,
|
||||
);
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: [{ text: definition.prompt }], // Fallback to unprocessed prompt
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
let processedContent: PromptPipelineContent = [
|
||||
{ text: definition.prompt },
|
||||
];
|
||||
for (const processor of processors) {
|
||||
processedContent = await processor.process(processedContent, context);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'submit_prompt',
|
||||
content: processedContent,
|
||||
};
|
||||
} catch (e) {
|
||||
// Check if it's our specific error type
|
||||
if (e instanceof ConfirmationRequiredError) {
|
||||
// Halt and request confirmation from the UI layer.
|
||||
return {
|
||||
type: 'confirm_shell_commands',
|
||||
commandsToConfirm: e.commandsToConfirm,
|
||||
originalInvocation: {
|
||||
raw: context.invocation.raw,
|
||||
},
|
||||
};
|
||||
}
|
||||
// Re-throw other errors to be handled by the global error handler.
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
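A minimal sketch of calling the factory directly, using only behavior visible in the code above; the file path and prompt are hypothetical, and '{{args}}' is assumed to be the value of SHORTHAND_ARGS_PLACEHOLDER referenced in the comments.

// Hypothetical direct use of the factory:
const cmd = createSlashCommandFromDefinition(
  '/tmp/commands/deploy.md',                              // filePath (illustrative)
  '/tmp/commands',                                        // baseDir -> name 'deploy'
  { prompt: 'Deploy {{args}} to staging', description: 'Deploy helper' },
  undefined,                                              // no extension name prefix
  '.md',
);
// cmd.name === 'deploy' and cmd.kind === CommandKind.FILE; because the prompt
// contains '{{args}}', a ShellProcessor is attached and the
// DefaultArgumentProcessor is skipped.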
253 packages/cli/src/services/command-migration-tool.test.ts Normal file
@@ -0,0 +1,253 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import os from 'node:os';
|
||||
import {
|
||||
detectTomlCommands,
|
||||
migrateTomlCommands,
|
||||
generateMigrationPrompt,
|
||||
} from './command-migration-tool.js';
|
||||
|
||||
describe('command-migration-tool', () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-migration-test-'));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('detectTomlCommands', () => {
|
||||
it('should detect TOML files in directory', async () => {
|
||||
// Create some TOML files
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'cmd1.toml'),
|
||||
'prompt = "test"',
|
||||
'utf-8',
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'cmd2.toml'),
|
||||
'prompt = "test"',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const tomlFiles = await detectTomlCommands(tempDir);
|
||||
|
||||
expect(tomlFiles).toHaveLength(2);
|
||||
expect(tomlFiles).toContain('cmd1.toml');
|
||||
expect(tomlFiles).toContain('cmd2.toml');
|
||||
});
|
||||
|
||||
it('should detect TOML files in subdirectories', async () => {
|
||||
const subdir = path.join(tempDir, 'subdir');
|
||||
await fs.mkdir(subdir);
|
||||
await fs.writeFile(
|
||||
path.join(subdir, 'nested.toml'),
|
||||
'prompt = "test"',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const tomlFiles = await detectTomlCommands(tempDir);
|
||||
|
||||
expect(tomlFiles).toContain('subdir/nested.toml');
|
||||
});
|
||||
|
||||
it('should return empty array for non-existent directory', async () => {
|
||||
const nonExistent = path.join(tempDir, 'does-not-exist');
|
||||
|
||||
const tomlFiles = await detectTomlCommands(nonExistent);
|
||||
|
||||
expect(tomlFiles).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not detect non-TOML files', async () => {
|
||||
await fs.writeFile(path.join(tempDir, 'file.txt'), 'text', 'utf-8');
|
||||
await fs.writeFile(path.join(tempDir, 'file.md'), 'markdown', 'utf-8');
|
||||
|
||||
const tomlFiles = await detectTomlCommands(tempDir);
|
||||
|
||||
expect(tomlFiles).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
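The migration options exercised by the tests below can be summarized in one call; this sketch uses only the fields that appear in those tests, and the directory path is hypothetical.

// Sketch of a migration run over a command directory (path is illustrative):
const result = await migrateTomlCommands({
  commandDir: '/home/user/.qwen/commands',
  createBackup: true,    // originals are renamed to <name>.toml.backup
  deleteOriginal: false, // with createBackup: false, the .toml is left in place
});
// result.success is false when any file fails (e.g. the target .md already
// exists); result.convertedFiles lists migrated .toml files and
// result.failedFiles carries { file, error } entries.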
describe('migrateTomlCommands', () => {
|
||||
it('should migrate TOML file to Markdown', async () => {
|
||||
const tomlContent = `prompt = "Test prompt"
|
||||
description = "Test description"`;
|
||||
|
||||
await fs.writeFile(path.join(tempDir, 'test.toml'), tomlContent, 'utf-8');
|
||||
|
||||
const result = await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
createBackup: true,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.convertedFiles).toContain('test.toml');
|
||||
expect(result.failedFiles).toHaveLength(0);
|
||||
|
||||
// Check Markdown file was created
|
||||
const mdPath = path.join(tempDir, 'test.md');
|
||||
const mdContent = await fs.readFile(mdPath, 'utf-8');
|
||||
expect(mdContent).toContain('description: Test description');
|
||||
expect(mdContent).toContain('Test prompt');
|
||||
|
||||
// Check backup was created (original renamed to .toml.backup)
|
||||
const backupPath = path.join(tempDir, 'test.toml.backup');
|
||||
const backupExists = await fs
|
||||
.access(backupPath)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(backupExists).toBe(true);
|
||||
|
||||
// Original .toml file should not exist (renamed to .backup)
|
||||
const tomlExists = await fs
|
||||
.access(path.join(tempDir, 'test.toml'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(tomlExists).toBe(false);
|
||||
});
|
||||
|
||||
it('should delete original TOML when deleteOriginal is true', async () => {
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'delete-me.toml'),
|
||||
'prompt = "Test"',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
createBackup: false,
|
||||
deleteOriginal: true,
|
||||
});
|
||||
|
||||
// Original should be deleted
|
||||
const tomlExists = await fs
|
||||
.access(path.join(tempDir, 'delete-me.toml'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(tomlExists).toBe(false);
|
||||
|
||||
// Markdown should exist
|
||||
const mdExists = await fs
|
||||
.access(path.join(tempDir, 'delete-me.md'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(mdExists).toBe(true);
|
||||
|
||||
// Backup should not exist (createBackup was false)
|
||||
const backupExists = await fs
|
||||
.access(path.join(tempDir, 'delete-me.toml.backup'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(backupExists).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail if Markdown file already exists', async () => {
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'existing.toml'),
|
||||
'prompt = "Test"',
|
||||
'utf-8',
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'existing.md'),
|
||||
'Already exists',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const result = await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
createBackup: false,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.failedFiles).toHaveLength(1);
|
||||
expect(result.failedFiles[0].file).toBe('existing.toml');
|
||||
expect(result.failedFiles[0].error).toContain('already exists');
|
||||
});
|
||||
|
||||
it('should handle migration without backup', async () => {
|
||||
await fs.writeFile(
|
||||
path.join(tempDir, 'no-backup.toml'),
|
||||
'prompt = "Test"',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const result = await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
createBackup: false,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// Original TOML file should still exist (no backup, no delete)
|
||||
const tomlExists = await fs
|
||||
.access(path.join(tempDir, 'no-backup.toml'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(tomlExists).toBe(true);
|
||||
|
||||
// Backup should not exist
|
||||
const backupExists = await fs
|
||||
.access(path.join(tempDir, 'no-backup.toml.backup'))
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
expect(backupExists).toBe(false);
|
||||
});
|
||||
|
||||
it('should return success with empty results for no TOML files', async () => {
|
||||
const result = await migrateTomlCommands({
|
||||
commandDir: tempDir,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.convertedFiles).toHaveLength(0);
|
||||
expect(result.failedFiles).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateMigrationPrompt', () => {
|
||||
it('should generate prompt for few files', () => {
|
||||
const files = ['cmd1.toml', 'cmd2.toml'];
|
||||
|
||||
const prompt = generateMigrationPrompt(files);
|
||||
|
||||
expect(prompt).toContain('Found 2 command files');
|
||||
expect(prompt).toContain('cmd1.toml');
|
||||
expect(prompt).toContain('cmd2.toml');
|
||||
expect(prompt).toContain('qwen-code migrate-commands');
|
||||
});
|
||||
|
||||
it('should truncate file list for many files', () => {
|
||||
const files = Array.from({ length: 10 }, (_, i) => `cmd${i}.toml`);
|
||||
|
||||
const prompt = generateMigrationPrompt(files);
|
||||
|
||||
expect(prompt).toContain('Found 10 command files');
|
||||
expect(prompt).toContain('... and 7 more');
|
||||
});
|
||||
|
||||
it('should return empty string for no files', () => {
|
||||
const prompt = generateMigrationPrompt([]);
|
||||
|
||||
expect(prompt).toBe('');
|
||||
});
|
||||
|
||||
it('should use singular form for single file', () => {
|
||||
const prompt = generateMigrationPrompt(['single.toml']);
|
||||
|
||||
expect(prompt).toContain('Found 1 command file');
|
||||
// Don't check for plural since "files" appears in other parts of the message
|
||||
});
|
||||
});
|
||||
});
|
||||
packages/cli/src/services/command-migration-tool.ts (new file, 169 lines)
@@ -0,0 +1,169 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * Tool for migrating TOML commands to Markdown format.
 */

import { promises as fs } from 'node:fs';
import path from 'node:path';
import { glob } from 'glob';
import { convertTomlToMarkdown } from '@qwen-code/qwen-code-core';

export interface MigrationResult {
  success: boolean;
  convertedFiles: string[];
  failedFiles: Array<{ file: string; error: string }>;
}

export interface MigrationOptions {
  /** Directory containing command files */
  commandDir: string;
  /** Whether to create backups (default: true) */
  createBackup?: boolean;
  /** Whether to delete original TOML files after migration (default: false) */
  deleteOriginal?: boolean;
}

/**
 * Scans a directory for TOML command files.
 * @param commandDir Directory to scan
 * @returns Array of TOML file paths (relative to commandDir)
 */
export async function detectTomlCommands(
  commandDir: string,
): Promise<string[]> {
  try {
    await fs.access(commandDir);
  } catch {
    // Directory doesn't exist
    return [];
  }

  const tomlFiles = await glob('**/*.toml', {
    cwd: commandDir,
    nodir: true,
    dot: false,
  });

  return tomlFiles;
}

/**
 * Migrates TOML command files to Markdown format.
 * @param options Migration options
 * @returns Migration result with details
 */
export async function migrateTomlCommands(
  options: MigrationOptions,
): Promise<MigrationResult> {
  const { commandDir, createBackup = true, deleteOriginal = false } = options;

  const result: MigrationResult = {
    success: true,
    convertedFiles: [],
    failedFiles: [],
  };

  // Detect TOML files
  const tomlFiles = await detectTomlCommands(commandDir);

  if (tomlFiles.length === 0) {
    return result;
  }

  // Process each TOML file
  for (const relativeFile of tomlFiles) {
    const tomlPath = path.join(commandDir, relativeFile);

    try {
      // Read TOML file
      const tomlContent = await fs.readFile(tomlPath, 'utf-8');

      // Convert to Markdown
      const markdownContent = convertTomlToMarkdown(tomlContent);

      // Generate Markdown file path (same location, .md extension)
      const markdownPath = tomlPath.replace(/\.toml$/, '.md');

      // Check if Markdown file already exists
      try {
        await fs.access(markdownPath);
        throw new Error(
          `Markdown file already exists: ${path.basename(markdownPath)}`,
        );
      } catch (error) {
        if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
          throw error;
        }
        // File doesn't exist, continue
      }

      // Write Markdown file
      await fs.writeFile(markdownPath, markdownContent, 'utf-8');

      // Backup original if requested (rename to .toml.backup)
      if (createBackup) {
        const backupPath = `${tomlPath}.backup`;
        await fs.rename(tomlPath, backupPath);
      } else if (deleteOriginal) {
        // Delete original if requested and no backup
        await fs.unlink(tomlPath);
      }

      result.convertedFiles.push(relativeFile);
    } catch (error) {
      result.success = false;
      result.failedFiles.push({
        file: relativeFile,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }

  return result;
}

/**
 * Generates a migration report message.
 * @param tomlFiles List of TOML files found
 * @returns Human-readable migration prompt message
 */
export function generateMigrationPrompt(tomlFiles: string[]): string {
  if (tomlFiles.length === 0) {
    return '';
  }

  const count = tomlFiles.length;
  const fileList =
    tomlFiles.length <= 5
      ? tomlFiles.map((f) => ` - ${f}`).join('\n')
      : ` - ${tomlFiles.slice(0, 3).join('\n - ')}\n - ... and ${tomlFiles.length - 3} more`;

  return `
⚠️ TOML Command Format Deprecation Notice

Found ${count} command file${count > 1 ? 's' : ''} in TOML format:
${fileList}

The TOML format for commands is being deprecated in favor of Markdown format.
Markdown format is more readable and easier to edit.

You can migrate these files automatically using:
qwen-code migrate-commands

Or manually convert each file:
- TOML: prompt = "..." / description = "..."
- Markdown: YAML frontmatter + content

The migration tool will:
✓ Convert TOML files to Markdown
✓ Create backups of original files
✓ Preserve all command functionality

TOML format will continue to work for now, but migration is recommended.
`.trim();
}

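A minimal usage sketch of the migration module above (illustrative only — the directory path and the surrounding CLI wiring are assumptions, not part of this diff):

import {
  detectTomlCommands,
  migrateTomlCommands,
  generateMigrationPrompt,
} from './command-migration-tool.js';

// Hypothetical commands directory; the CLI resolves this via its Storage helpers.
const commandDir = '/home/user/.qwen/commands';

const tomlFiles = await detectTomlCommands(commandDir);
if (tomlFiles.length > 0) {
  // Show the deprecation notice, then convert, keeping .toml.backup copies.
  console.log(generateMigrationPrompt(tomlFiles));
  const result = await migrateTomlCommands({ commandDir, createBackup: true });
  console.log(
    `Converted ${result.convertedFiles.length} file(s), ${result.failedFiles.length} failure(s).`,
  );
}
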
packages/cli/src/services/markdown-command-parser.test.ts (new file, 144 lines)
@@ -0,0 +1,144 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
parseMarkdownCommand,
|
||||
MarkdownCommandDefSchema,
|
||||
} from './markdown-command-parser.js';
|
||||
|
||||
describe('parseMarkdownCommand', () => {
|
||||
it('should parse markdown with YAML frontmatter', () => {
|
||||
const content = `---
|
||||
description: Test command
|
||||
---
|
||||
|
||||
This is the prompt content.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result).toEqual({
|
||||
frontmatter: {
|
||||
description: 'Test command',
|
||||
},
|
||||
prompt: 'This is the prompt content.',
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse markdown without frontmatter', () => {
|
||||
const content = 'This is just a prompt without frontmatter.';
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result).toEqual({
|
||||
prompt: 'This is just a prompt without frontmatter.',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multi-line prompts', () => {
|
||||
const content = `---
|
||||
description: Multi-line test
|
||||
---
|
||||
|
||||
First line of prompt.
|
||||
Second line of prompt.
|
||||
Third line of prompt.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.prompt).toBe(
|
||||
'First line of prompt.\nSecond line of prompt.\nThird line of prompt.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should trim whitespace from prompt', () => {
|
||||
const content = `---
|
||||
description: Whitespace test
|
||||
---
|
||||
|
||||
Prompt with leading and trailing spaces
|
||||
`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.prompt).toBe('Prompt with leading and trailing spaces');
|
||||
});
|
||||
|
||||
it('should handle empty frontmatter', () => {
|
||||
const content = `---
|
||||
---
|
||||
|
||||
Prompt content after empty frontmatter.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
// Empty YAML frontmatter returns undefined, not {}
|
||||
expect(result.frontmatter).toBeUndefined();
|
||||
expect(result.prompt).toBe('Prompt content after empty frontmatter.');
|
||||
});
|
||||
|
||||
it('should handle invalid YAML frontmatter gracefully', () => {
|
||||
// The YAML parser we use is quite tolerant, so most "invalid" YAML
|
||||
// actually parses successfully. This test verifies that behavior.
|
||||
const content = `---
|
||||
description: test
|
||||
---
|
||||
|
||||
Prompt content.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.frontmatter).toBeDefined();
|
||||
expect(result.prompt).toBe('Prompt content.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('MarkdownCommandDefSchema', () => {
|
||||
it('should validate valid markdown command def', () => {
|
||||
const validDef = {
|
||||
frontmatter: {
|
||||
description: 'Test description',
|
||||
},
|
||||
prompt: 'Test prompt',
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(validDef);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate markdown command def without frontmatter', () => {
|
||||
const validDef = {
|
||||
prompt: 'Test prompt',
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(validDef);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject command def without prompt', () => {
|
||||
const invalidDef = {
|
||||
frontmatter: {
|
||||
description: 'Test description',
|
||||
},
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject command def with non-string prompt', () => {
|
||||
const invalidDef = {
|
||||
prompt: 123,
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
packages/cli/src/services/markdown-command-parser.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { z } from 'zod';
import { parse as parseYaml } from '@qwen-code/qwen-code-core';

/**
 * Defines the Zod schema for a Markdown command definition file.
 * The frontmatter contains optional metadata, and the body is the prompt.
 */
export const MarkdownCommandDefSchema = z.object({
  frontmatter: z
    .object({
      description: z.string().optional(),
    })
    .optional(),
  prompt: z.string({
    required_error: 'The prompt content is required.',
    invalid_type_error: 'The prompt content must be a string.',
  }),
});

export type MarkdownCommandDef = z.infer<typeof MarkdownCommandDefSchema>;

/**
 * Parses a Markdown command file with optional YAML frontmatter.
 * @param content The file content
 * @returns Parsed command definition with frontmatter and prompt
 */
export function parseMarkdownCommand(content: string): MarkdownCommandDef {
  // Match YAML frontmatter pattern: ---\n...\n---\n
  // Allow empty frontmatter: ---\n---\n
  // Use (?:[\s\S]*?) to make the frontmatter content optional
  const frontmatterRegex = /^---\n([\s\S]*?)---\n([\s\S]*)$/;
  const match = content.match(frontmatterRegex);

  if (!match) {
    // No frontmatter, entire content is the prompt
    return {
      prompt: content.trim(),
    };
  }

  const [, frontmatterYaml, body] = match;

  // Parse YAML frontmatter if not empty
  let frontmatter: Record<string, unknown> | undefined;
  if (frontmatterYaml.trim()) {
    try {
      frontmatter = parseYaml(frontmatterYaml) as Record<string, unknown>;
    } catch (error) {
      throw new Error(
        `Failed to parse YAML frontmatter: ${error instanceof Error ? error.message : String(error)}`,
      );
    }
  }

  return {
    frontmatter,
    prompt: body.trim(),
  };
}

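A small sketch of how the parser above might be exercised (illustrative; the input string is made up):

import {
  parseMarkdownCommand,
  MarkdownCommandDefSchema,
} from './markdown-command-parser.js';

const def = parseMarkdownCommand('---\ndescription: Greet\n---\n\nSay hello.');
// def => { frontmatter: { description: 'Greet' }, prompt: 'Say hello.' }

// Optional: validate the shape before using it as a command definition.
const checked = MarkdownCommandDefSchema.parse(def);
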
packages/cli/src/services/test-commands/example.md (new file, 5 lines)
@@ -0,0 +1,5 @@
---
description: Example markdown command
---

This is an example prompt from a markdown file.

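Fed to parseMarkdownCommand, this fixture should yield roughly the following (shown as a sketch, not an assertion from the test suite):

// parseMarkdownCommand(exampleMdContents) =>
// {
//   frontmatter: { description: 'Example markdown command' },
//   prompt: 'This is an example prompt from a markdown file.',
// }
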
@@ -1,49 +0,0 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import * as fs from 'node:fs';
import * as path from 'node:path';
import {
  EXTENSIONS_CONFIG_FILENAME,
  INSTALL_METADATA_FILENAME,
} from '../config/extension.js';
import {
  type MCPServerConfig,
  type ExtensionInstallMetadata,
} from '@qwen-code/qwen-code-core';

export function createExtension({
  extensionsDir = 'extensions-dir',
  name = 'my-extension',
  version = '1.0.0',
  addContextFile = false,
  contextFileName = undefined as string | undefined,
  mcpServers = {} as Record<string, MCPServerConfig>,
  installMetadata = undefined as ExtensionInstallMetadata | undefined,
} = {}): string {
  const extDir = path.join(extensionsDir, name);
  fs.mkdirSync(extDir, { recursive: true });
  fs.writeFileSync(
    path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
    JSON.stringify({ name, version, contextFileName, mcpServers }),
  );

  if (addContextFile) {
    fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
  }

  if (contextFileName) {
    fs.writeFileSync(path.join(extDir, contextFileName), 'context');
  }

  if (installMetadata) {
    fs.writeFileSync(
      path.join(extDir, INSTALL_METADATA_FILENAME),
      JSON.stringify(installMetadata),
    );
  }
  return extDir;
}

@@ -76,7 +76,6 @@ vi.mock('./hooks/useFolderTrust.js');
vi.mock('./hooks/useIdeTrustListener.js');
vi.mock('./hooks/useMessageQueue.js');
vi.mock('./hooks/useAutoAcceptIndicator.js');
vi.mock('./hooks/useWorkspaceMigration.js');
vi.mock('./hooks/useGitBranchName.js');
vi.mock('./contexts/VimModeContext.js');
vi.mock('./contexts/SessionContext.js');
@@ -103,7 +102,6 @@ import { useFolderTrust } from './hooks/useFolderTrust.js';
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
import { useMessageQueue } from './hooks/useMessageQueue.js';
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
import { useGitBranchName } from './hooks/useGitBranchName.js';
import { useVimMode } from './contexts/VimModeContext.js';
import { useSessionStats } from './contexts/SessionContext.js';
@@ -134,7 +132,6 @@ describe('AppContainer State Management', () => {
  const mockedUseIdeTrustListener = useIdeTrustListener as Mock;
  const mockedUseMessageQueue = useMessageQueue as Mock;
  const mockedUseAutoAcceptIndicator = useAutoAcceptIndicator as Mock;
  const mockedUseWorkspaceMigration = useWorkspaceMigration as Mock;
  const mockedUseGitBranchName = useGitBranchName as Mock;
  const mockedUseVimMode = useVimMode as Mock;
  const mockedUseSessionStats = useSessionStats as Mock;
@@ -239,12 +236,6 @@ describe('AppContainer State Management', () => {
      getQueuedMessagesText: vi.fn().mockReturnValue(''),
    });
    mockedUseAutoAcceptIndicator.mockReturnValue(false);
    mockedUseWorkspaceMigration.mockReturnValue({
      showWorkspaceMigrationDialog: false,
      workspaceExtensions: [],
      onWorkspaceMigrationDialogOpen: vi.fn(),
      onWorkspaceMigrationDialogClose: vi.fn(),
    });
    mockedUseGitBranchName.mockReturnValue('main');
    mockedUseVimMode.mockReturnValue({
      isVimEnabled: false,

@@ -37,6 +37,7 @@ import {
|
||||
getErrorMessage,
|
||||
getAllGeminiMdFilenames,
|
||||
ShellExecutionService,
|
||||
Storage,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { buildResumedHistoryItems } from './utils/resumeHistoryUtils.js';
|
||||
import { validateAuthMethod } from '../config/auth.js';
|
||||
@@ -75,6 +76,9 @@ import { useLoadingIndicator } from './hooks/useLoadingIndicator.js';
|
||||
import { useFolderTrust } from './hooks/useFolderTrust.js';
|
||||
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
|
||||
import { type IdeIntegrationNudgeResult } from './IdeIntegrationNudge.js';
|
||||
import { type CommandMigrationNudgeResult } from './CommandFormatMigrationNudge.js';
|
||||
import { useCommandMigration } from './hooks/useCommandMigration.js';
|
||||
import { migrateTomlCommands } from '../services/command-migration-tool.js';
|
||||
import { appEvents, AppEvent } from '../utils/events.js';
|
||||
import { type UpdateObject } from './utils/updateCheck.js';
|
||||
import { setUpdateHandler } from '../utils/handleAutoUpdate.js';
|
||||
@@ -82,10 +86,12 @@ import { ConsolePatcher } from './utils/ConsolePatcher.js';
|
||||
import { registerCleanup, runExitCleanup } from '../utils/cleanup.js';
|
||||
import { useMessageQueue } from './hooks/useMessageQueue.js';
|
||||
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
|
||||
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
|
||||
import { useSessionStats } from './contexts/SessionContext.js';
|
||||
import { useGitBranchName } from './hooks/useGitBranchName.js';
|
||||
import { useExtensionUpdates } from './hooks/useExtensionUpdates.js';
|
||||
import {
|
||||
useExtensionUpdates,
|
||||
useConfirmUpdateRequests,
|
||||
} from './hooks/useExtensionUpdates.js';
|
||||
import { ShellFocusContext } from './contexts/ShellFocusContext.js';
|
||||
import { t } from '../i18n/index.js';
|
||||
import { useWelcomeBack } from './hooks/useWelcomeBack.js';
|
||||
@@ -96,6 +102,10 @@ import { processVisionSwitchOutcome } from './hooks/useVisionAutoSwitch.js';
|
||||
import { useSubagentCreateDialog } from './hooks/useSubagentCreateDialog.js';
|
||||
import { useAgentsManagerDialog } from './hooks/useAgentsManagerDialog.js';
|
||||
import { useAttentionNotifications } from './hooks/useAttentionNotifications.js';
|
||||
import {
|
||||
requestConsentInteractive,
|
||||
requestConsentOrFail,
|
||||
} from '../commands/extensions/consent.js';
|
||||
|
||||
const CTRL_EXIT_PROMPT_DURATION_MS = 1000;
|
||||
|
||||
@@ -156,15 +166,23 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
config.isTrustedFolder(),
|
||||
);
|
||||
|
||||
const extensions = config.getExtensions();
|
||||
const extensionManager = config.getExtensionManager();
|
||||
|
||||
extensionManager.setRequestConsent(
|
||||
requestConsentOrFail.bind(null, (description) =>
|
||||
requestConsentInteractive(description, addConfirmUpdateExtensionRequest),
|
||||
),
|
||||
);
|
||||
|
||||
const { addConfirmUpdateExtensionRequest, confirmUpdateExtensionRequests } =
|
||||
useConfirmUpdateRequests();
|
||||
|
||||
const {
|
||||
extensionsUpdateState,
|
||||
extensionsUpdateStateInternal,
|
||||
dispatchExtensionStateUpdate,
|
||||
confirmUpdateExtensionRequests,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
} = useExtensionUpdates(
|
||||
extensions,
|
||||
extensionManager,
|
||||
historyManager.addItem,
|
||||
config.getWorkingDir(),
|
||||
);
|
||||
@@ -429,13 +447,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
remount: refreshStatic,
|
||||
});
|
||||
|
||||
const {
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
} = useWorkspaceMigration(settings);
|
||||
|
||||
const { toggleVimEnabled } = useVimMode();
|
||||
|
||||
const {
|
||||
@@ -571,11 +582,11 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
: [],
|
||||
config.getDebugMode(),
|
||||
config.getFileService(),
|
||||
settings.merged,
|
||||
config.getExtensionContextFilePaths(),
|
||||
config.isTrustedFolder(),
|
||||
settings.merged.context?.importFormat || 'tree', // Use setting or default to 'tree'
|
||||
config.getFileFilteringOptions(),
|
||||
config.getDiscoveryMaxDirs(),
|
||||
);
|
||||
|
||||
config.setUserMemory(memoryContent);
|
||||
@@ -838,6 +849,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
!idePromptAnswered,
|
||||
);
|
||||
|
||||
// Command migration nudge
|
||||
const {
|
||||
showMigrationNudge: shouldShowCommandMigrationNudge,
|
||||
tomlFiles: commandMigrationTomlFiles,
|
||||
setShowMigrationNudge: setShowCommandMigrationNudge,
|
||||
} = useCommandMigration(settings, config.storage);
|
||||
|
||||
const [showErrorDetails, setShowErrorDetails] = useState<boolean>(false);
|
||||
const [showToolDescriptions, setShowToolDescriptions] =
|
||||
useState<boolean>(false);
|
||||
@@ -933,6 +951,92 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
[handleSlashCommand, settings],
|
||||
);
|
||||
|
||||
const handleCommandMigrationComplete = useCallback(
|
||||
async (result: CommandMigrationNudgeResult) => {
|
||||
setShowCommandMigrationNudge(false);
|
||||
|
||||
if (result.userSelection === 'yes') {
|
||||
// Perform migration for both workspace and user levels
|
||||
try {
|
||||
const results = [];
|
||||
|
||||
// Migrate workspace commands
|
||||
const workspaceCommandsDir = config.storage.getProjectCommandsDir();
|
||||
const workspaceResult = await migrateTomlCommands({
|
||||
commandDir: workspaceCommandsDir,
|
||||
createBackup: true,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
if (
|
||||
workspaceResult.convertedFiles.length > 0 ||
|
||||
workspaceResult.failedFiles.length > 0
|
||||
) {
|
||||
results.push({ level: 'workspace', result: workspaceResult });
|
||||
}
|
||||
|
||||
// Migrate user commands
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const userResult = await migrateTomlCommands({
|
||||
commandDir: userCommandsDir,
|
||||
createBackup: true,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
if (
|
||||
userResult.convertedFiles.length > 0 ||
|
||||
userResult.failedFiles.length > 0
|
||||
) {
|
||||
results.push({ level: 'user', result: userResult });
|
||||
}
|
||||
|
||||
// Report results
|
||||
for (const { level, result: migrationResult } of results) {
|
||||
if (
|
||||
migrationResult.success &&
|
||||
migrationResult.convertedFiles.length > 0
|
||||
) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: `[${level}] Successfully migrated ${migrationResult.convertedFiles.length} command file${migrationResult.convertedFiles.length > 1 ? 's' : ''} to Markdown format. Original files backed up as .toml.backup`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
|
||||
if (migrationResult.failedFiles.length > 0) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: `[${level}] Failed to migrate ${migrationResult.failedFiles.length} file${migrationResult.failedFiles.length > 1 ? 's' : ''}:\n${migrationResult.failedFiles.map((f) => ` • ${f.file}: ${f.error}`).join('\n')}`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (results.length === 0) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'No TOML files found to migrate.',
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: `❌ Migration failed: ${getErrorMessage(error)}`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
[historyManager, setShowCommandMigrationNudge, config.storage],
|
||||
);
|
||||
|
||||
const { elapsedTime, currentLoadingPhrase } = useLoadingIndicator(
|
||||
streamingState,
|
||||
settings.merged.ui?.customWittyPhrases,
|
||||
@@ -1175,8 +1279,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
|
||||
const dialogsVisible =
|
||||
showWelcomeBackDialog ||
|
||||
showWorkspaceMigrationDialog ||
|
||||
shouldShowIdePrompt ||
|
||||
shouldShowCommandMigrationNudge ||
|
||||
isFolderTrustDialogOpen ||
|
||||
!!shellConfirmationRequest ||
|
||||
!!confirmationRequest ||
|
||||
@@ -1242,6 +1346,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
suggestionsWidth,
|
||||
isInputActive,
|
||||
shouldShowIdePrompt,
|
||||
shouldShowCommandMigrationNudge,
|
||||
commandMigrationTomlFiles,
|
||||
isFolderTrustDialogOpen: isFolderTrustDialogOpen ?? false,
|
||||
isTrustedFolder,
|
||||
constrainHeight,
|
||||
@@ -1258,8 +1364,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
historyRemountKey,
|
||||
messageQueue,
|
||||
showAutoAcceptIndicator,
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
currentModel,
|
||||
contextFileNames,
|
||||
errorCount,
|
||||
@@ -1331,6 +1435,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
suggestionsWidth,
|
||||
isInputActive,
|
||||
shouldShowIdePrompt,
|
||||
shouldShowCommandMigrationNudge,
|
||||
commandMigrationTomlFiles,
|
||||
isFolderTrustDialogOpen,
|
||||
isTrustedFolder,
|
||||
constrainHeight,
|
||||
@@ -1347,8 +1453,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
historyRemountKey,
|
||||
messageQueue,
|
||||
showAutoAcceptIndicator,
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
contextFileNames,
|
||||
errorCount,
|
||||
availableTerminalHeight,
|
||||
@@ -1402,14 +1506,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
setShellModeActive,
|
||||
vimHandleInput,
|
||||
handleIdePromptComplete,
|
||||
handleCommandMigrationComplete,
|
||||
handleFolderTrustSelect,
|
||||
setConstrainHeight,
|
||||
onEscapePromptChange: handleEscapePromptChange,
|
||||
refreshStatic,
|
||||
handleFinalSubmit,
|
||||
handleClearScreen,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
// Vision switch dialog
|
||||
handleVisionSwitchSelect,
|
||||
// Welcome back dialog
|
||||
@@ -1439,14 +1542,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
setShellModeActive,
|
||||
vimHandleInput,
|
||||
handleIdePromptComplete,
|
||||
handleCommandMigrationComplete,
|
||||
handleFolderTrustSelect,
|
||||
setConstrainHeight,
|
||||
handleEscapePromptChange,
|
||||
refreshStatic,
|
||||
handleFinalSubmit,
|
||||
handleClearScreen,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
handleVisionSwitchSelect,
|
||||
handleWelcomeBackSelection,
|
||||
handleWelcomeBackClose,
|
||||
|
||||
packages/cli/src/ui/CommandFormatMigrationNudge.tsx (new file, 94 lines)
@@ -0,0 +1,94 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { Box, Text } from 'ink';
import type { RadioSelectItem } from './components/shared/RadioButtonSelect.js';
import { RadioButtonSelect } from './components/shared/RadioButtonSelect.js';
import { useKeypress } from './hooks/useKeypress.js';
import { theme } from './semantic-colors.js';
import { t } from '../i18n/index.js';

export type CommandMigrationNudgeResult = {
  userSelection: 'yes' | 'no';
};

interface CommandFormatMigrationNudgeProps {
  tomlFiles: string[];
  onComplete: (result: CommandMigrationNudgeResult) => void;
}

export function CommandFormatMigrationNudge({
  tomlFiles,
  onComplete,
}: CommandFormatMigrationNudgeProps) {
  useKeypress(
    (key) => {
      if (key.name === 'escape') {
        onComplete({
          userSelection: 'no',
        });
      }
    },
    { isActive: true },
  );

  const OPTIONS: Array<RadioSelectItem<CommandMigrationNudgeResult>> = [
    {
      label: t('Yes'),
      value: {
        userSelection: 'yes',
      },
      key: 'Yes',
    },
    {
      label: t('No (esc)'),
      value: {
        userSelection: 'no',
      },
      key: 'No (esc)',
    },
  ];

  const count = tomlFiles.length;
  const fileList =
    count <= 3
      ? tomlFiles.map((f) => ` • ${f}`).join('\n')
      : ` • ${tomlFiles.slice(0, 2).join('\n • ')}\n • ${t('... and {{count}} more', { count: String(count - 2) })}`;

  return (
    <Box
      flexDirection="column"
      borderStyle="round"
      borderColor={theme.status.warning}
      padding={1}
      width="100%"
      marginLeft={1}
    >
      <Box marginBottom={1} flexDirection="column">
        <Text>
          <Text color={theme.status.warning}>{'⚠️ '}</Text>
          <Text bold>{t('Command Format Migration')}</Text>
        </Text>
        <Text color={theme.text.secondary}>
          {count > 1
            ? t('Found {{count}} TOML command files:', { count: String(count) })
            : t('Found {{count}} TOML command file:', { count: String(count) })}
        </Text>
        <Text color={theme.text.secondary}>{fileList}</Text>
        <Text>{''}</Text>
        <Text color={theme.text.secondary}>
          {t(
            'The TOML format is deprecated. Would you like to migrate them to Markdown format?',
          )}
        </Text>
        <Text color={theme.text.secondary}>
          {t('(Backups will be created and original files will be preserved)')}
        </Text>
      </Box>
      <RadioButtonSelect items={OPTIONS} onSelect={onComplete} />
    </Box>
  );
}

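A rough sketch of how the component above could be rendered from the Ink UI layer (the file list and the callback body are illustrative, not taken from this diff):

<CommandFormatMigrationNudge
  tomlFiles={['cmd1.toml', 'subdir/cmd2.toml']}
  onComplete={({ userSelection }) => {
    // 'yes' → run migrateTomlCommands for the user and workspace command dirs;
    // 'no' (or Esc) → dismiss the nudge and keep using the TOML files.
  }}
/>
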
@@ -4,11 +4,6 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
updateAllUpdatableExtensions,
|
||||
updateExtension,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { createMockCommandContext } from '../../test-utils/mockCommandContext.js';
|
||||
import { MessageType } from '../types.js';
|
||||
import { extensionsCommand } from './extensionsCommand.js';
|
||||
@@ -22,34 +17,59 @@ import {
|
||||
type MockedFunction,
|
||||
} from 'vitest';
|
||||
import { ExtensionUpdateState } from '../state/extensions.js';
|
||||
import {
|
||||
type Extension,
|
||||
ExtensionManager,
|
||||
parseInstallSource,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
|
||||
vi.mock('../../config/extensions/update.js', () => ({
|
||||
updateExtension: vi.fn(),
|
||||
updateAllUpdatableExtensions: vi.fn(),
|
||||
checkForAllExtensionUpdates: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockUpdateExtension = updateExtension as MockedFunction<
|
||||
typeof updateExtension
|
||||
>;
|
||||
|
||||
const mockUpdateAllUpdatableExtensions =
|
||||
updateAllUpdatableExtensions as MockedFunction<
|
||||
typeof updateAllUpdatableExtensions
|
||||
>;
|
||||
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
|
||||
return {
|
||||
...actual,
|
||||
parseInstallSource: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
const mockGetExtensions = vi.fn();
|
||||
const mockUpdateExtension = vi.fn();
|
||||
const mockUpdateAllUpdatableExtensions = vi.fn();
|
||||
const mockCheckForAllExtensionUpdates = vi.fn();
|
||||
const mockInstallExtension = vi.fn();
|
||||
const mockUninstallExtension = vi.fn();
|
||||
const mockGetLoadedExtensions = vi.fn();
|
||||
const mockEnableExtension = vi.fn();
|
||||
const mockDisableExtension = vi.fn();
|
||||
|
||||
const createMockExtensionManager = () => ({
|
||||
updateExtension: mockUpdateExtension,
|
||||
updateAllUpdatableExtensions: mockUpdateAllUpdatableExtensions,
|
||||
checkForAllExtensionUpdates: mockCheckForAllExtensionUpdates,
|
||||
installExtension: mockInstallExtension,
|
||||
uninstallExtension: mockUninstallExtension,
|
||||
getLoadedExtensions: mockGetLoadedExtensions,
|
||||
enableExtension: mockEnableExtension,
|
||||
disableExtension: mockDisableExtension,
|
||||
});
|
||||
|
||||
describe('extensionsCommand', () => {
|
||||
let mockContext: CommandContext;
|
||||
let mockExtensionManager: ReturnType<typeof createMockExtensionManager>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
mockExtensionManager = createMockExtensionManager();
|
||||
mockGetExtensions.mockReturnValue([]);
|
||||
mockGetLoadedExtensions.mockReturnValue([]);
|
||||
mockCheckForAllExtensionUpdates.mockResolvedValue(undefined);
|
||||
mockContext = createMockCommandContext({
|
||||
services: {
|
||||
config: {
|
||||
getExtensions: mockGetExtensions,
|
||||
getWorkingDir: () => '/test/dir',
|
||||
getExtensionManager: () =>
|
||||
mockExtensionManager as unknown as ExtensionManager,
|
||||
},
|
||||
},
|
||||
ui: {
|
||||
@@ -59,8 +79,9 @@ describe('extensionsCommand', () => {
|
||||
});
|
||||
|
||||
describe('list', () => {
|
||||
it('should add an EXTENSIONS_LIST item to the UI', async () => {
|
||||
it('should add an EXTENSIONS_LIST item to the UI when extensions exist', async () => {
|
||||
if (!extensionsCommand.action) throw new Error('Action not defined');
|
||||
mockGetExtensions.mockReturnValue([{ name: 'test-ext', isActive: true }]);
|
||||
await extensionsCommand.action(mockContext, '');
|
||||
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
@@ -70,6 +91,20 @@ describe('extensionsCommand', () => {
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should show info message when no extensions installed', async () => {
|
||||
if (!extensionsCommand.action) throw new Error('Action not defined');
|
||||
mockGetExtensions.mockReturnValue([]);
|
||||
await extensionsCommand.action(mockContext, '');
|
||||
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'No extensions installed. Run `/extensions explore` to check out the gallery.',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
@@ -93,6 +128,7 @@ describe('extensionsCommand', () => {
|
||||
});
|
||||
|
||||
it('should inform user if there are no extensions to update with --all', async () => {
|
||||
mockGetExtensions.mockReturnValue([{ name: 'ext-one', isActive: true }]);
|
||||
mockUpdateAllUpdatableExtensions.mockResolvedValue([]);
|
||||
await updateAction(mockContext, '--all');
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
@@ -105,6 +141,7 @@ describe('extensionsCommand', () => {
|
||||
});
|
||||
|
||||
it('should call setPendingItem and addItem in a finally block on success', async () => {
|
||||
mockGetExtensions.mockReturnValue([{ name: 'ext-one', isActive: true }]);
|
||||
mockUpdateAllUpdatableExtensions.mockResolvedValue([
|
||||
{
|
||||
name: 'ext-one',
|
||||
@@ -131,6 +168,7 @@ describe('extensionsCommand', () => {
|
||||
});
|
||||
|
||||
it('should call setPendingItem and addItem in a finally block on failure', async () => {
|
||||
mockGetExtensions.mockReturnValue([{ name: 'ext-one', isActive: true }]);
|
||||
mockUpdateAllUpdatableExtensions.mockRejectedValue(
|
||||
new Error('Something went wrong'),
|
||||
);
|
||||
@@ -155,11 +193,14 @@ describe('extensionsCommand', () => {
|
||||
});
|
||||
|
||||
it('should update a single extension by name', async () => {
|
||||
const extension: GeminiCLIExtension = {
|
||||
const extension: Extension = {
|
||||
id: 'ext-one',
|
||||
name: 'ext-one',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: '/test/dir/ext-one',
|
||||
contextFiles: [],
|
||||
config: { name: 'ext-one', version: '1.0.0' },
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
autoUpdate: false,
|
||||
@@ -179,16 +220,23 @@ describe('extensionsCommand', () => {
|
||||
await updateAction(mockContext, 'ext-one');
|
||||
expect(mockUpdateExtension).toHaveBeenCalledWith(
|
||||
extension,
|
||||
'/test/dir',
|
||||
expect.any(Function),
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
expect.any(Function),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle errors when updating a single extension', async () => {
|
||||
mockUpdateExtension.mockRejectedValue(new Error('Extension not found'));
|
||||
mockGetExtensions.mockReturnValue([]);
|
||||
// Provide at least one extension so we don't get "No extensions installed" message
|
||||
const otherExtension: Extension = {
|
||||
id: 'other-ext',
|
||||
name: 'other-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: '/test/dir/other-ext',
|
||||
contextFiles: [],
|
||||
config: { name: 'other-ext', version: '1.0.0' },
|
||||
};
|
||||
mockGetExtensions.mockReturnValue([otherExtension]);
|
||||
await updateAction(mockContext, 'ext-one');
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
@@ -200,22 +248,28 @@ describe('extensionsCommand', () => {
|
||||
});
|
||||
|
||||
it('should update multiple extensions by name', async () => {
|
||||
const extensionOne: GeminiCLIExtension = {
|
||||
const extensionOne: Extension = {
|
||||
id: 'ext-one',
|
||||
name: 'ext-one',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: '/test/dir/ext-one',
|
||||
contextFiles: [],
|
||||
config: { name: 'ext-one', version: '1.0.0' },
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
autoUpdate: false,
|
||||
source: 'https://github.com/some/extension.git',
|
||||
},
|
||||
};
|
||||
const extensionTwo: GeminiCLIExtension = {
|
||||
const extensionTwo: Extension = {
|
||||
id: 'ext-two',
|
||||
name: 'ext-two',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: '/test/dir/ext-two',
|
||||
contextFiles: [],
|
||||
config: { name: 'ext-two', version: '1.0.0' },
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
autoUpdate: false,
|
||||
@@ -223,14 +277,14 @@ describe('extensionsCommand', () => {
|
||||
},
|
||||
};
|
||||
mockGetExtensions.mockReturnValue([extensionOne, extensionTwo]);
|
||||
mockContext.ui.extensionsUpdateState.set(
|
||||
extensionOne.name,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
);
|
||||
mockContext.ui.extensionsUpdateState.set(
|
||||
extensionTwo.name,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
);
|
||||
mockContext.ui.extensionsUpdateState.set(extensionOne.name, {
|
||||
status: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
processed: false,
|
||||
});
|
||||
mockContext.ui.extensionsUpdateState.set(extensionTwo.name, {
|
||||
status: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
processed: false,
|
||||
});
|
||||
mockUpdateExtension
|
||||
.mockResolvedValueOnce({
|
||||
name: 'ext-one',
|
||||
@@ -265,18 +319,24 @@ describe('extensionsCommand', () => {
|
||||
throw new Error('Update completion not found');
|
||||
}
|
||||
|
||||
const extensionOne: GeminiCLIExtension = {
|
||||
const extensionOne: Extension = {
|
||||
id: 'ext-one',
|
||||
name: 'ext-one',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: '/test/dir/ext-one',
|
||||
contextFiles: [],
|
||||
config: { name: 'ext-one', version: '1.0.0' },
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
autoUpdate: false,
|
||||
source: 'https://github.com/some/extension.git',
|
||||
},
|
||||
};
|
||||
const extensionTwo: GeminiCLIExtension = {
|
||||
const extensionTwo: Extension = {
|
||||
id: 'another-ext',
|
||||
contextFiles: [],
|
||||
config: { name: 'another-ext', version: '1.0.0' },
|
||||
name: 'another-ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
@@ -287,8 +347,11 @@ describe('extensionsCommand', () => {
|
||||
source: 'https://github.com/some/extension.git',
|
||||
},
|
||||
};
|
||||
const allExt: GeminiCLIExtension = {
|
||||
const allExt: Extension = {
|
||||
id: 'all-ext',
|
||||
name: 'all-ext',
|
||||
contextFiles: [],
|
||||
config: { name: 'all-ext', version: '1.0.0' },
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
path: '/test/dir/all-ext',
|
||||
@@ -331,5 +394,387 @@ describe('extensionsCommand', () => {
|
||||
expect(suggestions).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
it('should call reloadCommands in finally block', async () => {
|
||||
mockGetExtensions.mockReturnValue([{ name: 'ext-one', isActive: true }]);
|
||||
mockUpdateAllUpdatableExtensions.mockResolvedValue([
|
||||
{
|
||||
name: 'ext-one',
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.0.1',
|
||||
},
|
||||
]);
|
||||
await updateAction(mockContext, '--all');
|
||||
expect(mockContext.ui.reloadCommands).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('install', () => {
|
||||
const installAction = extensionsCommand.subCommands?.find(
|
||||
(cmd) => cmd.name === 'install',
|
||||
)?.action;
|
||||
|
||||
if (!installAction) {
|
||||
throw new Error('Install action not found');
|
||||
}
|
||||
|
||||
const mockParseInstallSource = parseInstallSource as MockedFunction<
|
||||
typeof parseInstallSource
|
||||
>;
|
||||
|
||||
// Create a real ExtensionManager mock that passes instanceof check
|
||||
let realMockExtensionManager: ExtensionManager;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
// Create a mock that inherits from ExtensionManager prototype
|
||||
realMockExtensionManager = Object.create(ExtensionManager.prototype);
|
||||
realMockExtensionManager.installExtension = mockInstallExtension;
|
||||
|
||||
mockContext = createMockCommandContext({
|
||||
services: {
|
||||
config: {
|
||||
getExtensions: mockGetExtensions,
|
||||
getWorkingDir: () => '/test/dir',
|
||||
getExtensionManager: () => realMockExtensionManager,
|
||||
},
|
||||
},
|
||||
ui: {
|
||||
dispatchExtensionStateUpdate: vi.fn(),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should show usage if no source is provided', async () => {
|
||||
await installAction(mockContext, '');
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Usage: /extensions install <source>',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should install extension successfully', async () => {
|
||||
mockParseInstallSource.mockResolvedValue({
|
||||
type: 'git',
|
||||
source: 'https://github.com/test/extension',
|
||||
});
|
||||
mockInstallExtension.mockResolvedValue({
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
await installAction(mockContext, 'https://github.com/test/extension');
|
||||
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'Installing extension from "https://github.com/test/extension"...',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'Extension "test-extension" installed successfully.',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
expect(mockContext.ui.reloadCommands).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle install errors', async () => {
|
||||
mockParseInstallSource.mockRejectedValue(
|
||||
new Error('Install source not found.'),
|
||||
);
|
||||
|
||||
await installAction(mockContext, '/invalid/path');
|
||||
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Failed to install extension from "/invalid/path": Install source not found.',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('uninstall', () => {
|
||||
const uninstallAction = extensionsCommand.subCommands?.find(
|
||||
(cmd) => cmd.name === 'uninstall',
|
||||
)?.action;
|
||||
|
||||
if (!uninstallAction) {
|
||||
throw new Error('Uninstall action not found');
|
||||
}
|
||||
|
||||
let realMockExtensionManager: ExtensionManager;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
realMockExtensionManager = Object.create(ExtensionManager.prototype);
|
||||
realMockExtensionManager.uninstallExtension = mockUninstallExtension;
|
||||
|
||||
mockContext = createMockCommandContext({
|
||||
services: {
|
||||
config: {
|
||||
getExtensions: mockGetExtensions,
|
||||
getWorkingDir: () => '/test/dir',
|
||||
getExtensionManager: () => realMockExtensionManager,
|
||||
},
|
||||
},
|
||||
ui: {
|
||||
dispatchExtensionStateUpdate: vi.fn(),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should show usage if no name is provided', async () => {
|
||||
await uninstallAction(mockContext, '');
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Usage: /extensions uninstall <extension-name>',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should uninstall extension successfully', async () => {
|
||||
mockUninstallExtension.mockResolvedValue(undefined);
|
||||
|
||||
await uninstallAction(mockContext, 'test-extension');
|
||||
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'Uninstalling extension "test-extension"...',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
expect(mockUninstallExtension).toHaveBeenCalledWith(
|
||||
'test-extension',
|
||||
false,
|
||||
);
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'Extension "test-extension" uninstalled successfully.',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
expect(mockContext.ui.reloadCommands).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle uninstall errors', async () => {
|
||||
mockUninstallExtension.mockRejectedValue(
|
||||
new Error('Extension not found.'),
|
||||
);
|
||||
|
||||
await uninstallAction(mockContext, 'nonexistent-extension');
|
||||
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Failed to uninstall extension "nonexistent-extension": Extension not found.',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('disable', () => {
|
||||
const disableAction = extensionsCommand.subCommands?.find(
|
||||
(cmd) => cmd.name === 'disable',
|
||||
)?.action;
|
||||
|
||||
if (!disableAction) {
|
||||
throw new Error('Disable action not found');
|
||||
}
|
||||
|
||||
let realMockExtensionManager: ExtensionManager;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
realMockExtensionManager = Object.create(ExtensionManager.prototype);
|
||||
realMockExtensionManager.disableExtension = mockDisableExtension;
|
||||
realMockExtensionManager.getLoadedExtensions = mockGetLoadedExtensions;
|
||||
|
||||
mockContext = createMockCommandContext({
|
||||
invocation: {
|
||||
raw: '/extensions disable',
|
||||
name: 'disable',
|
||||
args: '',
|
||||
},
|
||||
services: {
|
||||
config: {
|
||||
getExtensions: mockGetExtensions,
|
||||
getWorkingDir: () => '/test/dir',
|
||||
getExtensionManager: () => realMockExtensionManager,
|
||||
},
|
||||
},
|
||||
ui: {
|
||||
dispatchExtensionStateUpdate: vi.fn(),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should show usage if invalid args are provided', async () => {
|
||||
await disableAction(mockContext, '');
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Usage: /extensions disable <extension> [--scope=<user|workspace>]',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should disable extension at user scope', async () => {
|
||||
mockDisableExtension.mockResolvedValue(undefined);
|
||||
|
||||
await disableAction(mockContext, 'test-extension --scope=user');
|
||||
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'test-extension',
|
||||
'User',
|
||||
);
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'Extension "test-extension" disabled for the scope "User"',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should disable extension at workspace scope', async () => {
|
||||
mockDisableExtension.mockResolvedValue(undefined);
|
||||
|
||||
await disableAction(mockContext, 'test-extension --scope workspace');
|
||||
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'test-extension',
|
||||
'Workspace',
|
||||
);
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'Extension "test-extension" disabled for the scope "Workspace"',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should show error for invalid scope', async () => {
|
||||
await disableAction(mockContext, 'test-extension --scope=invalid');
|
||||
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Unsupported scope invalid, should be one of "user" or "workspace"',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('enable', () => {
|
||||
const enableAction = extensionsCommand.subCommands?.find(
|
||||
(cmd) => cmd.name === 'enable',
|
||||
)?.action;
|
||||
|
||||
if (!enableAction) {
|
||||
throw new Error('Enable action not found');
|
||||
}
|
||||
|
||||
let realMockExtensionManager: ExtensionManager;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
realMockExtensionManager = Object.create(ExtensionManager.prototype);
|
||||
realMockExtensionManager.enableExtension = mockEnableExtension;
|
||||
realMockExtensionManager.getLoadedExtensions = mockGetLoadedExtensions;
|
||||
|
||||
mockContext = createMockCommandContext({
|
||||
invocation: {
|
||||
raw: '/extensions enable',
|
||||
name: 'enable',
|
||||
args: '',
|
||||
},
|
||||
services: {
|
||||
config: {
|
||||
getExtensions: mockGetExtensions,
|
||||
getWorkingDir: () => '/test/dir',
|
||||
getExtensionManager: () => realMockExtensionManager,
|
||||
},
|
||||
},
|
||||
ui: {
|
||||
dispatchExtensionStateUpdate: vi.fn(),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should show usage if invalid args are provided', async () => {
|
||||
await enableAction(mockContext, '');
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Usage: /extensions enable <extension> [--scope=<user|workspace>]',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should enable extension at user scope', async () => {
|
||||
mockEnableExtension.mockResolvedValue(undefined);
|
||||
|
||||
await enableAction(mockContext, 'test-extension --scope=user');
|
||||
|
||||
expect(mockEnableExtension).toHaveBeenCalledWith(
|
||||
'test-extension',
|
||||
'User',
|
||||
);
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'Extension "test-extension" enabled for the scope "User"',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should enable extension at workspace scope', async () => {
|
||||
mockEnableExtension.mockResolvedValue(undefined);
|
||||
|
||||
await enableAction(mockContext, 'test-extension --scope workspace');
|
||||
|
||||
expect(mockEnableExtension).toHaveBeenCalledWith(
|
||||
'test-extension',
|
||||
'Workspace',
|
||||
);
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'Extension "test-extension" enabled for the scope "Workspace"',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should show error for invalid scope', async () => {
|
||||
await enableAction(mockContext, 'test-extension --scope=invalid');
|
||||
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Unsupported scope invalid, should be one of "user" or "workspace"',
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,13 +4,6 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { requestConsentInteractive } from '../../config/extension.js';
|
||||
import {
|
||||
updateAllUpdatableExtensions,
|
||||
type ExtensionUpdateInfo,
|
||||
updateExtension,
|
||||
checkForAllExtensionUpdates,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { ExtensionUpdateState } from '../state/extensions.js';
|
||||
import { MessageType } from '../types.js';
|
||||
@@ -20,8 +13,39 @@ import {
|
||||
CommandKind,
|
||||
} from './types.js';
|
||||
import { t } from '../../i18n/index.js';
|
||||
import {
|
||||
ExtensionManager,
|
||||
parseInstallSource,
|
||||
type ExtensionUpdateInfo,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { SettingScope } from '../../config/settings.js';
|
||||
|
||||
function showMessageIfNoExtensions(
|
||||
context: CommandContext,
|
||||
extensions: unknown[],
|
||||
): boolean {
|
||||
if (extensions.length === 0) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: t('No extensions installed.'),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function listAction(context: CommandContext) {
|
||||
const extensions = context.services.config
|
||||
? context.services.config.getExtensions()
|
||||
: [];
|
||||
|
||||
if (showMessageIfNoExtensions(context, extensions)) {
|
||||
return;
|
||||
}
|
||||
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.EXTENSIONS_LIST,
|
||||
@@ -34,42 +58,52 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
const updateArgs = args.split(' ').filter((value) => value.length > 0);
|
||||
const all = updateArgs.length === 1 && updateArgs[0] === '--all';
|
||||
const names = all ? undefined : updateArgs;
|
||||
let updateInfos: ExtensionUpdateInfo[] = [];
|
||||
|
||||
if (!all && names?.length === 0) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: 'Usage: /extensions update <extension-names>|--all',
|
||||
text: t('Usage: /extensions update <extension-names>|--all'),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let updateInfos: ExtensionUpdateInfo[] = [];
|
||||
|
||||
const extensionManager = context.services.config!.getExtensionManager();
|
||||
const extensions = context.services.config
|
||||
? context.services.config.getExtensions()
|
||||
: [];
|
||||
|
||||
if (showMessageIfNoExtensions(context, extensions)) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
try {
|
||||
await checkForAllExtensionUpdates(
|
||||
context.services.config!.getExtensions(),
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
|
||||
await extensionManager.checkForAllExtensionUpdates((extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
|
||||
|
||||
context.ui.setPendingItem({
|
||||
type: MessageType.EXTENSIONS_LIST,
|
||||
});
|
||||
if (all) {
|
||||
updateInfos = await updateAllUpdatableExtensions(
|
||||
context.services.config!.getWorkingDir(),
|
||||
// We don't have the ability to prompt for consent yet in this flow.
|
||||
(description) =>
|
||||
requestConsentInteractive(
|
||||
description,
|
||||
context.ui.addConfirmUpdateExtensionRequest,
|
||||
),
|
||||
context.services.config!.getExtensions(),
|
||||
updateInfos = await extensionManager.updateAllUpdatableExtensions(
|
||||
context.ui.extensionsUpdateState,
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
(extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
} else if (names?.length) {
|
||||
const workingDir = context.services.config!.getWorkingDir();
|
||||
const extensions = context.services.config!.getExtensions();
|
||||
for (const name of names) {
|
||||
const extension = extensions.find(
|
||||
@@ -79,23 +113,21 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: `Extension ${name} not found.`,
|
||||
text: t('Extension "{{name}}" not found.', { name }),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
continue;
|
||||
}
|
||||
const updateInfo = await updateExtension(
|
||||
const updateInfo = await extensionManager.updateExtension(
|
||||
extension,
|
||||
workingDir,
|
||||
(description) =>
|
||||
requestConsentInteractive(
|
||||
description,
|
||||
context.ui.addConfirmUpdateExtensionRequest,
|
||||
),
|
||||
context.ui.extensionsUpdateState.get(extension.name)?.status ??
|
||||
ExtensionUpdateState.UNKNOWN,
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
(extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
if (updateInfo) updateInfos.push(updateInfo);
|
||||
}
|
||||
@@ -105,7 +137,7 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'No extensions to update.',
|
||||
text: t('No extensions to update.'),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
@@ -126,10 +158,288 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
context.ui.reloadCommands();
|
||||
context.ui.setPendingItem(null);
|
||||
}
|
||||
}
|
||||
|
||||
async function installAction(context: CommandContext, args: string) {
|
||||
const extensionManager = context.services.config?.getExtensionManager();
|
||||
if (!(extensionManager instanceof ExtensionManager)) {
|
||||
console.error(
|
||||
`Cannot ${context.invocation?.name} extensions in this environment`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const source = args.trim();
|
||||
if (!source) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: t('Usage: /extensions install <source>'),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const installMetadata = await parseInstallSource(source);
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: t('Installing extension from "{{source}}"...', { source }),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
const extension = await extensionManager.installExtension(installMetadata);
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: t('Extension "{{name}}" installed successfully.', {
|
||||
name: extension.name,
|
||||
}),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
// FIXME: command reload is driven by the UI for now; the extensionManager cannot refresh commands automatically
|
||||
context.ui.reloadCommands();
|
||||
} catch (error) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: t('Failed to install extension from "{{source}}": {{error}}', {
|
||||
source,
|
||||
error: getErrorMessage(error),
|
||||
}),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
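For reference, the install action can be exercised the same way the tests above resolve the enable/disable actions; a minimal, hypothetical sketch (the source URL is illustrative, and mockContext is assumed to be set up as in those tests):

// Resolve the subcommand action and invoke it with an illustrative git source.
const installActionFn = extensionsCommand.subCommands?.find(
  (cmd) => cmd.name === 'install',
)?.action;
if (!installActionFn) throw new Error('Install action not found');
await installActionFn(mockContext, 'https://github.com/example/qwen-ext.git');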
|
||||
|
||||
async function uninstallAction(context: CommandContext, args: string) {
|
||||
const extensionManager = context.services.config?.getExtensionManager();
|
||||
if (!(extensionManager instanceof ExtensionManager)) {
|
||||
console.error(
|
||||
`Cannot ${context.invocation?.name} extensions in this environment`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const name = args.trim();
|
||||
if (!name) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: t('Usage: /extensions uninstall <extension-name>'),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: t('Uninstalling extension "{{name}}"...', { name }),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
|
||||
try {
|
||||
await extensionManager.uninstallExtension(name, false);
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: t('Extension "{{name}}" uninstalled successfully.', { name }),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
context.ui.reloadCommands();
|
||||
} catch (error) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: t('Failed to uninstall extension "{{name}}": {{error}}', {
|
||||
name,
|
||||
error: getErrorMessage(error),
|
||||
}),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function getEnableDisableContext(
|
||||
context: CommandContext,
|
||||
argumentsString: string,
|
||||
): {
|
||||
extensionManager: ExtensionManager;
|
||||
names: string[];
|
||||
scope: SettingScope;
|
||||
} | null {
|
||||
const extensionManager = context.services.config?.getExtensionManager();
|
||||
if (!(extensionManager instanceof ExtensionManager)) {
|
||||
console.error(
|
||||
`Cannot ${context.invocation?.name} extensions in this environment`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
const parts = argumentsString.split(' ');
|
||||
const name = parts[0];
|
||||
if (
|
||||
name === '' ||
|
||||
!(
|
||||
(parts.length === 2 && parts[1].startsWith('--scope=')) || // --scope=<scope>
|
||||
(parts.length === 3 && parts[1] === '--scope') // --scope <scope>
|
||||
)
|
||||
) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: t(
|
||||
'Usage: /extensions {{command}} <extension> [--scope=<user|workspace>]',
|
||||
{
|
||||
command: context.invocation?.name ?? '',
|
||||
},
|
||||
),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return null;
|
||||
}
|
||||
let scope: SettingScope;
|
||||
// Transform `--scope=<scope>` to `--scope <scope>`.
|
||||
if (parts.length === 2) {
|
||||
parts.push(...parts[1].split('='));
|
||||
parts.splice(1, 1);
|
||||
}
|
||||
switch (parts[2].toLowerCase()) {
|
||||
case 'workspace':
|
||||
scope = SettingScope.Workspace;
|
||||
break;
|
||||
case 'user':
|
||||
scope = SettingScope.User;
|
||||
break;
|
||||
default:
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: t(
|
||||
'Unsupported scope "{{scope}}", should be one of "user" or "workspace"',
|
||||
{
|
||||
scope: parts[2],
|
||||
},
|
||||
),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return null;
|
||||
}
|
||||
let names: string[] = [];
|
||||
if (name === '--all') {
|
||||
let extensions = extensionManager.getLoadedExtensions();
|
||||
if (context.invocation?.name === 'enable') {
|
||||
extensions = extensions.filter((ext) => !ext.isActive);
|
||||
}
|
||||
if (context.invocation?.name === 'disable') {
|
||||
extensions = extensions.filter((ext) => ext.isActive);
|
||||
}
|
||||
names = extensions.map((ext) => ext.name);
|
||||
} else {
|
||||
names = [name];
|
||||
}
|
||||
|
||||
return {
|
||||
extensionManager,
|
||||
names,
|
||||
scope,
|
||||
};
|
||||
}
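Both accepted argument spellings normalize to the same parsed shape; a small illustration (the extension name is hypothetical):

// After normalization, parts becomes ['my-ext', '--scope', 'workspace'] in both cases:
getEnableDisableContext(context, 'my-ext --scope=workspace');
getEnableDisableContext(context, 'my-ext --scope workspace');
// → { extensionManager, names: ['my-ext'], scope: SettingScope.Workspace }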
|
||||
|
||||
async function disableAction(context: CommandContext, args: string) {
|
||||
const enableContext = getEnableDisableContext(context, args);
|
||||
if (!enableContext) return;
|
||||
|
||||
const { names, scope, extensionManager } = enableContext;
|
||||
for (const name of names) {
|
||||
await extensionManager.disableExtension(name, scope);
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: t('Extension "{{name}}" disabled for scope "{{scope}}"', {
|
||||
name,
|
||||
scope,
|
||||
}),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
context.ui.reloadCommands();
|
||||
}
|
||||
}
|
||||
|
||||
async function enableAction(context: CommandContext, args: string) {
|
||||
const enableContext = getEnableDisableContext(context, args);
|
||||
if (!enableContext) return;
|
||||
|
||||
const { names, scope, extensionManager } = enableContext;
|
||||
for (const name of names) {
|
||||
await extensionManager.enableExtension(name, scope);
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: t('Extension "{{name}}" enabled for scope "{{scope}}"', {
|
||||
name,
|
||||
scope,
|
||||
}),
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
context.ui.reloadCommands();
|
||||
}
|
||||
}
|
||||
|
||||
export async function completeExtensions(
  context: CommandContext,
  partialArg: string,
) {
  let extensions = context.services.config?.getExtensions() ?? [];

  if (context.invocation?.name === 'enable') {
    extensions = extensions.filter((ext) => !ext.isActive);
  }
  if (
    context.invocation?.name === 'disable' ||
    context.invocation?.name === 'restart'
  ) {
    extensions = extensions.filter((ext) => ext.isActive);
  }
  const extensionNames = extensions.map((ext) => ext.name);
  const suggestions = extensionNames.filter((name) =>
    name.startsWith(partialArg),
  );

  if ('--all'.startsWith(partialArg) || 'all'.startsWith(partialArg)) {
    suggestions.unshift('--all');
  }

  return suggestions;
}

export async function completeExtensionsAndScopes(
  context: CommandContext,
  partialArg: string,
) {
  const completions = await completeExtensions(context, partialArg);
  return completions.flatMap((s) => [
    `${s} --scope user`,
    `${s} --scope workspace`,
  ]);
}
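A quick illustration of the combined completion output, assuming two active extensions named 'alpha' and 'beta' (hypothetical names) and the partial argument 'a':

await completeExtensionsAndScopes(context, 'a');
// → ['--all --scope user', '--all --scope workspace',
//    'alpha --scope user', 'alpha --scope workspace']
// '--all' is suggested first because 'all'.startsWith('a') matches the partial argument.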
|
||||
|
||||
const listExtensionsCommand: SlashCommand = {
|
||||
name: 'list',
|
||||
get description() {
|
||||
@@ -146,19 +456,46 @@ const updateExtensionsCommand: SlashCommand = {
|
||||
},
|
||||
kind: CommandKind.BUILT_IN,
|
||||
action: updateAction,
|
||||
completion: async (context, partialArg) => {
|
||||
const extensions = context.services.config?.getExtensions() ?? [];
|
||||
const extensionNames = extensions.map((ext) => ext.name);
|
||||
const suggestions = extensionNames.filter((name) =>
|
||||
name.startsWith(partialArg),
|
||||
);
|
||||
completion: completeExtensions,
|
||||
};
|
||||
|
||||
if ('--all'.startsWith(partialArg) || 'all'.startsWith(partialArg)) {
|
||||
suggestions.unshift('--all');
|
||||
}
|
||||
|
||||
return suggestions;
|
||||
const disableCommand: SlashCommand = {
|
||||
name: 'disable',
|
||||
get description() {
|
||||
return t('Disable an extension');
|
||||
},
|
||||
kind: CommandKind.BUILT_IN,
|
||||
action: disableAction,
|
||||
completion: completeExtensionsAndScopes,
|
||||
};
|
||||
|
||||
const enableCommand: SlashCommand = {
|
||||
name: 'enable',
|
||||
get description() {
|
||||
return t('Enable an extension');
|
||||
},
|
||||
kind: CommandKind.BUILT_IN,
|
||||
action: enableAction,
|
||||
completion: completeExtensionsAndScopes,
|
||||
};
|
||||
|
||||
const installCommand: SlashCommand = {
|
||||
name: 'install',
|
||||
get description() {
|
||||
return t('Install an extension from a git repo or local path');
|
||||
},
|
||||
kind: CommandKind.BUILT_IN,
|
||||
action: installAction,
|
||||
};
|
||||
|
||||
const uninstallCommand: SlashCommand = {
|
||||
name: 'uninstall',
|
||||
get description() {
|
||||
return t('Uninstall an extension');
|
||||
},
|
||||
kind: CommandKind.BUILT_IN,
|
||||
action: uninstallAction,
|
||||
completion: completeExtensions,
|
||||
};
|
||||
|
||||
export const extensionsCommand: SlashCommand = {
|
||||
@@ -167,7 +504,14 @@ export const extensionsCommand: SlashCommand = {
|
||||
return t('Manage extensions');
|
||||
},
|
||||
kind: CommandKind.BUILT_IN,
|
||||
subCommands: [listExtensionsCommand, updateExtensionsCommand],
|
||||
subCommands: [
|
||||
listExtensionsCommand,
|
||||
updateExtensionsCommand,
|
||||
disableCommand,
|
||||
enableCommand,
|
||||
installCommand,
|
||||
uninstallCommand,
|
||||
],
|
||||
action: (context, args) =>
|
||||
// Default to list if no subcommand is provided
|
||||
listExtensionsCommand.action!(context, args),
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
import type { Mock } from 'vitest';
|
||||
import { vi, describe, it, expect, beforeEach } from 'vitest';
|
||||
import { memoryCommand } from './memoryCommand.js';
|
||||
import type { SlashCommand, type CommandContext } from './types.js';
|
||||
import type { SlashCommand, CommandContext } from './types.js';
|
||||
import { createMockCommandContext } from '../../test-utils/mockCommandContext.js';
|
||||
import { MessageType } from '../types.js';
|
||||
import type { LoadedSettings } from '../../config/settings.js';
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
|
||||
import { Box, Text } from 'ink';
|
||||
import { IdeIntegrationNudge } from '../IdeIntegrationNudge.js';
|
||||
import { CommandFormatMigrationNudge } from '../CommandFormatMigrationNudge.js';
|
||||
import { LoopDetectionConfirmation } from './LoopDetectionConfirmation.js';
|
||||
import { FolderTrustDialog } from './FolderTrustDialog.js';
|
||||
import { ShellConfirmationDialog } from './ShellConfirmationDialog.js';
|
||||
@@ -16,7 +17,6 @@ import { QwenOAuthProgress } from './QwenOAuthProgress.js';
|
||||
import { AuthDialog } from '../auth/AuthDialog.js';
|
||||
import { OpenAIKeyPrompt } from './OpenAIKeyPrompt.js';
|
||||
import { EditorSettingsDialog } from './EditorSettingsDialog.js';
|
||||
import { WorkspaceMigrationDialog } from './WorkspaceMigrationDialog.js';
|
||||
import { PermissionsModifyTrustDialog } from './PermissionsModifyTrustDialog.js';
|
||||
import { ModelDialog } from './ModelDialog.js';
|
||||
import { ApprovalModeDialog } from './ApprovalModeDialog.js';
|
||||
@@ -76,15 +76,6 @@ export const DialogManager = ({
|
||||
if (uiState.showIdeRestartPrompt) {
|
||||
return <IdeTrustChangeDialog reason={uiState.ideTrustRestartReason} />;
|
||||
}
|
||||
if (uiState.showWorkspaceMigrationDialog) {
|
||||
return (
|
||||
<WorkspaceMigrationDialog
|
||||
workspaceExtensions={uiState.workspaceExtensions}
|
||||
onOpen={uiActions.onWorkspaceMigrationDialogOpen}
|
||||
onClose={uiActions.onWorkspaceMigrationDialogClose}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.shouldShowIdePrompt) {
|
||||
return (
|
||||
<IdeIntegrationNudge
|
||||
@@ -93,6 +84,14 @@ export const DialogManager = ({
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.shouldShowCommandMigrationNudge) {
|
||||
return (
|
||||
<CommandFormatMigrationNudge
|
||||
tomlFiles={uiState.commandMigrationTomlFiles}
|
||||
onComplete={uiActions.handleCommandMigrationComplete}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.isFolderTrustDialogOpen) {
|
||||
return (
|
||||
<FolderTrustDialog
|
||||
|
||||
@@ -1,119 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { Box, Text } from 'ink';
|
||||
import {
|
||||
type Extension,
|
||||
performWorkspaceExtensionMigration,
|
||||
} from '../../config/extension.js';
|
||||
import { RadioButtonSelect } from './shared/RadioButtonSelect.js';
|
||||
import { theme } from '../semantic-colors.js';
|
||||
import { useState } from 'react';
|
||||
import { useKeypress } from '../hooks/useKeypress.js';
|
||||
|
||||
export function WorkspaceMigrationDialog(props: {
|
||||
workspaceExtensions: Extension[];
|
||||
onOpen: () => void;
|
||||
onClose: () => void;
|
||||
}) {
|
||||
const { workspaceExtensions, onOpen, onClose } = props;
|
||||
const [migrationComplete, setMigrationComplete] = useState(false);
|
||||
const [failedExtensions, setFailedExtensions] = useState<string[]>([]);
|
||||
onOpen();
|
||||
const onMigrate = async () => {
|
||||
const failed = await performWorkspaceExtensionMigration(
|
||||
workspaceExtensions,
|
||||
// We aren't updating extensions, just moving them around, don't need to ask for consent.
|
||||
async (_) => true,
|
||||
);
|
||||
setFailedExtensions(failed);
|
||||
setMigrationComplete(true);
|
||||
};
|
||||
|
||||
useKeypress(
|
||||
(key) => {
|
||||
if (migrationComplete && key.sequence === 'q') {
|
||||
process.exit(0);
|
||||
}
|
||||
},
|
||||
{ isActive: true },
|
||||
);
|
||||
|
||||
if (migrationComplete) {
|
||||
return (
|
||||
<Box
|
||||
flexDirection="column"
|
||||
borderStyle="round"
|
||||
borderColor={theme.border.default}
|
||||
padding={1}
|
||||
>
|
||||
{failedExtensions.length > 0 ? (
|
||||
<>
|
||||
<Text color={theme.text.primary}>
|
||||
The following extensions failed to migrate. Please try installing
|
||||
them manually. To see other changes, Qwen Code must be restarted.
|
||||
Press 'q' to quit.
|
||||
</Text>
|
||||
<Box flexDirection="column" marginTop={1} marginLeft={2}>
|
||||
{failedExtensions.map((failed) => (
|
||||
<Text key={failed}>- {failed}</Text>
|
||||
))}
|
||||
</Box>
|
||||
</>
|
||||
) : (
|
||||
<Text color={theme.text.primary}>
|
||||
Migration complete. To see changes, Qwen Code must be restarted.
|
||||
Press 'q' to quit.
|
||||
</Text>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Box
|
||||
flexDirection="column"
|
||||
borderStyle="round"
|
||||
borderColor={theme.border.default}
|
||||
padding={1}
|
||||
>
|
||||
<Text bold color={theme.text.primary}>
|
||||
Workspace-level extensions are deprecated{'\n'}
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
Would you like to install them at the user level?
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
The extension definition will remain in your workspace directory.
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
If you opt to skip, you can install them manually using the extensions
|
||||
install command.
|
||||
</Text>
|
||||
|
||||
<Box flexDirection="column" marginTop={1} marginLeft={2}>
|
||||
{workspaceExtensions.map((extension) => (
|
||||
<Text key={extension.config.name}>- {extension.config.name}</Text>
|
||||
))}
|
||||
</Box>
|
||||
<Box marginTop={1}>
|
||||
<RadioButtonSelect
|
||||
items={[
|
||||
{ label: 'Install all', value: 'migrate', key: 'migrate' },
|
||||
{ label: 'Skip', value: 'skip', key: 'skip' },
|
||||
]}
|
||||
onSelect={(value: string) => {
|
||||
if (value === 'migrate') {
|
||||
onMigrate();
|
||||
} else {
|
||||
onClose();
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</Box>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -58,7 +58,11 @@ export const ActionSelectionStep = ({
|
||||
},
|
||||
];
|
||||
|
||||
const actions = selectedAgent?.isBuiltin
|
||||
// Extension-level agents are also read-only (like builtin)
|
||||
const isReadOnly =
|
||||
selectedAgent?.isBuiltin || selectedAgent?.level === 'extension';
|
||||
|
||||
const actions = isReadOnly
|
||||
? allActions.filter(
|
||||
(action) => action.value === 'view' || action.value === 'back',
|
||||
)
|
||||
|
||||
@@ -12,10 +12,11 @@ import { type SubagentConfig } from '@qwen-code/qwen-code-core';
|
||||
import { t } from '../../../../i18n/index.js';
|
||||
|
||||
interface NavigationState {
|
||||
currentBlock: 'project' | 'user' | 'builtin';
|
||||
currentBlock: 'project' | 'user' | 'builtin' | 'extension';
|
||||
projectIndex: number;
|
||||
userIndex: number;
|
||||
builtinIndex: number;
|
||||
extensionIndex: number;
|
||||
}
|
||||
|
||||
interface AgentSelectionStepProps {
|
||||
@@ -32,6 +33,7 @@ export const AgentSelectionStep = ({
|
||||
projectIndex: 0,
|
||||
userIndex: 0,
|
||||
builtinIndex: 0,
|
||||
extensionIndex: 0,
|
||||
});
|
||||
|
||||
// Group agents by level
|
||||
@@ -47,6 +49,10 @@ export const AgentSelectionStep = ({
|
||||
() => availableAgents.filter((agent) => agent.level === 'builtin'),
|
||||
[availableAgents],
|
||||
);
|
||||
const extensionAgents = useMemo(
|
||||
() => availableAgents.filter((agent) => agent.level === 'extension'),
|
||||
[availableAgents],
|
||||
);
|
||||
const projectNames = useMemo(
|
||||
() => new Set(projectAgents.map((agent) => agent.name)),
|
||||
[projectAgents],
|
||||
@@ -60,8 +66,10 @@ export const AgentSelectionStep = ({
|
||||
setNavigation((prev) => ({ ...prev, currentBlock: 'user' }));
|
||||
} else if (builtinAgents.length > 0) {
|
||||
setNavigation((prev) => ({ ...prev, currentBlock: 'builtin' }));
|
||||
} else if (extensionAgents.length > 0) {
|
||||
setNavigation((prev) => ({ ...prev, currentBlock: 'extension' }));
|
||||
}
|
||||
}, [projectAgents, userAgents, builtinAgents]);
|
||||
}, [projectAgents, userAgents, builtinAgents, extensionAgents]);
|
||||
|
||||
// Custom keyboard navigation
|
||||
useKeypress(
|
||||
@@ -87,6 +95,13 @@ export const AgentSelectionStep = ({
|
||||
currentBlock: 'user',
|
||||
userIndex: userAgents.length - 1,
|
||||
};
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to last item in extension block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'extension',
|
||||
extensionIndex: extensionAgents.length - 1,
|
||||
};
|
||||
} else {
|
||||
// Wrap to last item in project block
|
||||
return { ...prev, projectIndex: projectAgents.length - 1 };
|
||||
@@ -108,11 +123,18 @@ export const AgentSelectionStep = ({
|
||||
currentBlock: 'builtin',
|
||||
builtinIndex: builtinAgents.length - 1,
|
||||
};
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to last item in extension block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'extension',
|
||||
extensionIndex: extensionAgents.length - 1,
|
||||
};
|
||||
} else {
|
||||
// Wrap to last item in user block
|
||||
return { ...prev, userIndex: userAgents.length - 1 };
|
||||
}
|
||||
} else {
|
||||
} else if (prev.currentBlock === 'builtin') {
|
||||
// builtin block
|
||||
if (prev.builtinIndex > 0) {
|
||||
return { ...prev, builtinIndex: prev.builtinIndex - 1 };
|
||||
@@ -130,10 +152,46 @@ export const AgentSelectionStep = ({
|
||||
currentBlock: 'project',
|
||||
projectIndex: projectAgents.length - 1,
|
||||
};
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to last item in extension block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'extension',
|
||||
extensionIndex: extensionAgents.length - 1,
|
||||
};
|
||||
} else {
|
||||
// Wrap to last item in builtin block
|
||||
return { ...prev, builtinIndex: builtinAgents.length - 1 };
|
||||
}
|
||||
} else {
|
||||
// extension block
|
||||
if (prev.extensionIndex > 0) {
|
||||
return { ...prev, extensionIndex: prev.extensionIndex - 1 };
|
||||
} else if (userAgents.length > 0) {
|
||||
// Move to last item in user block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'user',
|
||||
userIndex: userAgents.length - 1,
|
||||
};
|
||||
} else if (projectAgents.length > 0) {
|
||||
// Move to last item in project block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'project',
|
||||
projectIndex: projectAgents.length - 1,
|
||||
};
|
||||
} else if (builtinAgents.length > 0) {
|
||||
// Move to last item in builtin block
|
||||
return {
|
||||
...prev,
|
||||
currentBlock: 'builtin',
|
||||
builtinIndex: builtinAgents.length - 1,
|
||||
};
|
||||
} else {
|
||||
// Wrap to last item in extension block
|
||||
return { ...prev, extensionIndex: extensionAgents.length - 1 };
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (name === 'down' || name === 'j') {
|
||||
@@ -147,6 +205,9 @@ export const AgentSelectionStep = ({
|
||||
} else if (builtinAgents.length > 0) {
|
||||
// Move to first item in builtin block
|
||||
return { ...prev, currentBlock: 'builtin', builtinIndex: 0 };
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to first item in extension block
|
||||
return { ...prev, currentBlock: 'extension', extensionIndex: 0 };
|
||||
} else {
|
||||
// Wrap to first item in project block
|
||||
return { ...prev, projectIndex: 0 };
|
||||
@@ -157,6 +218,9 @@ export const AgentSelectionStep = ({
|
||||
} else if (builtinAgents.length > 0) {
|
||||
// Move to first item in builtin block
|
||||
return { ...prev, currentBlock: 'builtin', builtinIndex: 0 };
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to first item in extension block
|
||||
return { ...prev, currentBlock: 'extension', extensionIndex: 0 };
|
||||
} else if (projectAgents.length > 0) {
|
||||
// Move to first item in project block
|
||||
return { ...prev, currentBlock: 'project', projectIndex: 0 };
|
||||
@@ -164,10 +228,13 @@ export const AgentSelectionStep = ({
|
||||
// Wrap to first item in user block
|
||||
return { ...prev, userIndex: 0 };
|
||||
}
|
||||
} else {
|
||||
} else if (prev.currentBlock === 'builtin') {
|
||||
// builtin block
|
||||
if (prev.builtinIndex < builtinAgents.length - 1) {
|
||||
return { ...prev, builtinIndex: prev.builtinIndex + 1 };
|
||||
} else if (extensionAgents.length > 0) {
|
||||
// Move to first item in extension block
|
||||
return { ...prev, currentBlock: 'extension', extensionIndex: 0 };
|
||||
} else if (projectAgents.length > 0) {
|
||||
// Move to first item in project block
|
||||
return { ...prev, currentBlock: 'project', projectIndex: 0 };
|
||||
@@ -178,6 +245,23 @@ export const AgentSelectionStep = ({
|
||||
// Wrap to first item in builtin block
|
||||
return { ...prev, builtinIndex: 0 };
|
||||
}
|
||||
} else {
|
||||
// extension block
|
||||
if (prev.extensionIndex < extensionAgents.length - 1) {
|
||||
return { ...prev, extensionIndex: prev.extensionIndex + 1 };
|
||||
} else if (projectAgents.length > 0) {
|
||||
// Move to first item in project block
|
||||
return { ...prev, currentBlock: 'project', projectIndex: 0 };
|
||||
} else if (userAgents.length > 0) {
|
||||
// Move to first item in user block
|
||||
return { ...prev, currentBlock: 'user', userIndex: 0 };
|
||||
} else if (builtinAgents.length > 0) {
|
||||
// Move to first item in builtin block
|
||||
return { ...prev, currentBlock: 'builtin', builtinIndex: 0 };
|
||||
} else {
|
||||
// Wrap to first item in extension block
|
||||
return { ...prev, extensionIndex: 0 };
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (name === 'return' || name === 'space') {
|
||||
@@ -188,11 +272,17 @@ export const AgentSelectionStep = ({
|
||||
} else if (navigation.currentBlock === 'user') {
|
||||
// User agents come after project agents in the availableAgents array
|
||||
globalIndex = projectAgents.length + navigation.userIndex;
|
||||
} else {
|
||||
// builtin block
|
||||
} else if (navigation.currentBlock === 'builtin') {
|
||||
// Builtin agents come after project and user agents in the availableAgents array
|
||||
globalIndex =
|
||||
projectAgents.length + userAgents.length + navigation.builtinIndex;
|
||||
} else {
|
||||
// Extension agents come after project, user, and builtin agents
|
||||
globalIndex =
|
||||
projectAgents.length +
|
||||
userAgents.length +
|
||||
builtinAgents.length +
|
||||
navigation.extensionIndex;
|
||||
}
|
||||
|
||||
if (globalIndex >= 0 && globalIndex < availableAgents.length) {
|
||||
@@ -218,7 +308,7 @@ export const AgentSelectionStep = ({
|
||||
const renderAgentItem = (
|
||||
agent: {
|
||||
name: string;
|
||||
level: 'project' | 'user' | 'builtin' | 'session';
|
||||
level: 'project' | 'user' | 'builtin' | 'session' | 'extension';
|
||||
isBuiltin?: boolean;
|
||||
},
|
||||
index: number,
|
||||
@@ -258,7 +348,8 @@ export const AgentSelectionStep = ({
|
||||
const enabledAgentsCount =
|
||||
projectAgents.length +
|
||||
userAgents.filter((agent) => !projectNames.has(agent.name)).length +
|
||||
builtinAgents.length;
|
||||
builtinAgents.length +
|
||||
extensionAgents.length;
|
||||
|
||||
return (
|
||||
<Box flexDirection="column">
|
||||
@@ -305,7 +396,10 @@ export const AgentSelectionStep = ({
|
||||
|
||||
{/* Built-in Agents */}
|
||||
{builtinAgents.length > 0 && (
|
||||
<Box flexDirection="column">
|
||||
<Box
|
||||
flexDirection="column"
|
||||
marginBottom={extensionAgents.length > 0 ? 1 : 0}
|
||||
>
|
||||
<Text color={theme.text.primary} bold>
|
||||
{t('Built-in Agents')}
|
||||
</Text>
|
||||
@@ -320,10 +414,28 @@ export const AgentSelectionStep = ({
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{/* Extension Agents */}
|
||||
{extensionAgents.length > 0 && (
|
||||
<Box flexDirection="column">
|
||||
<Text color={theme.text.primary} bold>
|
||||
{t('Extension Agents')}
|
||||
</Text>
|
||||
<Box marginTop={1} flexDirection="column">
|
||||
{extensionAgents.map((agent, index) => {
|
||||
const isSelected =
|
||||
navigation.currentBlock === 'extension' &&
|
||||
navigation.extensionIndex === index;
|
||||
return renderAgentItem(agent, index, isSelected);
|
||||
})}
|
||||
</Box>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{/* Agent count summary */}
|
||||
{(projectAgents.length > 0 ||
|
||||
userAgents.length > 0 ||
|
||||
builtinAgents.length > 0) && (
|
||||
builtinAgents.length > 0 ||
|
||||
extensionAgents.length > 0) && (
|
||||
<Box marginTop={1}>
|
||||
<Text color={theme.text.secondary}>
|
||||
{t('Using: {{count}} agents', {
|
||||
|
||||
@@ -95,7 +95,11 @@ export function AgentsManagerDialog({
|
||||
|
||||
try {
|
||||
const subagentManager = config.getSubagentManager();
|
||||
await subagentManager.deleteSubagent(agent.name, agent.level);
|
||||
await subagentManager.deleteSubagent(
|
||||
agent.name,
|
||||
agent.level,
|
||||
agent.extensionName,
|
||||
);
|
||||
|
||||
// Reload agents to get updated state
|
||||
await loadAgents();
|
||||
|
||||
@@ -18,7 +18,6 @@ const mockUseUIState = vi.mocked(useUIState);
|
||||
const mockExtensions = [
|
||||
{ name: 'ext-one', version: '1.0.0', isActive: true },
|
||||
{ name: 'ext-two', version: '2.1.0', isActive: true },
|
||||
{ name: 'ext-disabled', version: '3.0.0', isActive: false },
|
||||
];
|
||||
|
||||
describe('<ExtensionsList />', () => {
|
||||
@@ -29,7 +28,6 @@ describe('<ExtensionsList />', () => {
|
||||
const mockUIState = (
|
||||
extensions: unknown[],
|
||||
extensionsUpdateState: Map<string, ExtensionUpdateState>,
|
||||
disabledExtensions: string[] = [],
|
||||
) => {
|
||||
mockUseUIState.mockReturnValue({
|
||||
commandContext: createMockCommandContext({
|
||||
@@ -37,13 +35,6 @@ describe('<ExtensionsList />', () => {
|
||||
config: {
|
||||
getExtensions: () => extensions,
|
||||
},
|
||||
settings: {
|
||||
merged: {
|
||||
extensions: {
|
||||
disabled: disabledExtensions,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
extensionsUpdateState,
|
||||
@@ -58,12 +49,11 @@ describe('<ExtensionsList />', () => {
|
||||
});
|
||||
|
||||
it('should render a list of extensions with their version and status', () => {
|
||||
mockUIState(mockExtensions, new Map(), ['ext-disabled']);
|
||||
mockUIState(mockExtensions, new Map());
|
||||
const { lastFrame } = render(<ExtensionsList />);
|
||||
const output = lastFrame();
|
||||
expect(output).toContain('ext-one (v1.0.0) - active');
|
||||
expect(output).toContain('ext-two (v2.1.0) - active');
|
||||
expect(output).toContain('ext-disabled (v3.0.0) - disabled');
|
||||
});
|
||||
|
||||
it('should display "unknown state" if an extension has no update state', () => {
|
||||
|
||||
@@ -9,12 +9,10 @@ import { useUIState } from '../../contexts/UIStateContext.js';
|
||||
import { ExtensionUpdateState } from '../../state/extensions.js';
|
||||
|
||||
export const ExtensionsList = () => {
|
||||
const { commandContext, extensionsUpdateState } = useUIState();
|
||||
const allExtensions = commandContext.services.config!.getExtensions();
|
||||
const settings = commandContext.services.settings;
|
||||
const disabledExtensions = settings.merged.extensions?.disabled ?? [];
|
||||
const { extensionsUpdateState, commandContext } = useUIState();
|
||||
const extensions = commandContext.services.config?.getExtensions() || [];
|
||||
|
||||
if (allExtensions.length === 0) {
|
||||
if (extensions.length === 0) {
|
||||
return <Text>No extensions installed.</Text>;
|
||||
}
|
||||
|
||||
@@ -22,10 +20,11 @@ export const ExtensionsList = () => {
|
||||
<Box flexDirection="column" marginTop={1} marginBottom={1}>
|
||||
<Text>Installed extensions:</Text>
|
||||
<Box flexDirection="column" paddingLeft={2}>
|
||||
{allExtensions.map((ext) => {
|
||||
{extensions.map((ext) => {
|
||||
const state = extensionsUpdateState.get(ext.name);
|
||||
const isActive = !disabledExtensions.includes(ext.name);
|
||||
const isActive = ext.isActive;
|
||||
const activeString = isActive ? 'active' : 'disabled';
|
||||
const activeColor = isActive ? 'green' : 'grey';
|
||||
|
||||
let stateColor = 'gray';
|
||||
const stateText = state || 'unknown state';
|
||||
@@ -44,6 +43,7 @@ export const ExtensionsList = () => {
|
||||
break;
|
||||
case ExtensionUpdateState.UP_TO_DATE:
|
||||
case ExtensionUpdateState.NOT_UPDATABLE:
|
||||
case ExtensionUpdateState.UPDATED:
|
||||
stateColor = 'green';
|
||||
break;
|
||||
default:
|
||||
@@ -52,12 +52,22 @@ export const ExtensionsList = () => {
|
||||
}
|
||||
|
||||
return (
|
||||
<Box key={ext.name}>
|
||||
<Box key={ext.name} flexDirection="column" marginBottom={1}>
|
||||
<Text>
|
||||
<Text color="cyan">{`${ext.name} (v${ext.version})`}</Text>
|
||||
{` - ${activeString}`}
|
||||
<Text color={activeColor}>{` - ${activeString}`}</Text>
|
||||
{<Text color={stateColor}>{` (${stateText})`}</Text>}
|
||||
</Text>
|
||||
{ext.resolvedSettings && ext.resolvedSettings.length > 0 && (
|
||||
<Box flexDirection="column" paddingLeft={2}>
|
||||
<Text>settings:</Text>
|
||||
{ext.resolvedSettings.map((setting) => (
|
||||
<Text key={setting.name}>
|
||||
- {setting.name}: {setting.value}
|
||||
</Text>
|
||||
))}
|
||||
</Box>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
})}
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
import { createContext, useContext } from 'react';
|
||||
import { type Key } from '../hooks/useKeypress.js';
|
||||
import { type IdeIntegrationNudgeResult } from '../IdeIntegrationNudge.js';
|
||||
import { type CommandMigrationNudgeResult } from '../CommandFormatMigrationNudge.js';
|
||||
import { type FolderTrustChoice } from '../components/FolderTrustDialog.js';
|
||||
import {
|
||||
type AuthType,
|
||||
@@ -46,14 +47,13 @@ export interface UIActions {
|
||||
setShellModeActive: (value: boolean) => void;
|
||||
vimHandleInput: (key: Key) => boolean;
|
||||
handleIdePromptComplete: (result: IdeIntegrationNudgeResult) => void;
|
||||
handleCommandMigrationComplete: (result: CommandMigrationNudgeResult) => void;
|
||||
handleFolderTrustSelect: (choice: FolderTrustChoice) => void;
|
||||
setConstrainHeight: (value: boolean) => void;
|
||||
onEscapePromptChange: (show: boolean) => void;
|
||||
refreshStatic: () => void;
|
||||
handleFinalSubmit: (value: string) => void;
|
||||
handleClearScreen: () => void;
|
||||
onWorkspaceMigrationDialogOpen: () => void;
|
||||
onWorkspaceMigrationDialogClose: () => void;
|
||||
// Vision switch dialog
|
||||
handleVisionSwitchSelect: (outcome: VisionSwitchOutcome) => void;
|
||||
// Welcome back dialog
|
||||
|
||||
@@ -72,6 +72,8 @@ export interface UIState {
|
||||
suggestionsWidth: number;
|
||||
isInputActive: boolean;
|
||||
shouldShowIdePrompt: boolean;
|
||||
shouldShowCommandMigrationNudge: boolean;
|
||||
commandMigrationTomlFiles: string[];
|
||||
isFolderTrustDialogOpen: boolean;
|
||||
isTrustedFolder: boolean | undefined;
|
||||
constrainHeight: boolean;
|
||||
@@ -87,9 +89,6 @@ export interface UIState {
|
||||
historyRemountKey: number;
|
||||
messageQueue: string[];
|
||||
showAutoAcceptIndicator: ApprovalMode;
|
||||
showWorkspaceMigrationDialog: boolean;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
workspaceExtensions: any[]; // Extension[]
|
||||
// Quota-related state
|
||||
currentModel: string;
|
||||
contextFileNames: string[];
|
||||
|
||||
51
packages/cli/src/ui/hooks/useCommandMigration.ts
Normal file
@@ -0,0 +1,51 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { useEffect, useState } from 'react';
import { Storage } from '@qwen-code/qwen-code-core';
import { detectTomlCommands } from '../../services/command-migration-tool.js';
import type { LoadedSettings } from '../../config/settings.js';

/**
 * Hook to detect TOML command files and manage migration nudge visibility.
 * Checks the workspace-level and user-level command directories.
 */
export function useCommandMigration(
  settings: LoadedSettings,
  storage: Storage,
) {
  const [showMigrationNudge, setShowMigrationNudge] = useState(false);
  const [tomlFiles, setTomlFiles] = useState<string[]>([]);

  useEffect(() => {
    const checkTomlCommands = async () => {
      const allFiles: string[] = [];

      // Check workspace commands directory (.qwen/commands)
      const workspaceCommandsDir = storage.getProjectCommandsDir();
      const workspaceFiles = await detectTomlCommands(workspaceCommandsDir);
      allFiles.push(...workspaceFiles.map((f) => `workspace: ${f}`));

      // Check user commands directory (~/.qwen/commands)
      const userCommandsDir = Storage.getUserCommandsDir();
      const userFiles = await detectTomlCommands(userCommandsDir);
      allFiles.push(...userFiles.map((f) => `user: ${f}`));

      if (allFiles.length > 0) {
        setTomlFiles(allFiles);
        setShowMigrationNudge(true);
      }
    };

    checkTomlCommands();
  }, [storage]);

  return {
    showMigrationNudge,
    tomlFiles,
    setShowMigrationNudge,
  };
}
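A minimal sketch of how this hook might be wired into the nudge flow shown earlier in the DialogManager changes (the surrounding component and where settings/storage come from are assumptions, not part of the diff):

// Hypothetical wiring: surface the nudge when TOML command files are detected.
const { showMigrationNudge, tomlFiles, setShowMigrationNudge } =
  useCommandMigration(settings, storage);

if (showMigrationNudge) {
  return (
    <CommandFormatMigrationNudge
      tomlFiles={tomlFiles}
      onComplete={() => setShowMigrationNudge(false)}
    />
  );
}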
@@ -4,26 +4,21 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { vi } from 'vitest';
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
ExtensionStorage,
|
||||
annotateActiveExtensions,
|
||||
loadExtension,
|
||||
} from '../../config/extension.js';
|
||||
import { createExtension } from '../../test-utils/createExtension.js';
|
||||
|
||||
import { useExtensionUpdates } from './useExtensionUpdates.js';
|
||||
import { QWEN_DIR, type GeminiCLIExtension } from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
QWEN_DIR,
|
||||
type ExtensionManager,
|
||||
type Extension,
|
||||
type ExtensionUpdateInfo,
|
||||
ExtensionUpdateState,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { MessageType } from '../types.js';
|
||||
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
|
||||
import {
|
||||
checkForAllExtensionUpdates,
|
||||
updateExtension,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { ExtensionUpdateState } from '../state/extensions.js';
|
||||
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const mockedOs = await importOriginal<typeof os>();
|
||||
@@ -33,63 +28,85 @@ vi.mock('os', async (importOriginal) => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../../config/extensions/update.js', () => ({
|
||||
checkForAllExtensionUpdates: vi.fn(),
|
||||
updateExtension: vi.fn(),
|
||||
}));
|
||||
function createMockExtension(overrides: Partial<Extension> = {}): Extension {
|
||||
return {
|
||||
id: 'test-extension-id',
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
path: '/some/path',
|
||||
isActive: true,
|
||||
config: {
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
},
|
||||
contextFiles: [],
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createMockExtensionManager(
|
||||
extensions: Extension[],
|
||||
checkCallback?: (
|
||||
callback: (extensionName: string, state: ExtensionUpdateState) => void,
|
||||
) => Promise<void>,
|
||||
updateResult?: ExtensionUpdateInfo | undefined,
|
||||
): ExtensionManager {
|
||||
return {
|
||||
getLoadedExtensions: vi.fn(() => extensions),
|
||||
checkForAllExtensionUpdates: vi.fn(
|
||||
async (
|
||||
callback: (extensionName: string, state: ExtensionUpdateState) => void,
|
||||
) => {
|
||||
if (checkCallback) {
|
||||
await checkCallback(callback);
|
||||
}
|
||||
},
|
||||
),
|
||||
updateExtension: vi.fn(async () => updateResult),
|
||||
} as unknown as ExtensionManager;
|
||||
}
|
||||
|
||||
describe('useExtensionUpdates', () => {
|
||||
let tempHomeDir: string;
|
||||
let userExtensionsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
|
||||
);
|
||||
tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'qwen-cli-test-home-'));
|
||||
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
|
||||
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
|
||||
fs.mkdirSync(userExtensionsDir, { recursive: true });
|
||||
vi.mocked(checkForAllExtensionUpdates).mockReset();
|
||||
vi.mocked(updateExtension).mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should check for updates and log a message if an update is available', async () => {
|
||||
const extensions = [
|
||||
{
|
||||
name: 'test-extension',
|
||||
const extension = createMockExtension({
|
||||
name: 'test-extension',
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
version: '1.0.0',
|
||||
path: '/some/path',
|
||||
isActive: true,
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
];
|
||||
});
|
||||
const addItem = vi.fn();
|
||||
const cwd = '/test/cwd';
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
const extensionManager = createMockExtensionManager(
|
||||
[extension],
|
||||
async (callback) => {
|
||||
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
);
|
||||
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
|
||||
);
|
||||
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(addItem).toHaveBeenCalledWith(
|
||||
@@ -103,43 +120,32 @@ describe('useExtensionUpdates', () => {
|
||||
});
|
||||
|
||||
it('should check for updates and automatically update if autoUpdate is true', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
const extension = createMockExtension({
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
source: 'https://some.git/repo',
|
||||
autoUpdate: true,
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[loadExtension({ extensionDir, workspaceDir: tempHomeDir })!],
|
||||
tempHomeDir,
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
const addItem = vi.fn();
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
const extensionManager = createMockExtensionManager(
|
||||
[extension],
|
||||
async (callback) => {
|
||||
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
{
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
name: 'test-extension',
|
||||
},
|
||||
);
|
||||
|
||||
vi.mocked(updateExtension).mockResolvedValue({
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
name: '',
|
||||
});
|
||||
|
||||
renderHook(() => useExtensionUpdates([extension], addItem, tempHomeDir));
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
|
||||
);
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
@@ -156,77 +162,64 @@ describe('useExtensionUpdates', () => {
|
||||
});
|
||||
|
||||
it('should batch update notifications for multiple extensions', async () => {
|
||||
const extensionDir1 = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
const extension1 = createMockExtension({
|
||||
id: 'test-extension-1-id',
|
||||
name: 'test-extension-1',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo1',
|
||||
type: 'git',
|
||||
source: 'https://some.git/repo1',
|
||||
autoUpdate: true,
|
||||
},
|
||||
});
|
||||
const extensionDir2 = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
const extension2 = createMockExtension({
|
||||
id: 'test-extension-2-id',
|
||||
name: 'test-extension-2',
|
||||
version: '2.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo2',
|
||||
type: 'git',
|
||||
source: 'https://some.git/repo2',
|
||||
autoUpdate: true,
|
||||
},
|
||||
});
|
||||
|
||||
const extensions = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: extensionDir1,
|
||||
workspaceDir: tempHomeDir,
|
||||
})!,
|
||||
loadExtension({
|
||||
extensionDir: extensionDir2,
|
||||
workspaceDir: tempHomeDir,
|
||||
})!,
|
||||
],
|
||||
tempHomeDir,
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
);
|
||||
|
||||
const addItem = vi.fn();
|
||||
let updateCallCount = 0;
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
const extensionManager = {
|
||||
getLoadedExtensions: vi.fn(() => [extension1, extension2]),
|
||||
checkForAllExtensionUpdates: vi.fn(
|
||||
async (
|
||||
callback: (
|
||||
extensionName: string,
|
||||
state: ExtensionUpdateState,
|
||||
) => void,
|
||||
) => {
|
||||
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
),
|
||||
updateExtension: vi.fn(async () => {
|
||||
updateCallCount++;
|
||||
if (updateCallCount === 1) {
|
||||
return {
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
name: 'test-extension-1',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension-2',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
return {
|
||||
originalVersion: '2.0.0',
|
||||
updatedVersion: '2.1.0',
|
||||
name: 'test-extension-2',
|
||||
};
|
||||
}),
|
||||
} as unknown as ExtensionManager;
|
||||
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
|
||||
);
|
||||
|
||||
vi.mocked(updateExtension)
|
||||
.mockResolvedValueOnce({
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
name: '',
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
originalVersion: '2.0.0',
|
||||
updatedVersion: '2.1.0',
|
||||
name: '',
|
||||
});
|
||||
|
||||
renderHook(() => useExtensionUpdates(extensions, addItem, tempHomeDir));
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(addItem).toHaveBeenCalledTimes(2);
|
||||
@@ -250,60 +243,40 @@ describe('useExtensionUpdates', () => {
|
||||
});
|
||||
|
||||
it('should batch update notifications for multiple extensions with autoUpdate: false', async () => {
|
||||
const extensions = [
|
||||
{
|
||||
name: 'test-extension-1',
|
||||
const extension1 = createMockExtension({
|
||||
id: 'test-extension-1-id',
|
||||
name: 'test-extension-1',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
version: '1.0.0',
|
||||
path: '/some/path1',
|
||||
isActive: true,
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo1',
|
||||
autoUpdate: false,
|
||||
},
|
||||
source: 'https://some/repo1',
|
||||
autoUpdate: false,
|
||||
},
|
||||
{
|
||||
name: 'test-extension-2',
|
||||
});
|
||||
const extension2 = createMockExtension({
|
||||
id: 'test-extension-2-id',
|
||||
name: 'test-extension-2',
|
||||
version: '2.0.0',
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
version: '2.0.0',
|
||||
path: '/some/path2',
|
||||
isActive: true,
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo2',
|
||||
autoUpdate: false,
|
||||
},
|
||||
source: 'https://some/repo2',
|
||||
autoUpdate: false,
|
||||
},
|
||||
];
|
||||
});
|
||||
|
||||
const addItem = vi.fn();
|
||||
const cwd = '/test/cwd';
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({ type: 'BATCH_CHECK_START' });
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension-1',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
const extensionManager = createMockExtensionManager(
|
||||
[extension1, extension2],
|
||||
async (callback) => {
|
||||
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
await new Promise((r) => setTimeout(r, 50));
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension-2',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
dispatch({ type: 'BATCH_CHECK_END' });
|
||||
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
);
|
||||
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
|
||||
);
|
||||
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(addItem).toHaveBeenCalledTimes(1);
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
|
||||
import type { ExtensionManager } from '@qwen-code/qwen-code-core';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import {
|
||||
ExtensionUpdateState,
|
||||
@@ -14,11 +14,6 @@ import {
|
||||
import { useCallback, useEffect, useMemo, useReducer } from 'react';
|
||||
import type { UseHistoryManagerReturn } from './useHistoryManager.js';
|
||||
import { MessageType, type ConfirmationRequest } from '../types.js';
|
||||
import {
|
||||
checkForAllExtensionUpdates,
|
||||
updateExtension,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { requestConsentInteractive } from '../../config/extension.js';
|
||||
import { checkExhaustive } from '../../utils/checks.js';
|
||||
|
||||
type ConfirmationRequestWrapper = {
|
||||
@@ -45,15 +40,7 @@ function confirmationRequestsReducer(
|
||||
}
|
||||
}
|
||||
|
||||
export const useExtensionUpdates = (
|
||||
extensions: GeminiCLIExtension[],
|
||||
addItem: UseHistoryManagerReturn['addItem'],
|
||||
cwd: string,
|
||||
) => {
|
||||
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
|
||||
extensionUpdatesReducer,
|
||||
initialExtensionUpdatesState,
|
||||
);
|
||||
export const useConfirmUpdateRequests = () => {
|
||||
const [
|
||||
confirmUpdateExtensionRequests,
|
||||
dispatchConfirmUpdateExtensionRequests,
|
||||
@@ -78,15 +65,52 @@ export const useExtensionUpdates = (
|
||||
},
|
||||
[dispatchConfirmUpdateExtensionRequests],
|
||||
);
|
||||
return {
|
||||
addConfirmUpdateExtensionRequest,
|
||||
confirmUpdateExtensionRequests,
|
||||
dispatchConfirmUpdateExtensionRequests,
|
||||
};
|
||||
};
|
||||
|
||||
export const useExtensionUpdates = (
|
||||
extensionManager: ExtensionManager,
|
||||
addItem: UseHistoryManagerReturn['addItem'],
|
||||
cwd: string,
|
||||
) => {
|
||||
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
|
||||
extensionUpdatesReducer,
|
||||
initialExtensionUpdatesState,
|
||||
);
|
||||
const extensions = extensionManager.getLoadedExtensions();
|
||||
|
||||
useEffect(() => {
|
||||
(async () => {
|
||||
await checkForAllExtensionUpdates(
|
||||
extensions,
|
||||
dispatchExtensionStateUpdate,
|
||||
const extensionsToCheck = extensions.filter((extension) => {
|
||||
const currentStatus = extensionsUpdateState.extensionStatuses.get(
|
||||
extension.name,
|
||||
);
|
||||
if (!currentStatus) return true;
|
||||
const currentState = currentStatus.status;
|
||||
return !currentState || currentState === ExtensionUpdateState.UNKNOWN;
|
||||
});
|
||||
if (extensionsToCheck.length === 0) return;
|
||||
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
|
||||
await extensionManager.checkForAllExtensionUpdates(
|
||||
(extensionName: string, state: ExtensionUpdateState) => {
|
||||
dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
});
|
||||
},
|
||||
);
|
||||
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
|
||||
})();
|
||||
}, [extensions, extensions.length, dispatchExtensionStateUpdate]);
|
||||
}, [
|
||||
extensions,
|
||||
extensionManager,
|
||||
extensionsUpdateState.extensionStatuses,
|
||||
dispatchExtensionStateUpdate,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (extensionsUpdateState.batchChecksInProgress > 0) {
|
||||
@@ -113,17 +137,17 @@ export const useExtensionUpdates = (
|
||||
});
|
||||
|
||||
if (extension.installMetadata?.autoUpdate) {
|
||||
updateExtension(
|
||||
extension,
|
||||
cwd,
|
||||
(description) =>
|
||||
requestConsentInteractive(
|
||||
description,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
),
|
||||
currentState.status,
|
||||
dispatchExtensionStateUpdate,
|
||||
)
|
||||
extensionManager
|
||||
.updateExtension(
|
||||
extension,
|
||||
currentState.status,
|
||||
(extensionName, state) => {
|
||||
dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
});
|
||||
},
|
||||
)
|
||||
.then((result) => {
|
||||
if (!result) return;
|
||||
addItem(
|
||||
@@ -157,13 +181,7 @@ export const useExtensionUpdates = (
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}, [
|
||||
extensions,
|
||||
extensionsUpdateState,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
addItem,
|
||||
cwd,
|
||||
]);
|
||||
}, [extensions, extensionManager, extensionsUpdateState, addItem, cwd]);
|
||||
|
||||
const extensionsUpdateStateComputed = useMemo(() => {
|
||||
const result = new Map<string, ExtensionUpdateState>();
|
||||
@@ -180,7 +198,5 @@ export const useExtensionUpdates = (
|
||||
extensionsUpdateState: extensionsUpdateStateComputed,
|
||||
extensionsUpdateStateInternal: extensionsUpdateState.extensionStatuses,
|
||||
dispatchExtensionStateUpdate,
|
||||
confirmUpdateExtensionRequests,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
};
|
||||
};
|
||||
|
||||
0
packages/cli/src/ui/hooks/useTomlMigration.ts
Normal file
0
packages/cli/src/ui/hooks/useTomlMigration.ts
Normal file
@@ -1,70 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { useState, useEffect } from 'react';
|
||||
import {
|
||||
type Extension,
|
||||
getWorkspaceExtensions,
|
||||
} from '../../config/extension.js';
|
||||
import { type LoadedSettings, SettingScope } from '../../config/settings.js';
|
||||
import process from 'node:process';
|
||||
|
||||
export function useWorkspaceMigration(settings: LoadedSettings) {
|
||||
const [showWorkspaceMigrationDialog, setShowWorkspaceMigrationDialog] =
|
||||
useState(false);
|
||||
const [workspaceExtensions, setWorkspaceExtensions] = useState<Extension[]>(
|
||||
[],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
// Default to true if not set.
|
||||
if (!(settings.merged.experimental?.extensionManagement ?? true)) {
|
||||
return;
|
||||
}
|
||||
const cwd = process.cwd();
|
||||
const extensions = getWorkspaceExtensions(cwd);
|
||||
if (
|
||||
extensions.length > 0 &&
|
||||
!settings.merged.extensions?.workspacesWithMigrationNudge?.includes(cwd)
|
||||
) {
|
||||
setWorkspaceExtensions(extensions);
|
||||
setShowWorkspaceMigrationDialog(true);
|
||||
console.log(settings.merged.extensions);
|
||||
}
|
||||
}, [
|
||||
settings.merged.extensions,
|
||||
settings.merged.experimental?.extensionManagement,
|
||||
]);
|
||||
|
||||
const onWorkspaceMigrationDialogOpen = () => {
|
||||
const userSettings = settings.forScope(SettingScope.User);
|
||||
const extensionSettings = userSettings.settings.extensions || {
|
||||
disabled: [],
|
||||
};
|
||||
const workspacesWithMigrationNudge =
|
||||
extensionSettings.workspacesWithMigrationNudge || [];
|
||||
|
||||
const cwd = process.cwd();
|
||||
if (!workspacesWithMigrationNudge.includes(cwd)) {
|
||||
workspacesWithMigrationNudge.push(cwd);
|
||||
}
|
||||
|
||||
extensionSettings.workspacesWithMigrationNudge =
|
||||
workspacesWithMigrationNudge;
|
||||
settings.setValue(SettingScope.User, 'extensions', extensionSettings);
|
||||
};
|
||||
|
||||
const onWorkspaceMigrationDialogClose = () => {
|
||||
setShowWorkspaceMigrationDialog(false);
|
||||
};
|
||||
|
||||
return {
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
};
|
||||
}
|
||||
@@ -10,6 +10,7 @@ export enum ExtensionUpdateState {
|
||||
CHECKING_FOR_UPDATES = 'checking for updates',
|
||||
UPDATED_NEEDS_RESTART = 'updated, needs restart',
|
||||
UPDATING = 'updating',
|
||||
UPDATED = 'updated',
|
||||
UPDATE_AVAILABLE = 'update available',
|
||||
UP_TO_DATE = 'up to date',
|
||||
ERROR = 'error',
|
||||
|
||||
@@ -17,10 +17,16 @@
|
||||
* resolveEnvVarsInString("URL: ${BASE_URL}/api") // Returns "URL: https://api.example.com/api"
|
||||
* resolveEnvVarsInString("Missing: $UNDEFINED_VAR") // Returns "Missing: $UNDEFINED_VAR"
|
||||
*/
|
||||
export function resolveEnvVarsInString(value: string): string {
|
||||
export function resolveEnvVarsInString(
|
||||
value: string,
|
||||
customEnv?: Record<string, string>,
|
||||
): string {
|
||||
const envVarRegex = /\$(?:(\w+)|{([^}]+)})/g; // Find $VAR_NAME or ${VAR_NAME}
|
||||
return value.replace(envVarRegex, (match, varName1, varName2) => {
|
||||
const varName = varName1 || varName2;
|
||||
if (customEnv && typeof customEnv[varName] === 'string') {
|
||||
return customEnv[varName];
|
||||
}
|
||||
if (process && process.env && typeof process.env[varName] === 'string') {
|
||||
return process.env[varName]!;
|
||||
}
|
||||
@@ -47,8 +53,11 @@ export function resolveEnvVarsInString(value: string): string {
|
||||
* };
|
||||
* const resolved = resolveEnvVarsInObject(config);
|
||||
*/
|
||||
export function resolveEnvVarsInObject<T>(obj: T): T {
|
||||
return resolveEnvVarsInObjectInternal(obj, new WeakSet());
|
||||
export function resolveEnvVarsInObject<T>(
|
||||
obj: T,
|
||||
customEnv?: Record<string, string>,
|
||||
): T {
|
||||
return resolveEnvVarsInObjectInternal(obj, new WeakSet(), customEnv);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -61,6 +70,7 @@ export function resolveEnvVarsInObject<T>(obj: T): T {
|
||||
function resolveEnvVarsInObjectInternal<T>(
|
||||
obj: T,
|
||||
visited: WeakSet<object>,
|
||||
customEnv?: Record<string, string>,
|
||||
): T {
|
||||
if (
|
||||
obj === null ||
|
||||
@@ -72,7 +82,7 @@ function resolveEnvVarsInObjectInternal<T>(
|
||||
}
|
||||
|
||||
if (typeof obj === 'string') {
|
||||
return resolveEnvVarsInString(obj) as unknown as T;
|
||||
return resolveEnvVarsInString(obj, customEnv) as unknown as T;
|
||||
}
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
@@ -84,7 +94,7 @@ function resolveEnvVarsInObjectInternal<T>(
|
||||
|
||||
visited.add(obj);
|
||||
const result = obj.map((item) =>
|
||||
resolveEnvVarsInObjectInternal(item, visited),
|
||||
resolveEnvVarsInObjectInternal(item, visited, customEnv),
|
||||
) as unknown as T;
|
||||
visited.delete(obj);
|
||||
return result;
|
||||
@@ -101,7 +111,11 @@ function resolveEnvVarsInObjectInternal<T>(
|
||||
const newObj = { ...obj } as T;
|
||||
for (const key in newObj) {
|
||||
if (Object.prototype.hasOwnProperty.call(newObj, key)) {
|
||||
newObj[key] = resolveEnvVarsInObjectInternal(newObj[key], visited);
|
||||
newObj[key] = resolveEnvVarsInObjectInternal(
|
||||
newObj[key],
|
||||
visited,
|
||||
customEnv,
|
||||
);
|
||||
}
|
||||
}
|
||||
visited.delete(obj as object);
|
||||
|
||||
@@ -11,7 +11,9 @@
|
||||
"src/**/*.ts",
|
||||
"src/**/*.tsx",
|
||||
"src/**/*.json",
|
||||
"./package.json"
|
||||
"./package.json",
|
||||
"../core/src/utils/toml-to-markdown-converter.test.ts",
|
||||
"../core/src/utils/toml-to-markdown-converter.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules",
|
||||
|
||||
@@ -80,6 +80,10 @@ import {
|
||||
type TelemetryTarget,
|
||||
uiTelemetryService,
|
||||
} from '../telemetry/index.js';
|
||||
import {
|
||||
ExtensionManager,
|
||||
type Extension,
|
||||
} from '../extension/extensionManager.js';
|
||||
|
||||
// Utils
|
||||
import { shouldAttemptBrowserLaunch } from '../utils/browser.js';
|
||||
@@ -102,6 +106,7 @@ import {
|
||||
type ResumedSessionData,
|
||||
} from '../services/sessionService.js';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import { loadServerHierarchicalMemory } from '../utils/memoryDiscovery.js';
|
||||
|
||||
import {
|
||||
ModelsConfig,
|
||||
@@ -198,20 +203,17 @@ export interface GitCoAuthorSettings {
|
||||
email?: string;
|
||||
}
|
||||
|
||||
export interface GeminiCLIExtension {
|
||||
name: string;
|
||||
version: string;
|
||||
isActive: boolean;
|
||||
path: string;
|
||||
installMetadata?: ExtensionInstallMetadata;
|
||||
}
|
||||
|
||||
export interface ExtensionInstallMetadata {
|
||||
source: string;
|
||||
type: 'git' | 'local' | 'link' | 'github-release';
|
||||
type: 'git' | 'local' | 'link' | 'github-release' | 'marketplace';
|
||||
releaseTag?: string; // Only present for github-release installs.
|
||||
ref?: string;
|
||||
autoUpdate?: boolean;
|
||||
allowPreRelease?: boolean;
|
||||
marketplace?: {
|
||||
marketplaceSource: string;
|
||||
pluginName: string;
|
||||
};
|
||||
}
|
||||
|
||||
export const DEFAULT_TRUNCATE_TOOL_OUTPUT_THRESHOLD = 25_000;
|
||||
@@ -309,14 +311,15 @@ export interface ConfigParameters {
|
||||
includeDirectories?: string[];
|
||||
bugCommand?: BugCommandSettings;
|
||||
model?: string;
|
||||
extensionContextFilePaths?: string[];
|
||||
outputLanguageFilePath?: string;
|
||||
maxSessionTurns?: number;
|
||||
sessionTokenLimit?: number;
|
||||
experimentalSkills?: boolean;
|
||||
experimentalZedIntegration?: boolean;
|
||||
listExtensions?: boolean;
|
||||
extensions?: GeminiCLIExtension[];
|
||||
blockedMcpServers?: Array<{ name: string; extensionName: string }>;
|
||||
overrideExtensions?: string[];
|
||||
allowedMcpServers?: string[];
|
||||
excludedMcpServers?: string[];
|
||||
noBrowser?: boolean;
|
||||
summarizeToolOutput?: Record<string, SummarizeToolOutputSettings>;
|
||||
folderTrustFeature?: boolean;
|
||||
@@ -331,6 +334,8 @@ export interface ConfigParameters {
|
||||
generationConfigSources?: ContentGeneratorConfigSources;
|
||||
cliVersion?: string;
|
||||
loadMemoryFromIncludeDirectories?: boolean;
|
||||
importFormat?: 'tree' | 'flat';
|
||||
discoveryMaxDirs?: number;
|
||||
chatRecording?: boolean;
|
||||
// Web search providers
|
||||
webSearch?: {
|
||||
@@ -349,7 +354,6 @@ export interface ConfigParameters {
|
||||
shouldUseNodePtyShell?: boolean;
|
||||
skipNextSpeakerCheck?: boolean;
|
||||
shellExecutionConfig?: ShellExecutionConfig;
|
||||
extensionManagement?: boolean;
|
||||
skipLoopDetection?: boolean;
|
||||
vlmSwitchMode?: string;
|
||||
truncateToolOutputThreshold?: number;
|
||||
@@ -404,6 +408,7 @@ export class Config {
|
||||
private toolRegistry!: ToolRegistry;
|
||||
private promptRegistry!: PromptRegistry;
|
||||
private subagentManager!: SubagentManager;
|
||||
private extensionManager!: ExtensionManager;
|
||||
private skillManager: SkillManager | null = null;
|
||||
private fileSystemService: FileSystemService;
|
||||
private contentGeneratorConfig!: ContentGeneratorConfig;
|
||||
@@ -429,6 +434,8 @@ export class Config {
|
||||
private readonly toolCallCommand: string | undefined;
|
||||
private readonly mcpServerCommand: string | undefined;
|
||||
private mcpServers: Record<string, MCPServerConfig> | undefined;
|
||||
private readonly allowedMcpServers?: string[];
|
||||
private readonly excludedMcpServers?: string[];
|
||||
private sessionSubagents: SubagentConfig[];
|
||||
private userMemory: string;
|
||||
private sdkMode: boolean;
|
||||
@@ -455,7 +462,7 @@ export class Config {
|
||||
private readonly proxy: string | undefined;
|
||||
private readonly cwd: string;
|
||||
private readonly bugCommand: BugCommandSettings | undefined;
|
||||
private readonly extensionContextFilePaths: string[];
|
||||
private readonly outputLanguageFilePath?: string;
|
||||
private readonly noBrowser: boolean;
|
||||
private readonly folderTrustFeature: boolean;
|
||||
private readonly folderTrust: boolean;
|
||||
@@ -464,11 +471,8 @@ export class Config {
|
||||
private readonly maxSessionTurns: number;
|
||||
private readonly sessionTokenLimit: number;
|
||||
private readonly listExtensions: boolean;
|
||||
private readonly _extensions: GeminiCLIExtension[];
|
||||
private readonly _blockedMcpServers: Array<{
|
||||
name: string;
|
||||
extensionName: string;
|
||||
}>;
|
||||
private readonly overrideExtensions?: string[];
|
||||
|
||||
private readonly summarizeToolOutput:
|
||||
| Record<string, SummarizeToolOutputSettings>
|
||||
| undefined;
|
||||
@@ -477,6 +481,8 @@ export class Config {
|
||||
private readonly experimentalSkills: boolean = false;
|
||||
private readonly chatRecordingEnabled: boolean;
|
||||
private readonly loadMemoryFromIncludeDirectories: boolean = false;
|
||||
private readonly importFormat: 'tree' | 'flat';
|
||||
private readonly discoveryMaxDirs: number;
|
||||
private readonly webSearch?: {
|
||||
provider: Array<{
|
||||
type: 'tavily' | 'google' | 'dashscope';
|
||||
@@ -493,7 +499,6 @@ export class Config {
|
||||
private readonly shouldUseNodePtyShell: boolean;
|
||||
private readonly skipNextSpeakerCheck: boolean;
|
||||
private shellExecutionConfig: ShellExecutionConfig;
|
||||
private readonly extensionManagement: boolean = true;
|
||||
private readonly skipLoopDetection: boolean;
|
||||
private readonly skipStartupContext: boolean;
|
||||
private readonly vlmSwitchMode: string | undefined;
|
||||
@@ -534,6 +539,8 @@ export class Config {
|
||||
this.toolCallCommand = params.toolCallCommand;
|
||||
this.mcpServerCommand = params.mcpServerCommand;
|
||||
this.mcpServers = params.mcpServers;
|
||||
this.allowedMcpServers = params.allowedMcpServers;
|
||||
this.excludedMcpServers = params.excludedMcpServers;
|
||||
this.sessionSubagents = params.sessionSubagents ?? [];
|
||||
this.sdkMode = params.sdkMode ?? false;
|
||||
this.userMemory = params.userMemory ?? '';
|
||||
@@ -556,6 +563,7 @@ export class Config {
|
||||
email: 'qwen-coder@alibabacloud.com',
|
||||
};
|
||||
this.usageStatisticsEnabled = params.usageStatisticsEnabled ?? true;
|
||||
this.outputLanguageFilePath = params.outputLanguageFilePath;
|
||||
|
||||
this.fileFiltering = {
|
||||
respectGitIgnore: params.fileFiltering?.respectGitIgnore ?? true,
|
||||
@@ -569,15 +577,13 @@ export class Config {
|
||||
this.cwd = params.cwd ?? process.cwd();
|
||||
this.fileDiscoveryService = params.fileDiscoveryService ?? null;
|
||||
this.bugCommand = params.bugCommand;
|
||||
this.extensionContextFilePaths = params.extensionContextFilePaths ?? [];
|
||||
this.maxSessionTurns = params.maxSessionTurns ?? -1;
|
||||
this.sessionTokenLimit = params.sessionTokenLimit ?? -1;
|
||||
this.experimentalZedIntegration =
|
||||
params.experimentalZedIntegration ?? false;
|
||||
this.experimentalSkills = params.experimentalSkills ?? false;
|
||||
this.listExtensions = params.listExtensions ?? false;
|
||||
this._extensions = params.extensions ?? [];
|
||||
this._blockedMcpServers = params.blockedMcpServers ?? [];
|
||||
this.overrideExtensions = params.overrideExtensions;
|
||||
this.noBrowser = params.noBrowser ?? false;
|
||||
this.summarizeToolOutput = params.summarizeToolOutput;
|
||||
this.folderTrustFeature = params.folderTrustFeature ?? false;
|
||||
@@ -590,6 +596,8 @@ export class Config {
|
||||
|
||||
this.loadMemoryFromIncludeDirectories =
|
||||
params.loadMemoryFromIncludeDirectories ?? false;
|
||||
this.importFormat = params.importFormat ?? 'tree';
|
||||
this.discoveryMaxDirs = params.discoveryMaxDirs ?? 200;
|
||||
this.chatCompression = params.chatCompression;
|
||||
this.interactive = params.interactive ?? false;
|
||||
this.trustedFolder = params.trustedFolder;
|
||||
@@ -615,7 +623,6 @@ export class Config {
|
||||
params.truncateToolOutputLines ?? DEFAULT_TRUNCATE_TOOL_OUTPUT_LINES;
|
||||
this.enableToolOutputTruncation = params.enableToolOutputTruncation ?? true;
|
||||
this.useSmartEdit = params.useSmartEdit ?? false;
|
||||
this.extensionManagement = params.extensionManagement ?? true;
|
||||
this.channel = params.channel;
|
||||
this.storage = new Storage(this.targetDir);
|
||||
this.vlmSwitchMode = params.vlmSwitchMode;
|
||||
@@ -653,6 +660,11 @@ export class Config {
|
||||
this.chatRecordingService = this.chatRecordingEnabled
|
||||
? new ChatRecordingService(this)
|
||||
: undefined;
|
||||
this.extensionManager = new ExtensionManager({
|
||||
workspaceDir: this.targetDir,
|
||||
enabledExtensionOverrides: this.overrideExtensions,
|
||||
isWorkspaceTrusted: this.isTrustedFolder(),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -671,6 +683,9 @@ export class Config {
|
||||
await this.getGitService();
|
||||
}
|
||||
this.promptRegistry = new PromptRegistry();
|
||||
this.extensionManager.setConfig(this);
|
||||
await this.extensionManager.refreshCache();
|
||||
|
||||
this.subagentManager = new SubagentManager(this);
|
||||
if (this.getExperimentalSkills()) {
|
||||
this.skillManager = new SkillManager(this);
|
||||
@@ -682,6 +697,10 @@ export class Config {
|
||||
this.subagentManager.loadSessionSubagents(this.sessionSubagents);
|
||||
}
|
||||
|
||||
await this.extensionManager.refreshCache();
|
||||
|
||||
await this.refreshHierarchicalMemory();
|
||||
|
||||
this.toolRegistry = await this.createToolRegistry(
|
||||
options?.sendSdkMcpMessage,
|
||||
);
|
||||
@@ -691,6 +710,24 @@ export class Config {
|
||||
logStartSession(this, new StartSessionEvent(this));
|
||||
}
|
||||
|
||||
async refreshHierarchicalMemory(): Promise<void> {
|
||||
const { memoryContent, fileCount } = await loadServerHierarchicalMemory(
|
||||
this.getWorkingDir(),
|
||||
this.shouldLoadMemoryFromIncludeDirectories()
|
||||
? this.getWorkspaceContext().getDirectories()
|
||||
: [],
|
||||
this.getDebugMode(),
|
||||
this.getFileService(),
|
||||
this.getExtensionContextFilePaths(),
|
||||
this.getFolderTrust(),
|
||||
this.getImportFormat(),
|
||||
this.getFileFilteringOptions(),
|
||||
this.getDiscoveryMaxDirs(),
|
||||
);
|
||||
this.setUserMemory(memoryContent);
|
||||
this.setGeminiMdFileCount(fileCount);
|
||||
}
|
||||
|
||||
getContentGenerator(): ContentGenerator {
|
||||
return this.contentGenerator;
|
||||
}
|
||||
@@ -812,6 +849,14 @@ export class Config {
|
||||
return this.loadMemoryFromIncludeDirectories;
|
||||
}
|
||||
|
||||
getImportFormat(): 'tree' | 'flat' {
|
||||
return this.importFormat;
|
||||
}
|
||||
|
||||
getDiscoveryMaxDirs(): number {
|
||||
return this.discoveryMaxDirs;
|
||||
}
|
||||
|
||||
getContentGeneratorConfig(): ContentGeneratorConfig {
|
||||
return this.contentGeneratorConfig;
|
||||
}
|
||||
@@ -1005,7 +1050,14 @@ export class Config {
|
||||
}
|
||||
|
||||
getExcludeTools(): string[] | undefined {
|
||||
return this.excludeTools;
|
||||
const allExcludeTools = new Set(this.excludeTools || []);
|
||||
const extensions = this.getActiveExtensions();
|
||||
for (const extension of extensions) {
|
||||
for (const tool of extension.config.excludeTools || []) {
|
||||
allExcludeTools.add(tool);
|
||||
}
|
||||
}
|
||||
return [...allExcludeTools];
|
||||
}
|
||||
|
||||
getToolDiscoveryCommand(): string | undefined {
|
||||
@@ -1021,7 +1073,37 @@ export class Config {
|
||||
}
|
||||
|
||||
getMcpServers(): Record<string, MCPServerConfig> | undefined {
|
||||
return this.mcpServers;
|
||||
let mcpServers = { ...(this.mcpServers || {}) };
|
||||
const extensions = this.getActiveExtensions();
|
||||
for (const extension of extensions) {
|
||||
Object.entries(extension.config.mcpServers || {}).forEach(
|
||||
([key, server]) => {
|
||||
if (mcpServers[key]) return;
|
||||
mcpServers[key] = {
|
||||
...server,
|
||||
extensionName: extension.config.name,
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
if (this.allowedMcpServers) {
|
||||
mcpServers = Object.fromEntries(
|
||||
Object.entries(mcpServers).filter(([key]) =>
|
||||
this.allowedMcpServers?.includes(key),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
if (this.excludedMcpServers) {
|
||||
mcpServers = Object.fromEntries(
|
||||
Object.entries(mcpServers).filter(
|
||||
([key]) => !this.excludedMcpServers?.includes(key),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
return mcpServers;
|
||||
}
|
||||
|
||||
addMcpServers(servers: Record<string, MCPServerConfig>): void {
|
||||
@@ -1200,7 +1282,13 @@ export class Config {
|
||||
}
|
||||
|
||||
getExtensionContextFilePaths(): string[] {
|
||||
return this.extensionContextFilePaths;
|
||||
const extensionContextFilePaths = this.getActiveExtensions().flatMap(
|
||||
(e) => e.contextFiles,
|
||||
);
|
||||
return [
|
||||
...extensionContextFilePaths,
|
||||
...(this.outputLanguageFilePath ? [this.outputLanguageFilePath] : []),
|
||||
];
|
||||
}
|
||||
|
||||
getExperimentalZedIntegration(): boolean {
|
||||
@@ -1215,16 +1303,54 @@ export class Config {
|
||||
return this.listExtensions;
|
||||
}
|
||||
|
||||
getExtensionManagement(): boolean {
|
||||
return this.extensionManagement;
|
||||
getExtensionManager(): ExtensionManager {
|
||||
return this.extensionManager;
|
||||
}
|
||||
|
||||
getExtensions(): GeminiCLIExtension[] {
|
||||
return this._extensions;
|
||||
getExtensions(): Extension[] {
|
||||
const extensions = this.extensionManager.getLoadedExtensions();
|
||||
if (this.overrideExtensions) {
|
||||
return extensions.filter((e) =>
|
||||
this.overrideExtensions?.includes(e.name),
|
||||
);
|
||||
} else {
|
||||
return extensions;
|
||||
}
|
||||
}
|
||||
|
||||
getActiveExtensions(): Extension[] {
|
||||
return this.getExtensions().filter((e) => e.isActive);
|
||||
}
|
||||
|
||||
getBlockedMcpServers(): Array<{ name: string; extensionName: string }> {
|
||||
return this._blockedMcpServers;
|
||||
const mcpServers = { ...(this.mcpServers || {}) };
|
||||
const extensions = this.getActiveExtensions();
|
||||
for (const extension of extensions) {
|
||||
Object.entries(extension.config.mcpServers || {}).forEach(
|
||||
([key, server]) => {
|
||||
if (mcpServers[key]) return;
|
||||
mcpServers[key] = {
|
||||
...server,
|
||||
extensionName: extension.config.name,
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
const blockedMcpServers: Array<{ name: string; extensionName: string }> =
|
||||
[];
|
||||
|
||||
if (this.allowedMcpServers) {
|
||||
Object.entries(mcpServers).forEach(([key, server]) => {
|
||||
const isAllowed = this.allowedMcpServers?.includes(key);
|
||||
if (!isAllowed) {
|
||||
blockedMcpServers.push({
|
||||
name: key,
|
||||
extensionName: server.extensionName || '',
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
return blockedMcpServers;
|
||||
}
|
||||
|
||||
getNoBrowser(): boolean {
|
||||
|
||||
121
packages/core/src/extension/claude-converter.test.ts
Normal file
121
packages/core/src/extension/claude-converter.test.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
convertClaudeToQwenConfig,
|
||||
mergeClaudeConfigs,
|
||||
isClaudePluginConfig,
|
||||
type ClaudePluginConfig,
|
||||
type ClaudeMarketplacePluginConfig,
|
||||
} from './claude-converter.js';
|
||||
|
||||
describe('convertClaudeToQwenConfig', () => {
|
||||
it('should convert basic Claude config', () => {
|
||||
const claudeConfig: ClaudePluginConfig = {
|
||||
name: 'claude-plugin',
|
||||
version: '1.0.0',
|
||||
};
|
||||
|
||||
const result = convertClaudeToQwenConfig(claudeConfig);
|
||||
|
||||
expect(result.name).toBe('claude-plugin');
|
||||
expect(result.version).toBe('1.0.0');
|
||||
});
|
||||
|
||||
it('should convert config with basic fields only', () => {
|
||||
const claudeConfig: ClaudePluginConfig = {
|
||||
name: 'full-plugin',
|
||||
version: '1.0.0',
|
||||
commands: 'commands',
|
||||
agents: ['agents/agent1.md'],
|
||||
skills: ['skills/skill1'],
|
||||
};
|
||||
|
||||
const result = convertClaudeToQwenConfig(claudeConfig);
|
||||
|
||||
// Commands, skills, agents are collected as directories, not in config
|
||||
expect(result.name).toBe('full-plugin');
|
||||
expect(result.version).toBe('1.0.0');
|
||||
expect(result.mcpServers).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should throw error for missing name', () => {
|
||||
const invalidConfig = {
|
||||
version: '1.0.0',
|
||||
} as ClaudePluginConfig;
|
||||
|
||||
expect(() => convertClaudeToQwenConfig(invalidConfig)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('mergeClaudeConfigs', () => {
|
||||
it('should merge marketplace and plugin configs', () => {
|
||||
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
|
||||
name: 'marketplace-name',
|
||||
version: '2.0.0',
|
||||
source: 'github:org/repo',
|
||||
description: 'From marketplace',
|
||||
};
|
||||
|
||||
const pluginConfig: ClaudePluginConfig = {
|
||||
name: 'plugin-name',
|
||||
version: '1.0.0',
|
||||
commands: 'commands',
|
||||
};
|
||||
|
||||
const merged = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
|
||||
|
||||
// Marketplace takes precedence
|
||||
expect(merged.name).toBe('marketplace-name');
|
||||
expect(merged.version).toBe('2.0.0');
|
||||
expect(merged.description).toBe('From marketplace');
|
||||
// Plugin fields preserved
|
||||
expect(merged.commands).toBe('commands');
|
||||
});
|
||||
|
||||
it('should work with strict=false and no plugin config', () => {
|
||||
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
|
||||
name: 'standalone',
|
||||
version: '1.0.0',
|
||||
source: 'local',
|
||||
strict: false,
|
||||
commands: 'commands',
|
||||
};
|
||||
|
||||
const merged = mergeClaudeConfigs(marketplacePlugin);
|
||||
|
||||
expect(merged.name).toBe('standalone');
|
||||
expect(merged.commands).toBe('commands');
|
||||
});
|
||||
|
||||
it('should throw error for strict mode without plugin config', () => {
|
||||
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
|
||||
name: 'strict-plugin',
|
||||
version: '1.0.0',
|
||||
source: 'github:org/repo',
|
||||
strict: true,
|
||||
};
|
||||
|
||||
expect(() => mergeClaudeConfigs(marketplacePlugin)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('isClaudePluginConfig', () => {
|
||||
it('should identify Claude plugin directory', () => {
|
||||
const extensionDir = '/tmp/test-extension';
|
||||
const marketplace = {
|
||||
marketplaceSource: 'https://test.com',
|
||||
pluginName: 'test-plugin',
|
||||
};
|
||||
|
||||
// This will check if marketplace.json exists and contains the plugin
|
||||
// Note: In real usage, this requires actual file system setup
|
||||
expect(typeof isClaudePluginConfig(extensionDir, marketplace)).toBe(
|
||||
'boolean',
|
||||
);
|
||||
});
|
||||
});
|
||||
745
packages/core/src/extension/claude-converter.ts
Normal file
745
packages/core/src/extension/claude-converter.ts
Normal file
@@ -0,0 +1,745 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* Converter for Claude Code plugins to Qwen Code format.
|
||||
*/
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import { glob } from 'glob';
|
||||
import type { ExtensionConfig } from './extensionManager.js';
|
||||
import { ExtensionStorage } from './storage.js';
|
||||
import type {
|
||||
ExtensionInstallMetadata,
|
||||
MCPServerConfig,
|
||||
} from '../config/config.js';
|
||||
import { cloneFromGit, downloadFromGitHubRelease } from './github.js';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { copyDirectory } from './gemini-converter.js';
|
||||
import {
|
||||
parse as parseYaml,
|
||||
stringify as stringifyYaml,
|
||||
} from '../utils/yaml-parser.js';
|
||||
|
||||
export interface ClaudePluginConfig {
|
||||
name: string;
|
||||
version: string;
|
||||
description?: string;
|
||||
author?: { name?: string; email?: string; url?: string };
|
||||
homepage?: string;
|
||||
repository?: string;
|
||||
license?: string;
|
||||
keywords?: string[];
|
||||
commands?: string | string[];
|
||||
agents?: string | string[];
|
||||
skills?: string | string[];
|
||||
hooks?: string;
|
||||
mcpServers?: string | Record<string, MCPServerConfig>;
|
||||
outputStyles?: string | string[];
|
||||
lspServers?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Claude Code subagent configuration format.
|
||||
* Based on https://code.claude.com/docs/en/sub-agents
|
||||
*/
|
||||
export interface ClaudeAgentConfig {
|
||||
/** Unique identifier using lowercase letters and hyphens */
|
||||
name: string;
|
||||
/** When Claude should delegate to this subagent */
|
||||
description: string;
|
||||
/** Tools the subagent can use. Inherits all tools if omitted */
|
||||
tools?: string[];
|
||||
/** Tools to deny, removed from inherited or specified list */
|
||||
disallowedTools?: string[];
|
||||
/** Model to use: sonnet, opus, haiku, or inherit */
|
||||
model?: string;
|
||||
/** Permission mode: default, acceptEdits, dontAsk, bypassPermissions, or plan */
|
||||
permissionMode?: string;
|
||||
/** Skills to load into the subagent's context at startup */
|
||||
skills?: string[];
|
||||
/** Hooks configuration */
|
||||
hooks?: unknown;
|
||||
/** System prompt content */
|
||||
systemPrompt?: string;
|
||||
/** subagent color */
|
||||
color?: string;
|
||||
}
|
||||
|
||||
export type ClaudePluginSource =
|
||||
| { source: 'github'; repo: string }
|
||||
| { source: 'url'; url: string };
|
||||
|
||||
export interface ClaudeMarketplacePluginConfig extends ClaudePluginConfig {
|
||||
source: string | ClaudePluginSource;
|
||||
category?: string;
|
||||
strict?: boolean;
|
||||
tags?: string[];
|
||||
}
|
||||
|
||||
export interface ClaudeMarketplaceConfig {
|
||||
name: string;
|
||||
owner: { name: string; email: string };
|
||||
plugins: ClaudeMarketplacePluginConfig[];
|
||||
metadata?: { description?: string; version?: string; pluginRoot?: string };
|
||||
}
|
||||
|
||||
const CLAUDE_TOOLS_MAPPING: Record<string, string | string[]> = {
|
||||
AskUserQuestion: 'None',
|
||||
Bash: 'Shell',
|
||||
BashOutput: 'None',
|
||||
Edit: 'Edit',
|
||||
ExitPlanMode: 'ExitPlanMode',
|
||||
Glob: 'Glob',
|
||||
Grep: 'Grep',
|
||||
KillShell: 'None',
|
||||
NotebookEdit: 'None',
|
||||
Read: ['ReadFile', 'ReadManyFiles'],
|
||||
Skill: 'Skill',
|
||||
Task: 'Task',
|
||||
TodoWrite: 'TodoWrite',
|
||||
WebFetch: 'WebFetch',
|
||||
WebSearch: 'WebSearch',
|
||||
Write: 'WriteFile',
|
||||
LS: 'ListFiles',
|
||||
};
|
||||
|
||||
const claudeBuildInToolsTransform = (tools: string[]): string[] => {
|
||||
const transformedTools: string[] = [];
|
||||
tools.forEach((tool) => {
|
||||
if (!CLAUDE_TOOLS_MAPPING[tool]) {
|
||||
transformedTools.push(tool);
|
||||
} else {
|
||||
if (CLAUDE_TOOLS_MAPPING[tool] === 'None') {
|
||||
return;
|
||||
} else if (Array.isArray(CLAUDE_TOOLS_MAPPING[tool])) {
|
||||
transformedTools.push(...CLAUDE_TOOLS_MAPPING[tool]);
|
||||
} else {
|
||||
transformedTools.push(CLAUDE_TOOLS_MAPPING[tool]);
|
||||
}
|
||||
}
|
||||
});
|
||||
return transformedTools;
|
||||
};
|
||||
|
||||
/**
|
||||
* Parses a value that can be either a comma-separated string or an array.
|
||||
* Claude agent config can have tools like 'Glob, Grep, Read' or ['Glob', 'Grep', 'Read']
|
||||
* @param value The value to parse
|
||||
* @returns Array of strings or undefined
|
||||
*/
|
||||
function parseStringOrArray(value: unknown): string[] | undefined {
|
||||
if (value === undefined || value === null) {
|
||||
return undefined;
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
return value.map(String);
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
// Split by comma and trim whitespace
|
||||
return value
|
||||
.split(',')
|
||||
.map((s) => s.trim())
|
||||
.filter((s) => s.length > 0);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a Claude agent config to Qwen Code subagent format.
|
||||
* @param claudeAgent Claude agent configuration
|
||||
* @returns Converted agent config compatible with Qwen Code SubagentConfig
|
||||
*/
|
||||
export function convertClaudeAgentConfig(
|
||||
claudeAgent: ClaudeAgentConfig,
|
||||
): Record<string, unknown> {
|
||||
// Base config with required fields
|
||||
const qwenAgent: Record<string, unknown> = {
|
||||
name: claudeAgent.name,
|
||||
description: claudeAgent.description,
|
||||
};
|
||||
|
||||
if (claudeAgent.color) {
|
||||
qwenAgent['color'] = claudeAgent.color;
|
||||
}
|
||||
|
||||
// Convert system prompt if present
|
||||
if (claudeAgent.systemPrompt) {
|
||||
qwenAgent['systemPrompt'] = claudeAgent.systemPrompt;
|
||||
}
|
||||
|
||||
// Convert tools using claudeBuildInToolsTransform
|
||||
if (claudeAgent.tools && claudeAgent.tools.length > 0) {
|
||||
qwenAgent['tools'] = claudeBuildInToolsTransform(claudeAgent.tools);
|
||||
}
|
||||
|
||||
// Convert model to modelConfig
|
||||
if (claudeAgent.model) {
|
||||
// Map Claude model names to Qwen model config
|
||||
// Claude uses: sonnet, opus, haiku, inherit
|
||||
// We preserve the model name for now, the actual mapping will be handled at runtime
|
||||
qwenAgent['modelConfig'] = {
|
||||
model: claudeAgent.model === 'inherit' ? undefined : claudeAgent.model,
|
||||
};
|
||||
}
|
||||
|
||||
// Preserve unsupported fields as-is for potential future compatibility
|
||||
// These fields are not supported by Qwen Code SubagentConfig but we keep them
|
||||
if (claudeAgent.permissionMode) {
|
||||
qwenAgent['permissionMode'] = claudeAgent.permissionMode;
|
||||
}
|
||||
if (claudeAgent.hooks) {
|
||||
qwenAgent['hooks'] = claudeAgent.hooks;
|
||||
}
|
||||
if (claudeAgent.skills && claudeAgent.skills.length > 0) {
|
||||
qwenAgent['skills'] = claudeAgent.skills;
|
||||
}
|
||||
if (claudeAgent.disallowedTools && claudeAgent.disallowedTools.length > 0) {
|
||||
qwenAgent['disallowedTools'] = claudeAgent.disallowedTools;
|
||||
}
|
||||
|
||||
return qwenAgent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts all agent files in a directory from Claude format to Qwen format.
|
||||
* Parses the YAML frontmatter, converts the configuration, and writes back.
|
||||
* @param agentsDir Directory containing agent markdown files
|
||||
*/
|
||||
async function convertAgentFiles(agentsDir: string): Promise<void> {
|
||||
if (!fs.existsSync(agentsDir)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const files = await fs.promises.readdir(agentsDir);
|
||||
|
||||
for (const file of files) {
|
||||
if (!file.endsWith('.md')) continue;
|
||||
|
||||
const filePath = path.join(agentsDir, file);
|
||||
|
||||
try {
|
||||
const content = await fs.promises.readFile(filePath, 'utf-8');
|
||||
|
||||
// Parse frontmatter
|
||||
const frontmatterRegex = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/;
|
||||
const match = content.match(frontmatterRegex);
|
||||
|
||||
if (!match) {
|
||||
// No frontmatter, skip this file
|
||||
continue;
|
||||
}
|
||||
|
||||
const [, frontmatterYaml, body] = match;
|
||||
const frontmatter = parseYaml(frontmatterYaml) as Record<string, unknown>;
|
||||
|
||||
// Build Claude agent config from frontmatter
|
||||
// Note: Claude tools/disallowedTools/skills can be comma-separated strings like 'Glob, Grep, Read'
|
||||
const claudeAgent: ClaudeAgentConfig = {
|
||||
name: String(frontmatter['name'] || ''),
|
||||
description: String(frontmatter['description'] || ''),
|
||||
tools: parseStringOrArray(frontmatter['tools']),
|
||||
disallowedTools: parseStringOrArray(frontmatter['disallowedTools']),
|
||||
model: frontmatter['model'] as string | undefined,
|
||||
permissionMode: frontmatter['permissionMode'] as string | undefined,
|
||||
skills: parseStringOrArray(frontmatter['skills']),
|
||||
hooks: frontmatter['hooks'],
|
||||
color: frontmatter['color'] as string | undefined,
|
||||
systemPrompt: body.trim(),
|
||||
};
|
||||
|
||||
// Convert to Qwen format
|
||||
const qwenAgent = convertClaudeAgentConfig(claudeAgent);
|
||||
|
||||
// Build new frontmatter (excluding systemPrompt as it goes in body)
|
||||
const newFrontmatter: Record<string, unknown> = {};
|
||||
for (const [key, value] of Object.entries(qwenAgent)) {
|
||||
if (key !== 'systemPrompt' && value !== undefined) {
|
||||
newFrontmatter[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
// Write converted content back
|
||||
const newYaml = stringifyYaml(newFrontmatter);
|
||||
const systemPrompt = (qwenAgent['systemPrompt'] as string) || body.trim();
|
||||
const newContent = `---
|
||||
${newYaml}
|
||||
---
|
||||
|
||||
${systemPrompt}
|
||||
`;
|
||||
|
||||
await fs.promises.writeFile(filePath, newContent, 'utf-8');
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
`[Claude Converter] Failed to convert agent file ${filePath}: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a Claude plugin config to Qwen Code format.
|
||||
* @param claudeConfig Claude plugin configuration
|
||||
* @returns Qwen ExtensionConfig
|
||||
*/
|
||||
export function convertClaudeToQwenConfig(
|
||||
claudeConfig: ClaudePluginConfig,
|
||||
): ExtensionConfig {
|
||||
// Validate required fields
|
||||
if (!claudeConfig.name || !claudeConfig.version) {
|
||||
throw new Error('Claude plugin config must have name and version fields');
|
||||
}
|
||||
|
||||
// Parse MCP servers
|
||||
let mcpServers: Record<string, MCPServerConfig> | undefined;
|
||||
if (claudeConfig.mcpServers) {
|
||||
if (typeof claudeConfig.mcpServers === 'string') {
|
||||
// TODO: Load from file path
|
||||
console.warn(
|
||||
`[Claude Converter] MCP servers path not yet supported: ${claudeConfig.mcpServers}`,
|
||||
);
|
||||
} else {
|
||||
mcpServers = claudeConfig.mcpServers;
|
||||
}
|
||||
}
|
||||
|
||||
// Warn about unsupported fields
|
||||
if (claudeConfig.hooks) {
|
||||
console.warn(
|
||||
`[Claude Converter] Hooks are not yet supported in ${claudeConfig.name}`,
|
||||
);
|
||||
}
|
||||
if (claudeConfig.outputStyles) {
|
||||
console.warn(
|
||||
`[Claude Converter] Output styles are not yet supported in ${claudeConfig.name}`,
|
||||
);
|
||||
}
|
||||
if (claudeConfig.lspServers) {
|
||||
console.warn(
|
||||
`[Claude Converter] LSP servers are not yet supported in ${claudeConfig.name}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Direct field mapping - commands, skills, agents will be collected as folders
|
||||
return {
|
||||
name: claudeConfig.name,
|
||||
version: claudeConfig.version,
|
||||
mcpServers,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a complete Claude plugin package to Qwen Code format.
|
||||
* Creates a new temporary directory with:
|
||||
* 1. Converted qwen-extension.json
|
||||
* 2. Commands, skills, and agents collected to respective folders
|
||||
* 3. MCP servers resolved from JSON files if needed
|
||||
* 4. All other files preserved
|
||||
*/
|
||||
export async function convertClaudePluginPackage(
|
||||
extensionDir: string,
|
||||
pluginName: string,
|
||||
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
|
||||
// Step 1: Load marketplace.json
|
||||
const marketplaceJsonPath = path.join(
|
||||
extensionDir,
|
||||
'.claude-plugin',
|
||||
'marketplace.json',
|
||||
);
|
||||
if (!fs.existsSync(marketplaceJsonPath)) {
|
||||
throw new Error(
|
||||
`Marketplace configuration not found at ${marketplaceJsonPath}`,
|
||||
);
|
||||
}
|
||||
|
||||
const marketplaceContent = fs.readFileSync(marketplaceJsonPath, 'utf-8');
|
||||
const marketplaceConfig: ClaudeMarketplaceConfig =
|
||||
JSON.parse(marketplaceContent);
|
||||
|
||||
// Find the target plugin in marketplace
|
||||
const marketplacePlugin = marketplaceConfig.plugins.find(
|
||||
(p) => p.name === pluginName,
|
||||
);
|
||||
if (!marketplacePlugin) {
|
||||
throw new Error(`Plugin ${pluginName} not found in marketplace.json`);
|
||||
}
|
||||
|
||||
// Step 2: Resolve plugin source directory based on source field
|
||||
const pluginDir = path.join(
|
||||
extensionDir,
|
||||
`plugin${createHash('sha256').update(`${extensionDir}/${pluginName}`).digest('hex')}`,
|
||||
);
|
||||
await fs.promises.mkdir(pluginDir, { recursive: true });
|
||||
|
||||
const pluginSource = await resolvePluginSource(
|
||||
marketplacePlugin,
|
||||
extensionDir,
|
||||
pluginDir,
|
||||
);
|
||||
|
||||
if (!fs.existsSync(pluginSource)) {
|
||||
throw new Error(`Plugin source directory not found: ${pluginSource}`);
|
||||
}
|
||||
|
||||
// Step 3: Load and merge plugin.json if exists (based on strict mode)
|
||||
const strict = marketplacePlugin.strict ?? true;
|
||||
let mergedConfig: ClaudePluginConfig;
|
||||
|
||||
if (strict) {
|
||||
const pluginJsonPath = path.join(
|
||||
pluginSource,
|
||||
'.claude-plugin',
|
||||
'plugin.json',
|
||||
);
|
||||
if (!fs.existsSync(pluginJsonPath)) {
|
||||
throw new Error(`Strict mode requires plugin.json at ${pluginJsonPath}`);
|
||||
}
|
||||
const pluginContent = fs.readFileSync(pluginJsonPath, 'utf-8');
|
||||
const pluginConfig: ClaudePluginConfig = JSON.parse(pluginContent);
|
||||
mergedConfig = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
|
||||
} else {
|
||||
mergedConfig = marketplacePlugin as ClaudePluginConfig;
|
||||
}
|
||||
|
||||
// Step 4: Resolve MCP servers from JSON files if needed
|
||||
if (mergedConfig.mcpServers && typeof mergedConfig.mcpServers === 'string') {
|
||||
const mcpServersPath = path.isAbsolute(mergedConfig.mcpServers)
|
||||
? mergedConfig.mcpServers
|
||||
: path.join(pluginSource, mergedConfig.mcpServers);
|
||||
|
||||
if (fs.existsSync(mcpServersPath)) {
|
||||
try {
|
||||
const mcpContent = fs.readFileSync(mcpServersPath, 'utf-8');
|
||||
mergedConfig.mcpServers = JSON.parse(mcpContent) as Record<
|
||||
string,
|
||||
MCPServerConfig
|
||||
>;
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
`Failed to parse MCP servers file ${mcpServersPath}: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Step 5: Create temporary directory for converted extension
|
||||
const tmpDir = await ExtensionStorage.createTmpDir();
|
||||
|
||||
try {
|
||||
// Step 6: Copy plugin files to temporary directory
|
||||
await copyDirectory(pluginSource, tmpDir);
|
||||
|
||||
// Step 7: Collect commands to commands folder
|
||||
if (mergedConfig.commands) {
|
||||
const commandsDestDir = path.join(tmpDir, 'commands');
|
||||
await collectResources(
|
||||
mergedConfig.commands,
|
||||
pluginSource,
|
||||
commandsDestDir,
|
||||
);
|
||||
}
|
||||
|
||||
// Step 8: Collect skills to skills folder
|
||||
if (mergedConfig.skills) {
|
||||
const skillsDestDir = path.join(tmpDir, 'skills');
|
||||
await collectResources(mergedConfig.skills, pluginSource, skillsDestDir);
|
||||
}
|
||||
|
||||
// Step 9: Collect agents to agents folder
|
||||
const agentsDestDir = path.join(tmpDir, 'agents');
|
||||
if (mergedConfig.agents) {
|
||||
await collectResources(mergedConfig.agents, pluginSource, agentsDestDir);
|
||||
}
|
||||
// Step 9.1: Convert collected agent files from Claude format to Qwen format
|
||||
await convertAgentFiles(agentsDestDir);
|
||||
|
||||
// Step 10: Convert to Qwen format config
|
||||
const qwenConfig = convertClaudeToQwenConfig(mergedConfig);
|
||||
|
||||
// Step 11: Write qwen-extension.json
|
||||
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
|
||||
fs.writeFileSync(
|
||||
qwenConfigPath,
|
||||
JSON.stringify(qwenConfig, null, 2),
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
return {
|
||||
config: qwenConfig,
|
||||
convertedDir: tmpDir,
|
||||
};
|
||||
} catch (error) {
|
||||
// Clean up temporary directory on error
|
||||
try {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Collects resources (commands, skills, agents) to a destination folder.
|
||||
* If a resource is already in the destination folder, it will be skipped.
|
||||
* @param resourcePaths String or array of resource paths
|
||||
* @param pluginRoot Root directory of the plugin
|
||||
* @param destDir Destination directory for collected resources
|
||||
*/
|
||||
async function collectResources(
|
||||
resourcePaths: string | string[],
|
||||
pluginRoot: string,
|
||||
destDir: string,
|
||||
): Promise<void> {
|
||||
const paths = Array.isArray(resourcePaths) ? resourcePaths : [resourcePaths];
|
||||
|
||||
// Create destination directory
|
||||
if (!fs.existsSync(destDir)) {
|
||||
fs.mkdirSync(destDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Get the destination folder name (e.g., 'commands', 'skills', 'agents')
|
||||
const destFolderName = path.basename(destDir);
|
||||
|
||||
for (const resourcePath of paths) {
|
||||
const resolvedPath = path.isAbsolute(resourcePath)
|
||||
? resourcePath
|
||||
: path.join(pluginRoot, resourcePath);
|
||||
|
||||
if (!fs.existsSync(resolvedPath)) {
|
||||
console.warn(`Resource path not found: ${resolvedPath}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const stat = fs.statSync(resolvedPath);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
// If it's a directory, check if it's already the destination folder
|
||||
const dirName = path.basename(resolvedPath);
|
||||
const parentDir = path.dirname(resolvedPath);
|
||||
|
||||
// If the directory is already named as the destination folder (e.g., 'commands')
|
||||
// and it's at the plugin root level, skip it
|
||||
if (dirName === destFolderName && parentDir === pluginRoot) {
|
||||
console.log(
|
||||
`Skipping ${resolvedPath} as it's already in the correct location`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Copy all files from the directory
|
||||
const files = await glob('**/*', {
|
||||
cwd: resolvedPath,
|
||||
nodir: true,
|
||||
dot: false,
|
||||
});
|
||||
|
||||
for (const file of files) {
|
||||
const srcFile = path.join(resolvedPath, file);
|
||||
const destFile = path.join(destDir, file);
|
||||
|
||||
// Ensure parent directory exists
|
||||
const destFileDir = path.dirname(destFile);
|
||||
if (!fs.existsSync(destFileDir)) {
|
||||
fs.mkdirSync(destFileDir, { recursive: true });
|
||||
}
|
||||
|
||||
fs.copyFileSync(srcFile, destFile);
|
||||
}
|
||||
} else {
|
||||
// If it's a file, check if it's already in the destination folder
|
||||
const relativePath = path.relative(pluginRoot, resolvedPath);
|
||||
|
||||
// Check if the file path starts with the destination folder name
|
||||
// e.g., 'commands/test1.md' or 'commands/me/test.md' should be skipped
|
||||
const segments = relativePath.split(path.sep);
|
||||
if (segments.length > 0 && segments[0] === destFolderName) {
|
||||
console.log(
|
||||
`Skipping ${resolvedPath} as it's already in ${destFolderName}/`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Copy the file to destination
|
||||
const fileName = path.basename(resolvedPath);
|
||||
const destFile = path.join(destDir, fileName);
|
||||
fs.copyFileSync(resolvedPath, destFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges marketplace plugin config with the actual plugin.json config.
|
||||
* Marketplace config takes precedence for conflicting fields.
|
||||
* @param marketplacePlugin Marketplace plugin definition
|
||||
* @param pluginConfig Actual plugin.json config (optional if strict=false)
|
||||
* @returns Merged Claude plugin config
|
||||
*/
|
||||
export function mergeClaudeConfigs(
|
||||
marketplacePlugin: ClaudeMarketplacePluginConfig,
|
||||
pluginConfig?: ClaudePluginConfig,
|
||||
): ClaudePluginConfig {
|
||||
if (!pluginConfig && marketplacePlugin.strict !== false) {
|
||||
throw new Error(
|
||||
`Plugin ${marketplacePlugin.name} requires plugin.json (strict mode)`,
|
||||
);
|
||||
}
|
||||
|
||||
// Start with plugin.json config (if exists)
|
||||
const merged: ClaudePluginConfig = pluginConfig
|
||||
? { ...pluginConfig }
|
||||
: {
|
||||
name: marketplacePlugin.name,
|
||||
version: '1.0.0', // Default version if not in marketplace
|
||||
};
|
||||
|
||||
// Overlay marketplace config (takes precedence)
|
||||
if (marketplacePlugin.name) merged.name = marketplacePlugin.name;
|
||||
if (marketplacePlugin.version) merged.version = marketplacePlugin.version;
|
||||
if (marketplacePlugin.description)
|
||||
merged.description = marketplacePlugin.description;
|
||||
if (marketplacePlugin.author) merged.author = marketplacePlugin.author;
|
||||
if (marketplacePlugin.homepage) merged.homepage = marketplacePlugin.homepage;
|
||||
if (marketplacePlugin.repository)
|
||||
merged.repository = marketplacePlugin.repository;
|
||||
if (marketplacePlugin.license) merged.license = marketplacePlugin.license;
|
||||
if (marketplacePlugin.keywords) merged.keywords = marketplacePlugin.keywords;
|
||||
if (marketplacePlugin.commands) merged.commands = marketplacePlugin.commands;
|
||||
if (marketplacePlugin.agents) merged.agents = marketplacePlugin.agents;
|
||||
if (marketplacePlugin.skills) merged.skills = marketplacePlugin.skills;
|
||||
if (marketplacePlugin.hooks) merged.hooks = marketplacePlugin.hooks;
|
||||
if (marketplacePlugin.mcpServers)
|
||||
merged.mcpServers = marketplacePlugin.mcpServers;
|
||||
if (marketplacePlugin.outputStyles)
|
||||
merged.outputStyles = marketplacePlugin.outputStyles;
|
||||
if (marketplacePlugin.lspServers)
|
||||
merged.lspServers = marketplacePlugin.lspServers;
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a config object is in Claude plugin format.
|
||||
* @param config Configuration object to check
|
||||
* @returns true if config appears to be Claude format
|
||||
*/
|
||||
export function isClaudePluginConfig(
|
||||
extensionDir: string,
|
||||
marketplace: { marketplaceSource: string; pluginName: string },
|
||||
) {
|
||||
const marketplaceConfigFilePath = path.join(
|
||||
extensionDir,
|
||||
'.claude-plugin/marketplace.json',
|
||||
);
|
||||
if (!fs.existsSync(marketplaceConfigFilePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const marketplaceConfigContent = fs.readFileSync(
|
||||
marketplaceConfigFilePath,
|
||||
'utf-8',
|
||||
);
|
||||
const marketplaceConfig = JSON.parse(marketplaceConfigContent);
|
||||
|
||||
if (typeof marketplaceConfig !== 'object' || marketplaceConfig === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const marketplaceConfigObj = marketplaceConfig as Record<string, unknown>;
|
||||
|
||||
// Must have name and owner
|
||||
if (
|
||||
typeof marketplaceConfigObj['name'] !== 'string' ||
|
||||
typeof marketplaceConfigObj['owner'] !== 'object'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!Array.isArray(marketplaceConfigObj['plugins'])) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const marketplacePluginObj = marketplaceConfigObj['plugins'].find(
|
||||
(plugin: ClaudeMarketplacePluginConfig) =>
|
||||
plugin.name === marketplace.pluginName,
|
||||
);
|
||||
|
||||
if (!marketplacePluginObj) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve plugin source from marketplace plugin configuration.
|
||||
* Returns the absolute path to the plugin source directory.
|
||||
*/
|
||||
async function resolvePluginSource(
|
||||
pluginConfig: ClaudeMarketplacePluginConfig,
|
||||
marketplaceDir: string,
|
||||
pluginDir: string,
|
||||
): Promise<string> {
|
||||
const source = pluginConfig.source;
|
||||
|
||||
// Handle string source (relative path or URL)
|
||||
if (typeof source === 'string') {
|
||||
// Check if it's a URL
|
||||
if (source.startsWith('http://') || source.startsWith('https://')) {
|
||||
// Download from URL
|
||||
const installMetadata: ExtensionInstallMetadata = {
|
||||
source,
|
||||
type: 'git',
|
||||
};
|
||||
try {
|
||||
await downloadFromGitHubRelease(installMetadata, pluginDir);
|
||||
} catch {
|
||||
await cloneFromGit(installMetadata, pluginDir);
|
||||
}
|
||||
return pluginDir;
|
||||
}
|
||||
|
||||
// Relative path within marketplace
|
||||
const pluginRoot = marketplaceDir;
|
||||
const sourcePath = path.join(pluginRoot, source);
|
||||
|
||||
if (!fs.existsSync(sourcePath)) {
|
||||
throw new Error(`Plugin source not found at ${sourcePath}`);
|
||||
}
|
||||
|
||||
// Copy to plugin directory
|
||||
await fs.promises.cp(sourcePath, pluginDir, { recursive: true });
|
||||
return pluginDir;
|
||||
}
|
||||
|
||||
// Handle object source (github or url)
|
||||
if (source.source === 'github') {
|
||||
const installMetadata: ExtensionInstallMetadata = {
|
||||
source: `https://github.com/${source.repo}`,
|
||||
type: 'git',
|
||||
};
|
||||
try {
|
||||
await downloadFromGitHubRelease(installMetadata, pluginDir);
|
||||
} catch {
|
||||
await cloneFromGit(installMetadata, pluginDir);
|
||||
}
|
||||
return pluginDir;
|
||||
}
|
||||
|
||||
if (source.source === 'url') {
|
||||
const installMetadata: ExtensionInstallMetadata = {
|
||||
source: source.url,
|
||||
type: 'git',
|
||||
};
|
||||
try {
|
||||
await downloadFromGitHubRelease(installMetadata, pluginDir);
|
||||
} catch {
|
||||
await cloneFromGit(installMetadata, pluginDir);
|
||||
}
|
||||
return pluginDir;
|
||||
}
|
||||
|
||||
throw new Error(`Unsupported plugin source type: ${JSON.stringify(source)}`);
|
||||
}
|
||||
931
packages/core/src/extension/extensionManager.test.ts
Normal file
931
packages/core/src/extension/extensionManager.test.ts
Normal file
@@ -0,0 +1,931 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
INSTALL_METADATA_FILENAME,
|
||||
EXTENSIONS_CONFIG_FILENAME,
|
||||
} from './variables.js';
|
||||
import { QWEN_DIR } from '../config/storage.js';
|
||||
import {
|
||||
ExtensionManager,
|
||||
SettingScope,
|
||||
type ExtensionManagerOptions,
|
||||
validateName,
|
||||
getExtensionId,
|
||||
hashValue,
|
||||
parseInstallSource,
|
||||
type ExtensionConfig,
|
||||
} from './extensionManager.js';
|
||||
import type { MCPServerConfig, ExtensionInstallMetadata } from '../index.js';
|
||||
|
||||
const mockGit = {
|
||||
clone: vi.fn(),
|
||||
getRemotes: vi.fn(),
|
||||
fetch: vi.fn(),
|
||||
checkout: vi.fn(),
|
||||
listRemote: vi.fn(),
|
||||
revparse: vi.fn(),
|
||||
path: vi.fn(),
|
||||
};
|
||||
|
||||
vi.mock('simple-git', () => ({
|
||||
simpleGit: vi.fn((path: string) => {
|
||||
mockGit.path.mockReturnValue(path);
|
||||
return mockGit;
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock('./github.js', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('./github.js')>();
|
||||
return {
|
||||
...actual,
|
||||
downloadFromGitHubRelease: vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
|
||||
};
|
||||
});
|
||||
|
||||
const mockHomedir = vi.hoisted(() => vi.fn());
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const mockedOs = await importOriginal<typeof os>();
|
||||
return {
|
||||
...mockedOs,
|
||||
homedir: mockHomedir,
|
||||
};
|
||||
});
|
||||
|
||||
const mockLogExtensionEnable = vi.hoisted(() => vi.fn());
|
||||
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
|
||||
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
|
||||
const mockLogExtensionDisable = vi.hoisted(() => vi.fn());
|
||||
const mockLogExtensionUpdateEvent = vi.hoisted(() => vi.fn());
|
||||
vi.mock('../telemetry/loggers.js', () => ({
|
||||
logExtensionEnable: mockLogExtensionEnable,
|
||||
logExtensionUpdateEvent: mockLogExtensionUpdateEvent,
|
||||
}));
|
||||
|
||||
vi.mock('../index.js', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../index.js')>();
|
||||
return {
|
||||
...actual,
|
||||
logExtensionEnable: mockLogExtensionEnable,
|
||||
logExtensionInstallEvent: mockLogExtensionInstallEvent,
|
||||
logExtensionUninstall: mockLogExtensionUninstall,
|
||||
logExtensionDisable: mockLogExtensionDisable,
|
||||
};
|
||||
});
|
||||
|
||||
const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
|
||||
|
||||
function createExtension({
|
||||
extensionsDir = 'extensions-dir',
|
||||
name = 'my-extension',
|
||||
version = '1.0.0',
|
||||
addContextFile = false,
|
||||
contextFileName = undefined as string | undefined,
|
||||
mcpServers = {} as Record<string, MCPServerConfig>,
|
||||
installMetadata = undefined as ExtensionInstallMetadata | undefined,
|
||||
} = {}): string {
|
||||
const extDir = path.join(extensionsDir, name);
|
||||
fs.mkdirSync(extDir, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name, version, contextFileName, mcpServers }),
|
||||
);
|
||||
|
||||
if (addContextFile) {
|
||||
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
|
||||
}
|
||||
|
||||
if (contextFileName) {
|
||||
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
|
||||
}
|
||||
|
||||
if (installMetadata) {
|
||||
fs.writeFileSync(
|
||||
path.join(extDir, INSTALL_METADATA_FILENAME),
|
||||
JSON.stringify(installMetadata),
|
||||
);
|
||||
}
|
||||
return extDir;
|
||||
}
|
||||
|
||||
describe('extension tests', () => {
|
||||
let tempHomeDir: string;
|
||||
let tempWorkspaceDir: string;
|
||||
let userExtensionsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'qwen-code-test-home-'),
|
||||
);
|
||||
tempWorkspaceDir = fs.mkdtempSync(
|
||||
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
|
||||
);
|
||||
userExtensionsDir = path.join(tempHomeDir, EXTENSIONS_DIRECTORY_NAME);
|
||||
fs.mkdirSync(userExtensionsDir, { recursive: true });
|
||||
|
||||
mockHomedir.mockReturnValue(tempHomeDir);
|
||||
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
|
||||
Object.values(mockGit).forEach((fn) => fn.mockReset());
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
function createExtensionManager(
|
||||
options: Partial<ExtensionManagerOptions> = {},
|
||||
): ExtensionManager {
|
||||
return new ExtensionManager({
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
isWorkspaceTrusted: true,
|
||||
...options,
|
||||
});
|
||||
}
|
||||
|
||||
describe('loadExtension', () => {
|
||||
it('should include extension path in loaded extension', async () => {
|
||||
const extensionDir = path.join(userExtensionsDir, 'test-extension');
|
||||
fs.mkdirSync(extensionDir, { recursive: true });
|
||||
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(1);
|
||||
expect(extensions[0].path).toBe(extensionDir);
|
||||
expect(extensions[0].config.name).toBe('test-extension');
|
||||
});
|
||||
|
||||
it('should load context file path when QWEN.md is present', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
addContextFile: true,
|
||||
});
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext2',
|
||||
version: '2.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(2);
|
||||
const ext1 = extensions.find((e) => e.config.name === 'ext1');
|
||||
const ext2 = extensions.find((e) => e.config.name === 'ext2');
|
||||
expect(ext1?.contextFiles).toEqual([
|
||||
path.join(userExtensionsDir, 'ext1', 'QWEN.md'),
|
||||
]);
|
||||
expect(ext2?.contextFiles).toEqual([]);
|
||||
});
|
||||
|
||||
it('should load context file path from the extension config', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
addContextFile: false,
|
||||
contextFileName: 'my-context-file.md',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(1);
|
||||
const ext1 = extensions.find((e) => e.config.name === 'ext1');
|
||||
expect(ext1?.contextFiles).toEqual([
|
||||
path.join(userExtensionsDir, 'ext1', 'my-context-file.md'),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should skip extensions with invalid JSON and log a warning', async () => {
|
||||
const consoleSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
// Good extension
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'good-ext',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
// Bad extension
|
||||
const badExtDir = path.join(userExtensionsDir, 'bad-ext');
|
||||
fs.mkdirSync(badExtDir);
|
||||
const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
|
||||
fs.writeFileSync(badConfigPath, '{ "name": "bad-ext"'); // Malformed
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(1);
|
||||
expect(extensions[0].config.name).toBe('good-ext');
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
|
||||
);
|
||||
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should skip extensions with missing name and log a warning', async () => {
|
||||
const consoleSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
// Good extension
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'good-ext',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
// Bad extension
|
||||
const badExtDir = path.join(userExtensionsDir, 'bad-ext-no-name');
|
||||
fs.mkdirSync(badExtDir);
|
||||
const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
|
||||
fs.writeFileSync(badConfigPath, JSON.stringify({ version: '1.0.0' }));
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(1);
|
||||
expect(extensions[0].config.name).toBe('good-ext');
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
|
||||
);
|
||||
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should filter trust out of mcp servers', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
mcpServers: {
|
||||
'test-server': {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
trust: true,
|
||||
} as MCPServerConfig,
|
||||
},
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(1);
|
||||
// trust should be filtered from extension.mcpServers (not config.mcpServers)
|
||||
expect(extensions[0].mcpServers?.['test-server']?.trust).toBeUndefined();
|
||||
// config.mcpServers should still have trust (original config)
|
||||
expect(extensions[0].config.mcpServers?.['test-server']?.trust).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('enableExtension / disableExtension', () => {
|
||||
it('should disable an extension at the user scope', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'my-extension',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
|
||||
await manager.disableExtension('my-extension', SettingScope.User);
|
||||
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('should disable an extension at the workspace scope', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'my-extension',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
|
||||
await manager.disableExtension(
|
||||
'my-extension',
|
||||
SettingScope.Workspace,
|
||||
tempWorkspaceDir,
|
||||
);
|
||||
|
||||
expect(manager.isEnabled('my-extension', tempHomeDir)).toBe(true);
|
||||
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle disabling the same extension twice', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'my-extension',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
|
||||
await manager.disableExtension('my-extension', SettingScope.User);
|
||||
await manager.disableExtension('my-extension', SettingScope.User);
|
||||
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('should throw an error if you request system scope', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'my-extension',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
|
||||
await expect(
|
||||
manager.disableExtension('my-extension', SettingScope.System),
|
||||
).rejects.toThrow('System and SystemDefaults scopes are not supported.');
|
||||
});
|
||||
|
||||
it('should enable an extension at the user scope', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
|
||||
await manager.disableExtension('ext1', SettingScope.User);
|
||||
expect(manager.isEnabled('ext1')).toBe(false);
|
||||
|
||||
await manager.enableExtension('ext1', SettingScope.User);
|
||||
expect(manager.isEnabled('ext1')).toBe(true);
|
||||
});
|
||||
|
||||
it('should enable an extension at the workspace scope', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
|
||||
await manager.disableExtension('ext1', SettingScope.Workspace);
|
||||
expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(false);
|
||||
|
||||
await manager.enableExtension('ext1', SettingScope.Workspace);
|
||||
expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateExtensionOverrides', () => {
|
||||
it('should mark all extensions as active if no enabled extensions are provided', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
});
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext2',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(2);
|
||||
expect(extensions.every((e) => e.isActive)).toBe(true);
|
||||
});
|
||||
|
||||
it('should mark only the enabled extensions as active', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
});
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext2',
|
||||
version: '1.0.0',
|
||||
});
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext3',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager({
|
||||
enabledExtensionOverrides: ['ext1', 'ext3'],
|
||||
});
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
|
||||
expect(extensions.find((e) => e.name === 'ext2')?.isActive).toBe(false);
|
||||
expect(extensions.find((e) => e.name === 'ext3')?.isActive).toBe(true);
|
||||
});
|
||||
|
||||
it('should mark all extensions as inactive when "none" is provided', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
});
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext2',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager({
|
||||
enabledExtensionOverrides: ['none'],
|
||||
});
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions.every((e) => !e.isActive)).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle case-insensitivity', async () => {
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager({
|
||||
enabledExtensionOverrides: ['EXT1'],
|
||||
});
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
|
||||
});
|
||||
|
||||
it('should log an error for unknown extensions', async () => {
|
||||
const consoleSpy = vi
|
||||
.spyOn(console, 'error')
|
||||
.mockImplementation(() => {});
|
||||
|
||||
createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'ext1',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
const manager = createExtensionManager({
|
||||
enabledExtensionOverrides: ['ext4'],
|
||||
});
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
manager.validateExtensionOverrides(extensions);
|
||||
|
||||
expect(consoleSpy).toHaveBeenCalledWith('Extension not found: ext4');
|
||||
consoleSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadExtensionConfig', () => {
|
||||
it('should resolve environment variables in extension configuration', async () => {
|
||||
process.env['TEST_API_KEY'] = 'test-api-key-123';
|
||||
process.env['TEST_DB_URL'] = 'postgresql://localhost:5432/testdb';
|
||||
|
||||
try {
|
||||
const extDir = path.join(userExtensionsDir, 'test-extension');
|
||||
fs.mkdirSync(extDir);
|
||||
|
||||
const extensionConfig = {
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
mcpServers: {
|
||||
'test-server': {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
API_KEY: '$TEST_API_KEY',
|
||||
DATABASE_URL: '${TEST_DB_URL}',
|
||||
STATIC_VALUE: 'no-substitution',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
fs.writeFileSync(
|
||||
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(1);
|
||||
const extension = extensions[0];
|
||||
expect(extension.config.name).toBe('test-extension');
|
||||
expect(extension.config.mcpServers).toBeDefined();
|
||||
|
||||
const serverConfig = extension.config.mcpServers?.['test-server'];
|
||||
expect(serverConfig).toBeDefined();
|
||||
expect(serverConfig?.env).toBeDefined();
|
||||
expect(serverConfig?.env?.['API_KEY']).toBe('test-api-key-123');
|
||||
expect(serverConfig?.env?.['DATABASE_URL']).toBe(
|
||||
'postgresql://localhost:5432/testdb',
|
||||
);
|
||||
expect(serverConfig?.env?.['STATIC_VALUE']).toBe('no-substitution');
|
||||
} finally {
|
||||
delete process.env['TEST_API_KEY'];
|
||||
delete process.env['TEST_DB_URL'];
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle missing environment variables gracefully', async () => {
|
||||
const extDir = path.join(userExtensionsDir, 'test-extension');
|
||||
fs.mkdirSync(extDir);
|
||||
|
||||
const extensionConfig = {
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
mcpServers: {
|
||||
'test-server': {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
MISSING_VAR: '$UNDEFINED_ENV_VAR',
|
||||
MISSING_VAR_BRACES: '${ALSO_UNDEFINED}',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
fs.writeFileSync(
|
||||
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify(extensionConfig),
|
||||
);
|
||||
|
||||
const manager = createExtensionManager();
|
||||
await manager.refreshCache();
|
||||
const extensions = manager.getLoadedExtensions();
|
||||
|
||||
expect(extensions).toHaveLength(1);
|
||||
const extension = extensions[0];
|
||||
const serverConfig = extension.config.mcpServers!['test-server'];
|
||||
expect(serverConfig.env).toBeDefined();
|
||||
expect(serverConfig.env!['MISSING_VAR']).toBe('$UNDEFINED_ENV_VAR');
|
||||
expect(serverConfig.env!['MISSING_VAR_BRACES']).toBe('${ALSO_UNDEFINED}');
|
||||
});
|
||||
describe('refreshTools and refreshMemory', () => {
|
||||
it('refreshTools should return early if config is not set', async () => {
|
||||
const manager = createExtensionManager();
|
||||
const extension = {
|
||||
name: 'test-ext',
|
||||
config: { name: 'test-ext', version: '1.0.0' },
|
||||
excludeTools: ['tool1'],
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} as any;
|
||||
|
||||
// Should not throw when config is undefined
|
||||
await expect(manager.refreshTools(extension)).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
it('refreshTools should call setTools when extension has excludeTools', async () => {
|
||||
const mockSetTools = vi.fn().mockResolvedValue(undefined);
|
||||
const mockRefreshCache = vi.fn();
|
||||
const mockRestartMcpServers = vi.fn();
|
||||
const mockRefreshHierarchicalMemory = vi.fn();
|
||||
|
||||
const mockConfig = {
|
||||
getGeminiClient: () => ({
|
||||
isInitialized: () => true,
|
||||
setTools: mockSetTools,
|
||||
}),
|
||||
getToolRegistry: () => ({
|
||||
restartMcpServers: mockRestartMcpServers,
|
||||
}),
|
||||
getSkillManager: () => ({
|
||||
refreshCache: mockRefreshCache,
|
||||
}),
|
||||
getSubagentManager: () => ({
|
||||
refreshCache: mockRefreshCache,
|
||||
}),
|
||||
refreshHierarchicalMemory: mockRefreshHierarchicalMemory,
|
||||
};
|
||||
|
||||
const manager = createExtensionManager();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(manager as any).config = mockConfig;
|
||||
|
||||
const extension = {
|
||||
name: 'test-ext',
|
||||
config: { name: 'test-ext', version: '1.0.0' },
|
||||
excludeTools: ['tool1', 'tool2'],
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} as any;
|
||||
|
||||
await manager.refreshTools(extension);
|
||||
|
||||
expect(mockSetTools).toHaveBeenCalledOnce();
|
||||
});
|
||||
|
||||
it('refreshTools should not call setTools when extension has no excludeTools', async () => {
|
||||
const mockSetTools = vi.fn().mockResolvedValue(undefined);
|
||||
const mockRefreshCache = vi.fn();
|
||||
const mockRestartMcpServers = vi.fn();
|
||||
const mockRefreshHierarchicalMemory = vi.fn();
|
||||
|
||||
const mockConfig = {
|
||||
getGeminiClient: () => ({
|
||||
isInitialized: () => true,
|
||||
setTools: mockSetTools,
|
||||
}),
|
||||
getToolRegistry: () => ({
|
||||
restartMcpServers: mockRestartMcpServers,
|
||||
}),
|
||||
getSkillManager: () => ({
|
||||
refreshCache: mockRefreshCache,
|
||||
}),
|
||||
getSubagentManager: () => ({
|
||||
refreshCache: mockRefreshCache,
|
||||
}),
|
||||
refreshHierarchicalMemory: mockRefreshHierarchicalMemory,
|
||||
};
|
||||
|
||||
const manager = createExtensionManager();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(manager as any).config = mockConfig;
|
||||
|
||||
const extension = {
|
||||
name: 'test-ext',
|
||||
config: { name: 'test-ext', version: '1.0.0' },
|
||||
excludeTools: [],
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} as any;
|
||||
|
||||
await manager.refreshTools(extension);
|
||||
|
||||
expect(mockSetTools).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('refreshTools should not call setTools when geminiClient is not initialized', async () => {
|
||||
const mockSetTools = vi.fn().mockResolvedValue(undefined);
|
||||
const mockRefreshCache = vi.fn();
|
||||
const mockRestartMcpServers = vi.fn();
|
||||
const mockRefreshHierarchicalMemory = vi.fn();
|
||||
|
||||
const mockConfig = {
|
||||
getGeminiClient: () => ({
|
||||
isInitialized: () => false,
|
||||
setTools: mockSetTools,
|
||||
}),
|
||||
getToolRegistry: () => ({
|
||||
restartMcpServers: mockRestartMcpServers,
|
||||
}),
|
||||
getSkillManager: () => ({
|
||||
refreshCache: mockRefreshCache,
|
||||
}),
|
||||
getSubagentManager: () => ({
|
||||
refreshCache: mockRefreshCache,
|
||||
}),
|
||||
refreshHierarchicalMemory: mockRefreshHierarchicalMemory,
|
||||
};
|
||||
|
||||
const manager = createExtensionManager();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(manager as any).config = mockConfig;
|
||||
|
||||
const extension = {
|
||||
name: 'test-ext',
|
||||
config: { name: 'test-ext', version: '1.0.0' },
|
||||
excludeTools: ['tool1'],
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} as any;
|
||||
|
||||
await manager.refreshTools(extension);
|
||||
|
||||
expect(mockSetTools).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('refreshTools should always call refreshMemory', async () => {
|
||||
const mockRefreshCache = vi.fn();
|
||||
const mockRestartMcpServers = vi.fn();
|
||||
const mockRefreshHierarchicalMemory = vi.fn();
|
||||
|
||||
const mockConfig = {
|
||||
getGeminiClient: () => ({
|
||||
isInitialized: () => false,
|
||||
setTools: vi.fn(),
|
||||
}),
|
||||
getToolRegistry: () => ({
|
||||
restartMcpServers: mockRestartMcpServers,
|
||||
}),
|
||||
getSkillManager: () => ({
|
||||
refreshCache: mockRefreshCache,
|
||||
}),
|
||||
getSubagentManager: () => ({
|
||||
refreshCache: mockRefreshCache,
|
||||
}),
|
||||
refreshHierarchicalMemory: mockRefreshHierarchicalMemory,
|
||||
};
|
||||
|
||||
const manager = createExtensionManager();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(manager as any).config = mockConfig;
|
||||
|
||||
const extension = {
|
||||
name: 'test-ext',
|
||||
config: { name: 'test-ext', version: '1.0.0' },
|
||||
excludeTools: [],
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} as any;
|
||||
|
||||
await manager.refreshTools(extension);
|
||||
|
||||
// refreshMemory should be called which includes these
|
||||
expect(mockRestartMcpServers).toHaveBeenCalledOnce();
|
||||
expect(mockRefreshCache).toHaveBeenCalledTimes(2); // skillManager and subagentManager
|
||||
expect(mockRefreshHierarchicalMemory).toHaveBeenCalledOnce();
|
||||
});
|
||||
|
||||
it('refreshMemory should return early if config is not set', async () => {
|
||||
const manager = createExtensionManager();
|
||||
|
||||
// Should not throw when config is undefined
|
||||
await expect(manager.refreshMemory()).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
it('refreshMemory should call all refresh methods', async () => {
|
||||
const mockSkillRefreshCache = vi.fn();
|
||||
const mockSubagentRefreshCache = vi.fn();
|
||||
const mockRestartMcpServers = vi.fn();
|
||||
const mockRefreshHierarchicalMemory = vi.fn();
|
||||
|
||||
const mockConfig = {
|
||||
getToolRegistry: () => ({
|
||||
restartMcpServers: mockRestartMcpServers,
|
||||
}),
|
||||
getSkillManager: () => ({
|
||||
refreshCache: mockSkillRefreshCache,
|
||||
}),
|
||||
getSubagentManager: () => ({
|
||||
refreshCache: mockSubagentRefreshCache,
|
||||
}),
|
||||
refreshHierarchicalMemory: mockRefreshHierarchicalMemory,
|
||||
};
|
||||
|
||||
const manager = createExtensionManager();
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(manager as any).config = mockConfig;
|
||||
|
||||
await manager.refreshMemory();
|
||||
|
||||
expect(mockRestartMcpServers).toHaveBeenCalledOnce();
|
||||
expect(mockSkillRefreshCache).toHaveBeenCalledOnce();
|
||||
expect(mockSubagentRefreshCache).toHaveBeenCalledOnce();
|
||||
expect(mockRefreshHierarchicalMemory).toHaveBeenCalledOnce();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('extensionManager utility functions', () => {
|
||||
describe('validateName', () => {
|
||||
it('should accept valid extension names', () => {
|
||||
expect(() => validateName('my-extension')).not.toThrow();
|
||||
expect(() => validateName('Extension123')).not.toThrow();
|
||||
expect(() => validateName('test-ext-1')).not.toThrow();
|
||||
expect(() => validateName('UPPERCASE')).not.toThrow();
|
||||
});
|
||||
|
||||
it('should reject names with invalid characters', () => {
|
||||
expect(() => validateName('my_extension')).toThrow(
|
||||
'Invalid extension name',
|
||||
);
|
||||
expect(() => validateName('my.extension')).toThrow(
|
||||
'Invalid extension name',
|
||||
);
|
||||
expect(() => validateName('my extension')).toThrow(
|
||||
'Invalid extension name',
|
||||
);
|
||||
expect(() => validateName('my@ext')).toThrow('Invalid extension name');
|
||||
});
|
||||
|
||||
it('should reject empty names', () => {
|
||||
expect(() => validateName('')).toThrow('Invalid extension name');
|
||||
});
|
||||
});
|
||||
|
||||
describe('hashValue', () => {
|
||||
it('should generate consistent hash for same input', () => {
|
||||
const hash1 = hashValue('test-input');
|
||||
const hash2 = hashValue('test-input');
|
||||
expect(hash1).toBe(hash2);
|
||||
});
|
||||
|
||||
it('should generate different hashes for different inputs', () => {
|
||||
const hash1 = hashValue('input-1');
|
||||
const hash2 = hashValue('input-2');
|
||||
expect(hash1).not.toBe(hash2);
|
||||
});
|
||||
|
||||
it('should generate a valid SHA256 hash', () => {
|
||||
const hash = hashValue('test');
|
||||
expect(hash).toMatch(/^[a-f0-9]{64}$/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getExtensionId', () => {
|
||||
it('should use hashed name when no install metadata', () => {
|
||||
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
|
||||
const id = getExtensionId(config);
|
||||
expect(id).toBe(hashValue('test-ext'));
|
||||
});
|
||||
|
||||
it('should use hashed source for local install', () => {
|
||||
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
|
||||
const metadata = { type: 'local' as const, source: '/path/to/ext' };
|
||||
const id = getExtensionId(config, metadata);
|
||||
expect(id).toBe(hashValue('/path/to/ext'));
|
||||
});
|
||||
|
||||
it('should use GitHub URL for git install', () => {
|
||||
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
|
||||
const metadata = {
|
||||
type: 'git' as const,
|
||||
source: 'https://github.com/owner/repo',
|
||||
};
|
||||
const id = getExtensionId(config, metadata);
|
||||
expect(id).toBe(hashValue('https://github.com/owner/repo'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseInstallSource', () => {
|
||||
it('should parse HTTPS URL as git type', async () => {
|
||||
const result = await parseInstallSource(
|
||||
'https://github.com/owner/repo',
|
||||
);
|
||||
expect(result.type).toBe('git');
|
||||
expect(result.source).toBe('https://github.com/owner/repo');
|
||||
});
|
||||
|
||||
it('should parse HTTP URL as git type', async () => {
|
||||
const result = await parseInstallSource('http://example.com/repo');
|
||||
expect(result.type).toBe('git');
|
||||
});
|
||||
|
||||
it('should parse git@ URL as git type', async () => {
|
||||
const result = await parseInstallSource(
|
||||
'git@github.com:owner/repo.git',
|
||||
);
|
||||
expect(result.type).toBe('git');
|
||||
});
|
||||
|
||||
it('should parse sso:// URL as git type', async () => {
|
||||
const result = await parseInstallSource('sso://some/path');
|
||||
expect(result.type).toBe('git');
|
||||
});
|
||||
|
||||
it('should parse marketplace URL correctly', async () => {
|
||||
const result = await parseInstallSource(
|
||||
'https://example.com/marketplace:plugin-name',
|
||||
);
|
||||
expect(result.type).toBe('marketplace');
|
||||
expect(result.marketplace?.pluginName).toBe('plugin-name');
|
||||
});
|
||||
|
||||
it('should throw for non-existent local path', async () => {
|
||||
await expect(
|
||||
parseInstallSource('/nonexistent/path/to/extension'),
|
||||
).rejects.toThrow('Install source not found');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
packages/core/src/extension/extensionManager.ts (new file, 1335 lines)
File diff suppressed because it is too large
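Since the extensionManager.ts diff is suppressed, the tests above are the best reference for its public surface. A minimal usage sketch, using only calls exercised by those tests (the workspace path is a placeholder):

import { ExtensionManager, SettingScope } from './extensionManager.js';

// Discover installed extensions and toggle one at the user scope.
const manager = new ExtensionManager({
  workspaceDir: '/path/to/workspace', // placeholder
  isWorkspaceTrusted: true,
});
await manager.refreshCache(); // loads extensions from the user extensions directory
const extensions = manager.getLoadedExtensions();

await manager.disableExtension('my-extension', SettingScope.User);
console.log(manager.isEnabled('my-extension')); // false
await manager.enableExtension('my-extension', SettingScope.User);
console.log(manager.isEnabled('my-extension')); // true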
packages/core/src/extension/extensionSettings.test.ts (new file, 730 lines)
@@ -0,0 +1,730 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import * as path from 'node:path';
|
||||
import * as os from 'node:os';
|
||||
import {
|
||||
getEnvContents,
|
||||
maybePromptForSettings,
|
||||
promptForSetting,
|
||||
type ExtensionSetting,
|
||||
updateSetting,
|
||||
ExtensionSettingScope,
|
||||
getScopedEnvContents,
|
||||
} from './extensionSettings.js';
|
||||
import type { ExtensionConfig } from './extensionManager.js';
|
||||
import { ExtensionStorage } from './storage.js';
|
||||
import prompts from 'prompts';
|
||||
import * as fsPromises from 'node:fs/promises';
|
||||
import * as fs from 'node:fs';
|
||||
import { KeychainTokenStorage } from '../mcp/token-storage/keychain-token-storage.js';
|
||||
import { EXTENSION_SETTINGS_FILENAME } from './variables.js';
|
||||
|
||||
vi.mock('prompts');
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const mockedOs = await importOriginal<typeof os>();
|
||||
return {
|
||||
...mockedOs,
|
||||
homedir: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock(
|
||||
'../mcp/token-storage/keychain-token-storage.js',
|
||||
async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<
|
||||
typeof import('../mcp/token-storage/keychain-token-storage.js')
|
||||
>();
|
||||
return {
|
||||
...actual,
|
||||
KeychainTokenStorage: vi.fn(),
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
describe('extensionSettings', () => {
|
||||
let tempHomeDir: string;
|
||||
let tempWorkspaceDir: string;
|
||||
let extensionDir: string;
|
||||
let mockKeychainData: Record<string, Record<string, string>>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockKeychainData = {};
|
||||
vi.mocked(KeychainTokenStorage).mockImplementation(
|
||||
(serviceName: string) => {
|
||||
if (!mockKeychainData[serviceName]) {
|
||||
mockKeychainData[serviceName] = {};
|
||||
}
|
||||
const keychainData = mockKeychainData[serviceName];
|
||||
return {
|
||||
getSecret: vi
|
||||
.fn()
|
||||
.mockImplementation(
|
||||
async (key: string) => keychainData[key] || null,
|
||||
),
|
||||
setSecret: vi
|
||||
.fn()
|
||||
.mockImplementation(async (key: string, value: string) => {
|
||||
keychainData[key] = value;
|
||||
}),
|
||||
deleteSecret: vi.fn().mockImplementation(async (key: string) => {
|
||||
delete keychainData[key];
|
||||
}),
|
||||
listSecrets: vi
|
||||
.fn()
|
||||
.mockImplementation(async () => Object.keys(keychainData)),
|
||||
isAvailable: vi.fn().mockResolvedValue(true),
|
||||
} as unknown as KeychainTokenStorage;
|
||||
},
|
||||
);
|
||||
tempHomeDir = os.tmpdir() + path.sep + `gemini-cli-test-home-${Date.now()}`;
|
||||
tempWorkspaceDir = path.join(
|
||||
os.tmpdir(),
|
||||
`gemini-cli-test-workspace-${Date.now()}`,
|
||||
);
|
||||
extensionDir = path.join(tempHomeDir, '.gemini', 'extensions', 'test-ext');
|
||||
// Spy and mock the method, but also create the directory so we can write to it.
|
||||
vi.spyOn(ExtensionStorage.prototype, 'getExtensionDir').mockReturnValue(
|
||||
extensionDir,
|
||||
);
|
||||
fs.mkdirSync(extensionDir, { recursive: true });
|
||||
fs.mkdirSync(tempWorkspaceDir, { recursive: true });
|
||||
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
|
||||
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
|
||||
vi.mocked(prompts).mockClear();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('maybePromptForSettings', () => {
|
||||
const mockRequestSetting = vi.fn(
|
||||
async (setting: ExtensionSetting) => `mock-${setting.envVar}`,
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
mockRequestSetting.mockClear();
|
||||
});
|
||||
|
||||
it('should do nothing if settings are undefined', async () => {
|
||||
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
|
||||
await maybePromptForSettings(
|
||||
config,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
undefined,
|
||||
undefined,
|
||||
);
|
||||
expect(mockRequestSetting).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should do nothing if settings are empty', async () => {
|
||||
const config: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [],
|
||||
};
|
||||
await maybePromptForSettings(
|
||||
config,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
undefined,
|
||||
undefined,
|
||||
);
|
||||
expect(mockRequestSetting).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should prompt for all settings if there is no previous config', async () => {
|
||||
const config: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{ name: 's2', description: 'd2', envVar: 'VAR2' },
|
||||
],
|
||||
};
|
||||
await maybePromptForSettings(
|
||||
config,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
undefined,
|
||||
undefined,
|
||||
);
|
||||
expect(mockRequestSetting).toHaveBeenCalledTimes(2);
|
||||
expect(mockRequestSetting).toHaveBeenCalledWith(config.settings![0]);
|
||||
expect(mockRequestSetting).toHaveBeenCalledWith(config.settings![1]);
|
||||
});
|
||||
|
||||
it('should only prompt for new settings', async () => {
|
||||
const previousConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
|
||||
};
|
||||
const newConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{ name: 's2', description: 'd2', envVar: 'VAR2' },
|
||||
],
|
||||
};
|
||||
const previousSettings = { VAR1: 'previous-VAR1' };
|
||||
|
||||
await maybePromptForSettings(
|
||||
newConfig,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
previousConfig,
|
||||
previousSettings,
|
||||
);
|
||||
|
||||
expect(mockRequestSetting).toHaveBeenCalledTimes(1);
|
||||
expect(mockRequestSetting).toHaveBeenCalledWith(newConfig.settings![1]);
|
||||
|
||||
const expectedEnvPath = path.join(extensionDir, '.env');
|
||||
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
|
||||
const expectedContent = 'VAR1=previous-VAR1\nVAR2=mock-VAR2\n';
|
||||
expect(actualContent).toBe(expectedContent);
|
||||
});
|
||||
|
||||
it('should clear settings if new config has no settings', async () => {
|
||||
const previousConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{
|
||||
name: 's2',
|
||||
description: 'd2',
|
||||
envVar: 'SENSITIVE_VAR',
|
||||
sensitive: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
const newConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [],
|
||||
};
|
||||
const previousSettings = {
|
||||
VAR1: 'previous-VAR1',
|
||||
SENSITIVE_VAR: 'secret',
|
||||
};
|
||||
const userKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext 12345`,
|
||||
);
|
||||
await userKeychain.setSecret('SENSITIVE_VAR', 'secret');
|
||||
const envPath = path.join(extensionDir, '.env');
|
||||
await fsPromises.writeFile(envPath, 'VAR1=previous-VAR1');
|
||||
|
||||
await maybePromptForSettings(
|
||||
newConfig,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
previousConfig,
|
||||
previousSettings,
|
||||
);
|
||||
|
||||
expect(mockRequestSetting).not.toHaveBeenCalled();
|
||||
const actualContent = await fsPromises.readFile(envPath, 'utf-8');
|
||||
expect(actualContent).toBe('');
|
||||
expect(await userKeychain.getSecret('SENSITIVE_VAR')).toBeNull();
|
||||
});
|
||||
|
||||
it('should remove sensitive settings from keychain', async () => {
|
||||
const previousConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{
|
||||
name: 's1',
|
||||
description: 'd1',
|
||||
envVar: 'SENSITIVE_VAR',
|
||||
sensitive: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
const newConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [],
|
||||
};
|
||||
const previousSettings = { SENSITIVE_VAR: 'secret' };
|
||||
const userKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext 12345`,
|
||||
);
|
||||
await userKeychain.setSecret('SENSITIVE_VAR', 'secret');
|
||||
|
||||
await maybePromptForSettings(
|
||||
newConfig,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
previousConfig,
|
||||
previousSettings,
|
||||
);
|
||||
|
||||
expect(await userKeychain.getSecret('SENSITIVE_VAR')).toBeNull();
|
||||
});
|
||||
|
||||
it('should remove settings that are no longer in the config', async () => {
|
||||
const previousConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{ name: 's2', description: 'd2', envVar: 'VAR2' },
|
||||
],
|
||||
};
|
||||
const newConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
|
||||
};
|
||||
const previousSettings = {
|
||||
VAR1: 'previous-VAR1',
|
||||
VAR2: 'previous-VAR2',
|
||||
};
|
||||
|
||||
await maybePromptForSettings(
|
||||
newConfig,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
previousConfig,
|
||||
previousSettings,
|
||||
);
|
||||
|
||||
expect(mockRequestSetting).not.toHaveBeenCalled();
|
||||
|
||||
const expectedEnvPath = path.join(extensionDir, '.env');
|
||||
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
|
||||
const expectedContent = 'VAR1=previous-VAR1\n';
|
||||
expect(actualContent).toBe(expectedContent);
|
||||
});
|
||||
|
||||
it('should reprompt if a setting changes sensitivity', async () => {
|
||||
const previousConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1', sensitive: false },
|
||||
],
|
||||
};
|
||||
const newConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1', sensitive: true },
|
||||
],
|
||||
};
|
||||
const previousSettings = { VAR1: 'previous-VAR1' };
|
||||
|
||||
await maybePromptForSettings(
|
||||
newConfig,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
previousConfig,
|
||||
previousSettings,
|
||||
);
|
||||
|
||||
expect(mockRequestSetting).toHaveBeenCalledTimes(1);
|
||||
expect(mockRequestSetting).toHaveBeenCalledWith(newConfig.settings![0]);
|
||||
|
||||
// The value should now be in keychain, not the .env file.
|
||||
const expectedEnvPath = path.join(extensionDir, '.env');
|
||||
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
|
||||
expect(actualContent).toBe('');
|
||||
});
|
||||
|
||||
it('should not prompt if settings are identical', async () => {
|
||||
const previousConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{ name: 's2', description: 'd2', envVar: 'VAR2' },
|
||||
],
|
||||
};
|
||||
const newConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{ name: 's2', description: 'd2', envVar: 'VAR2' },
|
||||
],
|
||||
};
|
||||
const previousSettings = {
|
||||
VAR1: 'previous-VAR1',
|
||||
VAR2: 'previous-VAR2',
|
||||
};
|
||||
|
||||
await maybePromptForSettings(
|
||||
newConfig,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
previousConfig,
|
||||
previousSettings,
|
||||
);
|
||||
|
||||
expect(mockRequestSetting).not.toHaveBeenCalled();
|
||||
const expectedEnvPath = path.join(extensionDir, '.env');
|
||||
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
|
||||
const expectedContent = 'VAR1=previous-VAR1\nVAR2=previous-VAR2\n';
|
||||
expect(actualContent).toBe(expectedContent);
|
||||
});
|
||||
|
||||
it('should wrap values with spaces in quotes', async () => {
|
||||
const config: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
|
||||
};
|
||||
mockRequestSetting.mockResolvedValue('a value with spaces');
|
||||
|
||||
await maybePromptForSettings(
|
||||
config,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
undefined,
|
||||
undefined,
|
||||
);
|
||||
|
||||
const expectedEnvPath = path.join(extensionDir, '.env');
|
||||
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
|
||||
expect(actualContent).toBe('VAR1="a value with spaces"\n');
|
||||
});
|
||||
|
||||
it('should not attempt to clear secrets if keychain is unavailable', async () => {
|
||||
// Arrange
|
||||
const mockIsAvailable = vi.fn().mockResolvedValue(false);
|
||||
const mockListSecrets = vi.fn();
|
||||
|
||||
vi.mocked(KeychainTokenStorage).mockImplementation(
|
||||
() =>
|
||||
({
|
||||
isAvailable: mockIsAvailable,
|
||||
listSecrets: mockListSecrets,
|
||||
deleteSecret: vi.fn(),
|
||||
getSecret: vi.fn(),
|
||||
setSecret: vi.fn(),
|
||||
}) as unknown as KeychainTokenStorage,
|
||||
);
|
||||
|
||||
const config: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [], // Empty settings triggers clearSettings
|
||||
};
|
||||
|
||||
const previousConfig: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
|
||||
};
|
||||
|
||||
// Act
|
||||
await maybePromptForSettings(
|
||||
config,
|
||||
'12345',
|
||||
mockRequestSetting,
|
||||
previousConfig,
|
||||
undefined,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(mockIsAvailable).toHaveBeenCalled();
|
||||
expect(mockListSecrets).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('promptForSetting', () => {
|
||||
it.each([
|
||||
{
|
||||
description:
|
||||
'should use prompts with type "password" for sensitive settings',
|
||||
setting: {
|
||||
name: 'API Key',
|
||||
description: 'Your secret key',
|
||||
envVar: 'API_KEY',
|
||||
sensitive: true,
|
||||
},
|
||||
expectedType: 'password',
|
||||
promptValue: 'secret-key',
|
||||
},
|
||||
{
|
||||
description:
|
||||
'should use prompts with type "text" for non-sensitive settings',
|
||||
setting: {
|
||||
name: 'Username',
|
||||
description: 'Your public username',
|
||||
envVar: 'USERNAME',
|
||||
sensitive: false,
|
||||
},
|
||||
expectedType: 'text',
|
||||
promptValue: 'test-user',
|
||||
},
|
||||
{
|
||||
description: 'should default to "text" if sensitive is undefined',
|
||||
setting: {
|
||||
name: 'Username',
|
||||
description: 'Your public username',
|
||||
envVar: 'USERNAME',
|
||||
},
|
||||
expectedType: 'text',
|
||||
promptValue: 'test-user',
|
||||
},
|
||||
])('$description', async ({ setting, expectedType, promptValue }) => {
|
||||
vi.mocked(prompts).mockResolvedValue({ value: promptValue });
|
||||
|
||||
const result = await promptForSetting(setting as ExtensionSetting);
|
||||
|
||||
expect(prompts).toHaveBeenCalledWith({
|
||||
type: expectedType,
|
||||
name: 'value',
|
||||
message: `${setting.name}\n${setting.description}`,
|
||||
});
|
||||
expect(result).toBe(promptValue);
|
||||
});
|
||||
|
||||
it('should return undefined if the user cancels the prompt', async () => {
|
||||
vi.mocked(prompts).mockResolvedValue({ value: undefined });
|
||||
const result = await promptForSetting({
|
||||
name: 'Test',
|
||||
description: 'Test desc',
|
||||
envVar: 'TEST_VAR',
|
||||
});
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getScopedEnvContents', () => {
|
||||
const config: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{
|
||||
name: 's2',
|
||||
description: 'd2',
|
||||
envVar: 'SENSITIVE_VAR',
|
||||
sensitive: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
const extensionId = '12345';
|
||||
|
||||
it('should return combined contents from user .env and keychain for USER scope', async () => {
|
||||
const userEnvPath = path.join(extensionDir, EXTENSION_SETTINGS_FILENAME);
|
||||
await fsPromises.writeFile(userEnvPath, 'VAR1=user-value1');
|
||||
const userKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext 12345`,
|
||||
);
|
||||
await userKeychain.setSecret('SENSITIVE_VAR', 'user-secret');
|
||||
|
||||
const contents = await getScopedEnvContents(
|
||||
config,
|
||||
extensionId,
|
||||
ExtensionSettingScope.USER,
|
||||
);
|
||||
|
||||
expect(contents).toEqual({
|
||||
VAR1: 'user-value1',
|
||||
SENSITIVE_VAR: 'user-secret',
|
||||
});
|
||||
});
|
||||
|
||||
it('should return combined contents from workspace .env and keychain for WORKSPACE scope', async () => {
|
||||
const workspaceEnvPath = path.join(
|
||||
tempWorkspaceDir,
|
||||
EXTENSION_SETTINGS_FILENAME,
|
||||
);
|
||||
await fsPromises.writeFile(workspaceEnvPath, 'VAR1=workspace-value1');
|
||||
const workspaceKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext 12345 ${tempWorkspaceDir}`,
|
||||
);
|
||||
await workspaceKeychain.setSecret('SENSITIVE_VAR', 'workspace-secret');
|
||||
|
||||
const contents = await getScopedEnvContents(
|
||||
config,
|
||||
extensionId,
|
||||
ExtensionSettingScope.WORKSPACE,
|
||||
);
|
||||
|
||||
expect(contents).toEqual({
|
||||
VAR1: 'workspace-value1',
|
||||
SENSITIVE_VAR: 'workspace-secret',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEnvContents (merged)', () => {
|
||||
const config: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{ name: 's2', description: 'd2', envVar: 'VAR2', sensitive: true },
|
||||
{ name: 's3', description: 'd3', envVar: 'VAR3' },
|
||||
],
|
||||
};
|
||||
const extensionId = '12345';
|
||||
|
||||
it('should merge user and workspace settings, with workspace taking precedence', async () => {
|
||||
// User settings
|
||||
const userEnvPath = path.join(extensionDir, EXTENSION_SETTINGS_FILENAME);
|
||||
await fsPromises.writeFile(
|
||||
userEnvPath,
|
||||
'VAR1=user-value1\nVAR3=user-value3',
|
||||
);
|
||||
const userKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext ${extensionId}`,
|
||||
);
|
||||
await userKeychain.setSecret('VAR2', 'user-secret2');
|
||||
|
||||
// Workspace settings
|
||||
const workspaceEnvPath = path.join(
|
||||
tempWorkspaceDir,
|
||||
EXTENSION_SETTINGS_FILENAME,
|
||||
);
|
||||
await fsPromises.writeFile(workspaceEnvPath, 'VAR1=workspace-value1');
|
||||
const workspaceKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext ${extensionId} ${tempWorkspaceDir}`,
|
||||
);
|
||||
await workspaceKeychain.setSecret('VAR2', 'workspace-secret2');
|
||||
|
||||
const contents = await getEnvContents(config, extensionId);
|
||||
|
||||
expect(contents).toEqual({
|
||||
VAR1: 'workspace-value1',
|
||||
VAR2: 'workspace-secret2',
|
||||
VAR3: 'user-value3',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateSetting', () => {
|
||||
const config: ExtensionConfig = {
|
||||
name: 'test-ext',
|
||||
version: '1.0.0',
|
||||
settings: [
|
||||
{ name: 's1', description: 'd1', envVar: 'VAR1' },
|
||||
{ name: 's2', description: 'd2', envVar: 'VAR2', sensitive: true },
|
||||
],
|
||||
};
|
||||
const mockRequestSetting = vi.fn();
|
||||
|
||||
beforeEach(async () => {
|
||||
const userEnvPath = path.join(extensionDir, '.env');
|
||||
await fsPromises.writeFile(userEnvPath, 'VAR1=value1\n');
|
||||
const userKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext 12345`,
|
||||
);
|
||||
await userKeychain.setSecret('VAR2', 'value2');
|
||||
mockRequestSetting.mockClear();
|
||||
});
|
||||
|
||||
it('should update a non-sensitive setting in USER scope', async () => {
|
||||
mockRequestSetting.mockResolvedValue('new-value1');
|
||||
|
||||
await updateSetting(
|
||||
config,
|
||||
'12345',
|
||||
'VAR1',
|
||||
mockRequestSetting,
|
||||
ExtensionSettingScope.USER,
|
||||
);
|
||||
|
||||
const expectedEnvPath = path.join(extensionDir, '.env');
|
||||
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
|
||||
expect(actualContent).toContain('VAR1=new-value1');
|
||||
});
|
||||
|
||||
it('should update a non-sensitive setting in WORKSPACE scope', async () => {
|
||||
mockRequestSetting.mockResolvedValue('new-workspace-value');
|
||||
|
||||
await updateSetting(
|
||||
config,
|
||||
'12345',
|
||||
'VAR1',
|
||||
mockRequestSetting,
|
||||
ExtensionSettingScope.WORKSPACE,
|
||||
);
|
||||
|
||||
const expectedEnvPath = path.join(tempWorkspaceDir, '.env');
|
||||
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
|
||||
expect(actualContent).toContain('VAR1=new-workspace-value');
|
||||
});
|
||||
|
||||
it('should update a sensitive setting in USER scope', async () => {
|
||||
mockRequestSetting.mockResolvedValue('new-value2');
|
||||
|
||||
await updateSetting(
|
||||
config,
|
||||
'12345',
|
||||
'VAR2',
|
||||
mockRequestSetting,
|
||||
ExtensionSettingScope.USER,
|
||||
);
|
||||
|
||||
const userKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext 12345`,
|
||||
);
|
||||
expect(await userKeychain.getSecret('VAR2')).toBe('new-value2');
|
||||
});
|
||||
|
||||
it('should update a sensitive setting in WORKSPACE scope', async () => {
|
||||
mockRequestSetting.mockResolvedValue('new-workspace-secret');
|
||||
|
||||
await updateSetting(
|
||||
config,
|
||||
'12345',
|
||||
'VAR2',
|
||||
mockRequestSetting,
|
||||
ExtensionSettingScope.WORKSPACE,
|
||||
);
|
||||
|
||||
const workspaceKeychain = new KeychainTokenStorage(
|
||||
`Gemini CLI Extensions test-ext 12345 ${tempWorkspaceDir}`,
|
||||
);
|
||||
expect(await workspaceKeychain.getSecret('VAR2')).toBe(
|
||||
'new-workspace-secret',
|
||||
);
|
||||
});
|
||||
|
||||
it('should leave existing, unmanaged .env variables intact when updating in WORKSPACE scope', async () => {
|
||||
// Setup a pre-existing .env file in the workspace with unmanaged variables
|
||||
const workspaceEnvPath = path.join(tempWorkspaceDir, '.env');
|
||||
const originalEnvContent =
|
||||
'PROJECT_VAR_1=value_1\nPROJECT_VAR_2=value_2\nVAR1=original-value'; // VAR1 is managed by extension
|
||||
await fsPromises.writeFile(workspaceEnvPath, originalEnvContent);
|
||||
|
||||
// Simulate updating an extension-managed non-sensitive setting
|
||||
mockRequestSetting.mockResolvedValue('updated-value');
|
||||
await updateSetting(
|
||||
config,
|
||||
'12345',
|
||||
'VAR1',
|
||||
mockRequestSetting,
|
||||
ExtensionSettingScope.WORKSPACE,
|
||||
);
|
||||
|
||||
// Read the .env file after update
|
||||
const actualContent = await fsPromises.readFile(
|
||||
workspaceEnvPath,
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
// Assert that original variables are intact and extension variable is updated
|
||||
expect(actualContent).toContain('PROJECT_VAR_1=value_1');
|
||||
expect(actualContent).toContain('PROJECT_VAR_2=value_2');
|
||||
expect(actualContent).toContain('VAR1=updated-value');
|
||||
|
||||
// Ensure no other unexpected changes or deletions
|
||||
const lines = actualContent.split('\n').filter((line) => line.length > 0);
|
||||
expect(lines).toHaveLength(3); // Should only have the three variables
|
||||
});
|
||||
});
|
||||
});
|
||||
packages/core/src/extension/extensionSettings.ts (new file, 298 lines)
@@ -0,0 +1,298 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs/promises';
|
||||
import * as fsSync from 'node:fs';
|
||||
import * as dotenv from 'dotenv';
|
||||
import * as path from 'node:path';
|
||||
import { ExtensionStorage } from './storage.js';
|
||||
import type { ExtensionConfig } from './extensionManager.js';
|
||||
import prompts from 'prompts';
|
||||
import { EXTENSION_SETTINGS_FILENAME } from './variables.js';
|
||||
import { KeychainTokenStorage } from '../mcp/token-storage/keychain-token-storage.js';
|
||||
|
||||
export enum ExtensionSettingScope {
|
||||
USER = 'user',
|
||||
WORKSPACE = 'workspace',
|
||||
}
|
||||
|
||||
export interface ExtensionSetting {
|
||||
name: string;
|
||||
description: string;
|
||||
envVar: string;
|
||||
// NOTE: If no value is set, this setting will be considered NOT sensitive.
|
||||
sensitive?: boolean;
|
||||
}
|
||||
|
||||
const getKeychainStorageName = (
|
||||
extensionName: string,
|
||||
extensionId: string,
|
||||
scope: ExtensionSettingScope,
|
||||
): string => {
|
||||
const base = `Qwen Code Extensions ${extensionName} ${extensionId}`;
|
||||
if (scope === ExtensionSettingScope.WORKSPACE) {
|
||||
return `${base} ${process.cwd()}`;
|
||||
}
|
||||
return base;
|
||||
};
|
||||
|
||||
const getEnvFilePath = (
|
||||
extensionName: string,
|
||||
scope: ExtensionSettingScope,
|
||||
): string => {
|
||||
if (scope === ExtensionSettingScope.WORKSPACE) {
|
||||
return path.join(process.cwd(), EXTENSION_SETTINGS_FILENAME);
|
||||
}
|
||||
return new ExtensionStorage(extensionName).getEnvFilePath();
|
||||
};
|
||||
|
||||
export async function maybePromptForSettings(
|
||||
extensionConfig: ExtensionConfig,
|
||||
extensionId: string,
|
||||
requestSetting: (setting: ExtensionSetting) => Promise<string>,
|
||||
previousExtensionConfig?: ExtensionConfig,
|
||||
previousSettings?: Record<string, string>,
|
||||
): Promise<void> {
|
||||
const { name: extensionName, settings } = extensionConfig;
|
||||
if (
|
||||
(!settings || settings.length === 0) &&
|
||||
(!previousExtensionConfig?.settings ||
|
||||
previousExtensionConfig.settings.length === 0)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
// We assume user scope here because we don't have a way to ask the user for scope during the initial setup.
|
||||
// The user can change the scope later using the `settings set` command.
|
||||
const scope = ExtensionSettingScope.USER;
|
||||
const envFilePath = getEnvFilePath(extensionName, scope);
|
||||
const keychain = new KeychainTokenStorage(
|
||||
getKeychainStorageName(extensionName, extensionId, scope),
|
||||
);
|
||||
|
||||
if (!settings || settings.length === 0) {
|
||||
await clearSettings(envFilePath, keychain);
|
||||
return;
|
||||
}
|
||||
|
||||
const settingsChanges = getSettingsChanges(
|
||||
settings,
|
||||
previousExtensionConfig?.settings ?? [],
|
||||
);
|
||||
|
||||
const allSettings: Record<string, string> = { ...previousSettings };
|
||||
|
||||
for (const removedEnvSetting of settingsChanges.removeEnv) {
|
||||
delete allSettings[removedEnvSetting.envVar];
|
||||
}
|
||||
|
||||
for (const removedSensitiveSetting of settingsChanges.removeSensitive) {
|
||||
await keychain.deleteSecret(removedSensitiveSetting.envVar);
|
||||
}
|
||||
|
||||
for (const setting of settingsChanges.promptForSensitive.concat(
|
||||
settingsChanges.promptForEnv,
|
||||
)) {
|
||||
const answer = await requestSetting(setting);
|
||||
allSettings[setting.envVar] = answer;
|
||||
}
|
||||
|
||||
const nonSensitiveSettings: Record<string, string> = {};
|
||||
for (const setting of settings) {
|
||||
const value = allSettings[setting.envVar];
|
||||
if (value === undefined) {
|
||||
continue;
|
||||
}
|
||||
if (setting.sensitive) {
|
||||
await keychain.setSecret(setting.envVar, value);
|
||||
} else {
|
||||
nonSensitiveSettings[setting.envVar] = value;
|
||||
}
|
||||
}
|
||||
|
||||
const envContent = formatEnvContent(nonSensitiveSettings);
|
||||
|
||||
await fs.writeFile(envFilePath, envContent);
|
||||
}
|
||||
|
||||
function formatEnvContent(settings: Record<string, string>): string {
|
||||
let envContent = '';
|
||||
for (const [key, value] of Object.entries(settings)) {
|
||||
const formattedValue = value.includes(' ') ? `"${value}"` : value;
|
||||
envContent += `${key}=${formattedValue}\n`;
|
||||
}
|
||||
return envContent;
|
||||
}
|
||||
|
||||
export async function promptForSetting(
|
||||
setting: ExtensionSetting,
|
||||
): Promise<string> {
|
||||
const response = await prompts({
|
||||
type: setting.sensitive ? 'password' : 'text',
|
||||
name: 'value',
|
||||
message: `${setting.name}\n${setting.description}`,
|
||||
});
|
||||
return response.value;
|
||||
}
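// Illustrative usage (a minimal sketch; the extension name, id, and setting
// below are hypothetical examples, not taken from this change): wiring the
// interactive prompt into the first-install settings flow.
async function exampleFirstInstall(): Promise<void> {
  const config: ExtensionConfig = {
    name: 'my-extension',
    version: '1.0.0',
    settings: [
      {
        name: 'API Key',
        description: 'Key used to reach the backing service',
        envVar: 'MY_EXT_API_KEY',
        sensitive: true,
      },
    ],
  };
  // Prompts for each declared setting; sensitive values go to the keychain,
  // everything else is written to the extension's .env file.
  await maybePromptForSettings(config, 'my-extension-id', promptForSetting);
}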
|
||||
|
||||
export async function getScopedEnvContents(
|
||||
extensionConfig: ExtensionConfig,
|
||||
extensionId: string,
|
||||
scope: ExtensionSettingScope,
|
||||
): Promise<Record<string, string>> {
|
||||
const { name: extensionName } = extensionConfig;
|
||||
const keychain = new KeychainTokenStorage(
|
||||
getKeychainStorageName(extensionName, extensionId, scope),
|
||||
);
|
||||
const envFilePath = getEnvFilePath(extensionName, scope);
|
||||
let customEnv: Record<string, string> = {};
|
||||
if (fsSync.existsSync(envFilePath)) {
|
||||
const envFile = fsSync.readFileSync(envFilePath, 'utf-8');
|
||||
customEnv = dotenv.parse(envFile);
|
||||
}
|
||||
|
||||
if (extensionConfig.settings) {
|
||||
for (const setting of extensionConfig.settings) {
|
||||
if (setting.sensitive) {
|
||||
const secret = await keychain.getSecret(setting.envVar);
|
||||
if (secret) {
|
||||
customEnv[setting.envVar] = secret;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return customEnv;
|
||||
}
|
||||
|
||||
export async function getEnvContents(
|
||||
extensionConfig: ExtensionConfig,
|
||||
extensionId: string,
|
||||
): Promise<Record<string, string>> {
|
||||
if (!extensionConfig.settings || extensionConfig.settings.length === 0) {
|
||||
return Promise.resolve({});
|
||||
}
|
||||
|
||||
const userSettings = await getScopedEnvContents(
|
||||
extensionConfig,
|
||||
extensionId,
|
||||
ExtensionSettingScope.USER,
|
||||
);
|
||||
const workspaceSettings = await getScopedEnvContents(
|
||||
extensionConfig,
|
||||
extensionId,
|
||||
ExtensionSettingScope.WORKSPACE,
|
||||
);
|
||||
|
||||
return { ...userSettings, ...workspaceSettings };
|
||||
}
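// Note on precedence: because workspace settings are spread last, a key
// defined in both scopes resolves to the workspace value (e.g. a user-scope
// VAR1 of 'a' is overridden by a workspace-scope VAR1 of 'b').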
|
||||
|
||||
export async function updateSetting(
|
||||
extensionConfig: ExtensionConfig,
|
||||
extensionId: string,
|
||||
settingKey: string,
|
||||
requestSetting: (setting: ExtensionSetting) => Promise<string>,
|
||||
scope: ExtensionSettingScope,
|
||||
): Promise<void> {
|
||||
const { name: extensionName, settings } = extensionConfig;
|
||||
if (!settings || settings.length === 0) {
|
||||
console.log('This extension does not have any settings.');
|
||||
return;
|
||||
}
|
||||
|
||||
const settingToUpdate = settings.find(
|
||||
(s) => s.name === settingKey || s.envVar === settingKey,
|
||||
);
|
||||
|
||||
if (!settingToUpdate) {
|
||||
console.log(`Setting ${settingKey} not found.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const newValue = await requestSetting(settingToUpdate);
|
||||
const keychain = new KeychainTokenStorage(
|
||||
getKeychainStorageName(extensionName, extensionId, scope),
|
||||
);
|
||||
|
||||
if (settingToUpdate.sensitive) {
|
||||
await keychain.setSecret(settingToUpdate.envVar, newValue);
|
||||
return;
|
||||
}
|
||||
|
||||
// For non-sensitive settings, we need to read the existing .env file,
|
||||
// update the value, and write it back, preserving any other values.
|
||||
const envFilePath = getEnvFilePath(extensionName, scope);
|
||||
let envContent = '';
|
||||
if (fsSync.existsSync(envFilePath)) {
|
||||
envContent = await fs.readFile(envFilePath, 'utf-8');
|
||||
}
|
||||
|
||||
const parsedEnv = dotenv.parse(envContent);
|
||||
parsedEnv[settingToUpdate.envVar] = newValue;
|
||||
|
||||
// We only want to write back the variables that are not sensitive.
|
||||
const nonSensitiveSettings: Record<string, string> = {};
|
||||
const sensitiveEnvVars = new Set(
|
||||
settings.filter((s) => s.sensitive).map((s) => s.envVar),
|
||||
);
|
||||
for (const [key, value] of Object.entries(parsedEnv)) {
|
||||
if (!sensitiveEnvVars.has(key)) {
|
||||
nonSensitiveSettings[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
const newEnvContent = formatEnvContent(nonSensitiveSettings);
|
||||
await fs.writeFile(envFilePath, newEnvContent);
|
||||
}
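// Illustrative usage (sketch; the extension id and setting key are
// hypothetical): re-prompting for a single value in workspace scope, as a
// `settings set` style command might do.
async function exampleSettingsSet(config: ExtensionConfig): Promise<void> {
  await updateSetting(
    config,
    'my-extension-id',
    'MY_EXT_API_KEY',
    promptForSetting,
    ExtensionSettingScope.WORKSPACE,
  );
}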
|
||||
|
||||
interface settingsChanges {
|
||||
promptForSensitive: ExtensionSetting[];
|
||||
removeSensitive: ExtensionSetting[];
|
||||
promptForEnv: ExtensionSetting[];
|
||||
removeEnv: ExtensionSetting[];
|
||||
}
|
||||
function getSettingsChanges(
|
||||
settings: ExtensionSetting[],
|
||||
oldSettings: ExtensionSetting[],
|
||||
): settingsChanges {
|
||||
const isSameSetting = (a: ExtensionSetting, b: ExtensionSetting) =>
|
||||
a.envVar === b.envVar && (a.sensitive ?? false) === (b.sensitive ?? false);
|
||||
|
||||
const sensitiveOld = oldSettings.filter((s) => s.sensitive ?? false);
|
||||
const sensitiveNew = settings.filter((s) => s.sensitive ?? false);
|
||||
const envOld = oldSettings.filter((s) => !(s.sensitive ?? false));
|
||||
const envNew = settings.filter((s) => !(s.sensitive ?? false));
|
||||
|
||||
return {
|
||||
promptForSensitive: sensitiveNew.filter(
|
||||
(s) => !sensitiveOld.some((old) => isSameSetting(s, old)),
|
||||
),
|
||||
removeSensitive: sensitiveOld.filter(
|
||||
(s) => !sensitiveNew.some((neu) => isSameSetting(s, neu)),
|
||||
),
|
||||
promptForEnv: envNew.filter(
|
||||
(s) => !envOld.some((old) => isSameSetting(s, old)),
|
||||
),
|
||||
removeEnv: envOld.filter(
|
||||
(s) => !envNew.some((neu) => isSameSetting(s, neu)),
|
||||
),
|
||||
};
|
||||
}
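// Illustrative behaviour (sketch with made-up settings): flipping a setting's
// `sensitive` flag is treated as a removal from one store plus a fresh prompt
// for the other, so for
//   settings    = [{ name: 'Key', description: '', envVar: 'KEY', sensitive: true }]
//   oldSettings = [{ name: 'Key', description: '', envVar: 'KEY' }]
// the result is promptForSensitive: [KEY], removeEnv: [KEY], and empty
// promptForEnv / removeSensitive lists.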
|
||||
|
||||
async function clearSettings(
|
||||
envFilePath: string,
|
||||
keychain: KeychainTokenStorage,
|
||||
) {
|
||||
if (fsSync.existsSync(envFilePath)) {
|
||||
await fs.writeFile(envFilePath, '');
|
||||
}
|
||||
if (!(await keychain.isAvailable())) {
|
||||
return;
|
||||
}
|
||||
const secrets = await keychain.listSecrets();
|
||||
for (const secret of secrets) {
|
||||
await keychain.deleteSecret(secret);
|
||||
}
|
||||
return;
|
||||
}
|
||||
179
packages/core/src/extension/gemini-converter.test.ts
Normal file
@@ -0,0 +1,179 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
convertGeminiToQwenConfig,
|
||||
isGeminiExtensionConfig,
|
||||
type GeminiExtensionConfig,
|
||||
} from './gemini-converter.js';
|
||||
|
||||
// Mock fs module
|
||||
vi.mock('node:fs', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('node:fs')>();
|
||||
return {
|
||||
...actual,
|
||||
existsSync: vi.fn(),
|
||||
readFileSync: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
describe('convertGeminiToQwenConfig', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should convert basic Gemini config from directory', () => {
|
||||
const mockDir = '/mock/extension/dir';
|
||||
const geminiConfig: GeminiExtensionConfig = {
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
};
|
||||
|
||||
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(geminiConfig));
|
||||
|
||||
const result = convertGeminiToQwenConfig(mockDir);
|
||||
|
||||
expect(result.name).toBe('test-extension');
|
||||
expect(result.version).toBe('1.0.0');
|
||||
expect(fs.readFileSync).toHaveBeenCalledWith(
|
||||
path.join(mockDir, 'gemini-extension.json'),
|
||||
'utf-8',
|
||||
);
|
||||
});
|
||||
|
||||
it('should convert config with all optional fields', () => {
|
||||
const mockDir = '/mock/extension/dir';
|
||||
const geminiConfig = {
|
||||
name: 'full-extension',
|
||||
version: '2.0.0',
|
||||
mcpServers: { server1: {} },
|
||||
contextFileName: 'context.txt',
|
||||
excludeTools: ['tool1', 'tool2'],
|
||||
settings: [
|
||||
{ name: 'Setting1', envVar: 'VAR1', description: 'Test setting' },
|
||||
],
|
||||
};
|
||||
|
||||
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(geminiConfig));
|
||||
|
||||
const result = convertGeminiToQwenConfig(mockDir);
|
||||
|
||||
expect(result.name).toBe('full-extension');
|
||||
expect(result.version).toBe('2.0.0');
|
||||
expect(result.mcpServers).toEqual({ server1: {} });
|
||||
expect(result.contextFileName).toBe('context.txt');
|
||||
expect(result.excludeTools).toEqual(['tool1', 'tool2']);
|
||||
expect(result.settings).toHaveLength(1);
|
||||
expect(result.settings?.[0].name).toBe('Setting1');
|
||||
});
|
||||
|
||||
it('should throw error for missing name', () => {
|
||||
const mockDir = '/mock/extension/dir';
|
||||
const invalidConfig = {
|
||||
version: '1.0.0',
|
||||
};
|
||||
|
||||
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
|
||||
|
||||
expect(() => convertGeminiToQwenConfig(mockDir)).toThrow(
|
||||
'Gemini extension config must have name and version fields',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw error for missing version', () => {
|
||||
const mockDir = '/mock/extension/dir';
|
||||
const invalidConfig = {
|
||||
name: 'test-extension',
|
||||
};
|
||||
|
||||
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
|
||||
|
||||
expect(() => convertGeminiToQwenConfig(mockDir)).toThrow(
|
||||
'Gemini extension config must have name and version fields',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isGeminiExtensionConfig', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should identify Gemini extension directory with valid config', () => {
|
||||
const mockDir = '/mock/extension/dir';
|
||||
const mockConfig = {
|
||||
name: 'test',
|
||||
version: '1.0.0',
|
||||
settings: [{ name: 'Test', envVar: 'TEST', description: 'Test' }],
|
||||
};
|
||||
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(mockConfig));
|
||||
|
||||
expect(isGeminiExtensionConfig(mockDir)).toBe(true);
|
||||
|
||||
expect(fs.existsSync).toHaveBeenCalledWith(
|
||||
path.join(mockDir, 'gemini-extension.json'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return false when gemini-extension.json does not exist', () => {
|
||||
const mockDir = '/mock/nonexistent/dir';
|
||||
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
|
||||
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for invalid config content', () => {
|
||||
const mockDir = '/mock/invalid/dir';
|
||||
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.readFileSync).mockReturnValue('null');
|
||||
|
||||
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for config missing required fields', () => {
|
||||
const mockDir = '/mock/invalid/dir';
|
||||
const invalidConfig = {
|
||||
name: 'test',
|
||||
// missing version
|
||||
};
|
||||
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
|
||||
|
||||
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for basic config without settings', () => {
|
||||
const mockDir = '/mock/extension/dir';
|
||||
const basicConfig = {
|
||||
name: 'test',
|
||||
version: '1.0.0',
|
||||
};
|
||||
|
||||
vi.mocked(fs.existsSync).mockReturnValue(true);
|
||||
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(basicConfig));
|
||||
|
||||
expect(isGeminiExtensionConfig(mockDir)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
// Note: convertGeminiExtensionPackage() is tested through integration tests
|
||||
// as it requires real file system operations
|
||||
217
packages/core/src/extension/gemini-converter.ts
Normal file
@@ -0,0 +1,217 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* Converter for Gemini CLI extensions to Qwen Code format.
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import { glob } from 'glob';
|
||||
import type { ExtensionConfig, ExtensionSetting } from './extensionManager.js';
|
||||
import { ExtensionStorage } from './storage.js';
|
||||
import { convertTomlToMarkdown } from '../utils/toml-to-markdown-converter.js';
|
||||
|
||||
export interface GeminiExtensionConfig {
|
||||
name: string;
|
||||
version: string;
|
||||
mcpServers?: Record<string, unknown>;
|
||||
contextFileName?: string | string[];
|
||||
excludeTools?: string[];
|
||||
settings?: ExtensionSetting[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a Gemini CLI extension config to Qwen Code format.
|
||||
* @param extensionDir Path to the Gemini extension directory
|
||||
* @returns Qwen ExtensionConfig
|
||||
*/
|
||||
export function convertGeminiToQwenConfig(
|
||||
extensionDir: string,
|
||||
): ExtensionConfig {
|
||||
const configFilePath = path.join(extensionDir, 'gemini-extension.json');
|
||||
const configContent = fs.readFileSync(configFilePath, 'utf-8');
|
||||
const geminiConfig: GeminiExtensionConfig = JSON.parse(configContent);
|
||||
// Validate required fields
|
||||
if (!geminiConfig.name || !geminiConfig.version) {
|
||||
throw new Error(
|
||||
'Gemini extension config must have name and version fields',
|
||||
);
|
||||
}
|
||||
|
||||
const settings: ExtensionSetting[] | undefined = geminiConfig.settings;
|
||||
|
||||
// Direct field mapping
|
||||
return {
|
||||
name: geminiConfig.name,
|
||||
version: geminiConfig.version,
|
||||
mcpServers: geminiConfig.mcpServers as ExtensionConfig['mcpServers'],
|
||||
contextFileName: geminiConfig.contextFileName,
|
||||
excludeTools: geminiConfig.excludeTools,
|
||||
settings,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a complete Gemini extension package to Qwen Code format.
|
||||
* Creates a new temporary directory with:
|
||||
* 1. Converted qwen-extension.json
|
||||
* 2. Commands converted from TOML to MD
|
||||
* 3. All other files/folders preserved
|
||||
*
|
||||
* @param extensionDir Path to the Gemini extension directory
|
||||
* @returns Object containing converted config and the temporary directory path
|
||||
*/
|
||||
export async function convertGeminiExtensionPackage(
|
||||
extensionDir: string,
|
||||
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
|
||||
const geminiConfig = convertGeminiToQwenConfig(extensionDir);
|
||||
|
||||
// Create temporary directory for converted extension
|
||||
const tmpDir = await ExtensionStorage.createTmpDir();
|
||||
|
||||
try {
|
||||
// Step 1: Copy all files and directories to temporary directory
|
||||
await copyDirectory(extensionDir, tmpDir);
|
||||
|
||||
// Step 2: Convert TOML commands to Markdown in commands folder
|
||||
const commandsDir = path.join(tmpDir, 'commands');
|
||||
if (fs.existsSync(commandsDir)) {
|
||||
await convertCommandsDirectory(commandsDir);
|
||||
}
|
||||
|
||||
// Step 3: Create qwen-extension.json with converted config
|
||||
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
|
||||
fs.writeFileSync(
|
||||
qwenConfigPath,
|
||||
JSON.stringify(geminiConfig, null, 2),
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
return {
|
||||
config: geminiConfig,
|
||||
convertedDir: tmpDir,
|
||||
};
|
||||
} catch (error) {
|
||||
// Clean up temporary directory on error
|
||||
try {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively copies a directory and its contents.
|
||||
* @param source Source directory path
|
||||
* @param destination Destination directory path
|
||||
*/
|
||||
export async function copyDirectory(
|
||||
source: string,
|
||||
destination: string,
|
||||
): Promise<void> {
|
||||
// Create destination directory if it doesn't exist
|
||||
if (!fs.existsSync(destination)) {
|
||||
fs.mkdirSync(destination, { recursive: true });
|
||||
}
|
||||
|
||||
const entries = fs.readdirSync(source, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const sourcePath = path.join(source, entry.name);
|
||||
const destPath = path.join(destination, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await copyDirectory(sourcePath, destPath);
|
||||
} else {
|
||||
fs.copyFileSync(sourcePath, destPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts all TOML command files in a directory to Markdown format.
|
||||
* @param commandsDir Path to the commands directory
|
||||
*/
|
||||
async function convertCommandsDirectory(commandsDir: string): Promise<void> {
|
||||
// Find all .toml files in the commands directory
|
||||
const tomlFiles = await glob('**/*.toml', {
|
||||
cwd: commandsDir,
|
||||
nodir: true,
|
||||
dot: false,
|
||||
});
|
||||
|
||||
// Convert each TOML file to Markdown
|
||||
for (const relativeFile of tomlFiles) {
|
||||
const tomlPath = path.join(commandsDir, relativeFile);
|
||||
|
||||
try {
|
||||
// Read TOML file
|
||||
const tomlContent = fs.readFileSync(tomlPath, 'utf-8');
|
||||
|
||||
// Convert to Markdown
|
||||
const markdownContent = convertTomlToMarkdown(tomlContent);
|
||||
|
||||
// Generate Markdown file path (same location, .md extension)
|
||||
const markdownPath = tomlPath.replace(/\.toml$/, '.md');
|
||||
|
||||
// Write Markdown file
|
||||
fs.writeFileSync(markdownPath, markdownContent, 'utf-8');
|
||||
|
||||
// Delete original TOML file
|
||||
fs.unlinkSync(tomlPath);
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
`Warning: Failed to convert command file ${relativeFile}: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
// Continue with other files even if one fails
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether an extension directory contains a Gemini-format config.
* This is a heuristic check based on typical Gemini extension patterns.
* @param extensionDir Path to the extension directory to check
* @returns true if the config appears to be Gemini format
|
||||
*/
|
||||
export function isGeminiExtensionConfig(extensionDir: string) {
|
||||
const configFilePath = path.join(extensionDir, 'gemini-extension.json');
|
||||
if (!fs.existsSync(configFilePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const configContent = fs.readFileSync(configFilePath, 'utf-8');
|
||||
const parsedConfig = JSON.parse(configContent);
|
||||
|
||||
if (typeof parsedConfig !== 'object' || parsedConfig === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const obj = parsedConfig as Record<string, unknown>;
|
||||
|
||||
// Must have name and version
|
||||
if (typeof obj['name'] !== 'string' || typeof obj['version'] !== 'string') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for Gemini-specific settings format
|
||||
if (obj['settings'] && Array.isArray(obj['settings'])) {
|
||||
const firstSetting = obj['settings'][0];
|
||||
if (
|
||||
firstSetting &&
|
||||
typeof firstSetting === 'object' &&
|
||||
'envVar' in firstSetting
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// If it has Gemini-specific fields but not Qwen-specific fields, likely Gemini
|
||||
return true;
|
||||
}
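// Illustrative usage (sketch; the directory path is supplied by the caller):
// detect a Gemini-format extension and convert it into a temporary
// Qwen-format copy before installation.
async function exampleConvert(extensionDir: string): Promise<void> {
  if (isGeminiExtensionConfig(extensionDir)) {
    const { config, convertedDir } =
      await convertGeminiExtensionPackage(extensionDir);
    console.log(
      `Converted ${config.name}@${config.version} into ${convertedDir}`,
    );
  }
}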
|
||||
@@ -13,14 +13,17 @@ import {
|
||||
parseGitHubRepoForReleases,
|
||||
} from './github.js';
|
||||
import { simpleGit, type SimpleGit } from 'simple-git';
|
||||
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
|
||||
import * as os from 'node:os';
|
||||
import * as fs from 'node:fs/promises';
|
||||
import * as fsSync from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import * as tar from 'tar';
|
||||
import * as archiver from 'archiver';
|
||||
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
ExtensionUpdateState,
|
||||
type Extension,
|
||||
type ExtensionManager,
|
||||
} from './extensionManager.js';
|
||||
|
||||
const mockPlatform = vi.hoisted(() => vi.fn());
|
||||
const mockArch = vi.hoisted(() => vi.fn());
|
||||
@@ -123,119 +126,170 @@ describe('git extension helpers', () => {
|
||||
revparse: vi.fn(),
|
||||
};
|
||||
|
||||
const mockExtensionManager = {
|
||||
loadExtensionConfig: vi.fn(),
|
||||
} as unknown as ExtensionManager;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.mocked(simpleGit).mockReturnValue(mockGit as unknown as SimpleGit);
|
||||
});
|
||||
|
||||
it('should return NOT_UPDATABLE for non-git extensions', async () => {
|
||||
const extension: GeminiCLIExtension = {
|
||||
function createExtension(overrides: Partial<Extension> = {}): Extension {
|
||||
return {
|
||||
id: 'test-id',
|
||||
name: 'test',
|
||||
path: '/ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
config: { name: 'test', version: '1.0.0' },
|
||||
contextFiles: [],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
it('should return NOT_UPDATABLE for non-git extensions', async () => {
|
||||
const extension = createExtension({
|
||||
installMetadata: {
|
||||
type: 'link',
|
||||
source: '',
|
||||
},
|
||||
};
|
||||
let result: ExtensionUpdateState | undefined = undefined;
|
||||
await checkForExtensionUpdate(
|
||||
});
|
||||
const result = await checkForExtensionUpdate(
|
||||
extension,
|
||||
(newState) => (result = newState),
|
||||
mockExtensionManager,
|
||||
);
|
||||
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
|
||||
});
|
||||
|
||||
it('should return ERROR if no remotes found', async () => {
|
||||
const extension: GeminiCLIExtension = {
|
||||
name: 'test',
|
||||
path: '/ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
const extension = createExtension({
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: '',
|
||||
},
|
||||
};
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([]);
|
||||
let result: ExtensionUpdateState | undefined = undefined;
|
||||
await checkForExtensionUpdate(
|
||||
const result = await checkForExtensionUpdate(
|
||||
extension,
|
||||
(newState) => (result = newState),
|
||||
mockExtensionManager,
|
||||
);
|
||||
expect(result).toBe(ExtensionUpdateState.ERROR);
|
||||
});
|
||||
|
||||
it('should return UPDATE_AVAILABLE when remote hash is different', async () => {
|
||||
const extension: GeminiCLIExtension = {
|
||||
name: 'test',
|
||||
path: '/ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
const extension = createExtension({
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'my/ext',
|
||||
},
|
||||
};
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([
|
||||
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
|
||||
]);
|
||||
mockGit.listRemote.mockResolvedValue('remote-hash\tHEAD');
|
||||
mockGit.revparse.mockResolvedValue('local-hash');
|
||||
|
||||
let result: ExtensionUpdateState | undefined = undefined;
|
||||
await checkForExtensionUpdate(
|
||||
const result = await checkForExtensionUpdate(
|
||||
extension,
|
||||
(newState) => (result = newState),
|
||||
mockExtensionManager,
|
||||
);
|
||||
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
});
|
||||
|
||||
it('should return UP_TO_DATE when remote and local hashes are the same', async () => {
|
||||
const extension: GeminiCLIExtension = {
|
||||
name: 'test',
|
||||
path: '/ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
const extension = createExtension({
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'my/ext',
|
||||
},
|
||||
};
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([
|
||||
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
|
||||
]);
|
||||
mockGit.listRemote.mockResolvedValue('same-hash\tHEAD');
|
||||
mockGit.revparse.mockResolvedValue('same-hash');
|
||||
|
||||
let result: ExtensionUpdateState | undefined = undefined;
|
||||
await checkForExtensionUpdate(
|
||||
const result = await checkForExtensionUpdate(
|
||||
extension,
|
||||
(newState) => (result = newState),
|
||||
mockExtensionManager,
|
||||
);
|
||||
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
|
||||
});
|
||||
|
||||
it('should return ERROR on git error', async () => {
|
||||
const extension: GeminiCLIExtension = {
|
||||
name: 'test',
|
||||
path: '/ext',
|
||||
version: '1.0.0',
|
||||
isActive: true,
|
||||
const extension = createExtension({
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'my/ext',
|
||||
},
|
||||
};
|
||||
});
|
||||
mockGit.getRemotes.mockRejectedValue(new Error('git error'));
|
||||
|
||||
let result: ExtensionUpdateState | undefined = undefined;
|
||||
await checkForExtensionUpdate(
|
||||
const result = await checkForExtensionUpdate(
|
||||
extension,
|
||||
(newState) => (result = newState),
|
||||
mockExtensionManager,
|
||||
);
|
||||
expect(result).toBe(ExtensionUpdateState.ERROR);
|
||||
});
|
||||
|
||||
it('should return UPDATE_AVAILABLE for local extension with different version', async () => {
|
||||
const extension = createExtension({
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
type: 'local',
|
||||
source: '/path/to/source',
|
||||
},
|
||||
});
|
||||
|
||||
const mockManager = {
|
||||
loadExtensionConfig: vi.fn().mockReturnValue({
|
||||
name: 'test',
|
||||
version: '2.0.0',
|
||||
}),
|
||||
} as unknown as ExtensionManager;
|
||||
|
||||
const result = await checkForExtensionUpdate(extension, mockManager);
|
||||
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
});
|
||||
|
||||
it('should return UP_TO_DATE for local extension with same version', async () => {
|
||||
const extension = createExtension({
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
type: 'local',
|
||||
source: '/path/to/source',
|
||||
},
|
||||
});
|
||||
|
||||
const mockManager = {
|
||||
loadExtensionConfig: vi.fn().mockReturnValue({
|
||||
name: 'test',
|
||||
version: '1.0.0',
|
||||
}),
|
||||
} as unknown as ExtensionManager;
|
||||
|
||||
const result = await checkForExtensionUpdate(extension, mockManager);
|
||||
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
|
||||
});
|
||||
|
||||
it('should return NOT_UPDATABLE for local extension when source cannot be loaded', async () => {
|
||||
const extension = createExtension({
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
type: 'local',
|
||||
source: '/path/to/source',
|
||||
},
|
||||
});
|
||||
|
||||
const mockManager = {
|
||||
loadExtensionConfig: vi.fn().mockImplementation(() => {
|
||||
throw new Error('Cannot load config');
|
||||
}),
|
||||
} as unknown as ExtensionManager;
|
||||
|
||||
const result = await checkForExtensionUpdate(extension, mockManager);
|
||||
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
|
||||
});
|
||||
});
|
||||
|
||||
describe('findReleaseAsset', () => {
|
||||
@@ -5,19 +5,38 @@
|
||||
*/
|
||||
|
||||
import { simpleGit } from 'simple-git';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import type {
|
||||
ExtensionInstallMetadata,
|
||||
GeminiCLIExtension,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
|
||||
import { getErrorMessage } from '../utils/errors.js';
|
||||
import * as os from 'node:os';
|
||||
import * as https from 'node:https';
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import { EXTENSIONS_CONFIG_FILENAME, loadExtension } from '../extension.js';
|
||||
import { EXTENSIONS_CONFIG_FILENAME } from './variables.js';
|
||||
import * as tar from 'tar';
|
||||
import extract from 'extract-zip';
|
||||
import {
|
||||
ExtensionUpdateState,
|
||||
type Extension,
|
||||
type ExtensionConfig,
|
||||
type ExtensionManager,
|
||||
} from './extensionManager.js';
|
||||
import type { ExtensionInstallMetadata } from '../config/config.js';
|
||||
|
||||
interface GithubReleaseData {
|
||||
assets: Asset[];
|
||||
tag_name: string;
|
||||
tarball_url?: string;
|
||||
zipball_url?: string;
|
||||
}
|
||||
|
||||
interface Asset {
|
||||
name: string;
|
||||
browser_download_url: string;
|
||||
}
|
||||
|
||||
export interface GitHubDownloadResult {
|
||||
tagName: string;
|
||||
type: 'git' | 'github-release';
|
||||
}
|
||||
|
||||
function getGitHubToken(): string | undefined {
|
||||
return process.env['GITHUB_TOKEN'];
|
||||
@@ -115,38 +134,40 @@ async function fetchReleaseFromGithub(
|
||||
}
|
||||
|
||||
export async function checkForExtensionUpdate(
|
||||
extension: GeminiCLIExtension,
|
||||
setExtensionUpdateState: (updateState: ExtensionUpdateState) => void,
|
||||
cwd: string = process.cwd(),
|
||||
): Promise<void> {
|
||||
setExtensionUpdateState(ExtensionUpdateState.CHECKING_FOR_UPDATES);
|
||||
extension: Extension,
|
||||
extensionManager: ExtensionManager,
|
||||
): Promise<ExtensionUpdateState> {
|
||||
const installMetadata = extension.installMetadata;
|
||||
if (installMetadata?.type === 'local') {
|
||||
const newExtension = loadExtension({
|
||||
extensionDir: installMetadata.source,
|
||||
workspaceDir: cwd,
|
||||
});
|
||||
if (!newExtension) {
|
||||
let latestConfig: ExtensionConfig | undefined;
|
||||
try {
|
||||
latestConfig = extensionManager.loadExtensionConfig({
|
||||
extensionDir: installMetadata.source,
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(
|
||||
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}. Error: ${getErrorMessage(e)}`,
|
||||
);
|
||||
return ExtensionUpdateState.NOT_UPDATABLE;
|
||||
}
|
||||
|
||||
if (!latestConfig) {
|
||||
console.error(
|
||||
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}`,
|
||||
);
|
||||
setExtensionUpdateState(ExtensionUpdateState.ERROR);
|
||||
return;
|
||||
return ExtensionUpdateState.NOT_UPDATABLE;
|
||||
}
|
||||
if (newExtension.config.version !== extension.version) {
|
||||
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
return;
|
||||
if (latestConfig.version !== extension.version) {
|
||||
return ExtensionUpdateState.UPDATE_AVAILABLE;
|
||||
}
|
||||
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
|
||||
return;
|
||||
return ExtensionUpdateState.UP_TO_DATE;
|
||||
}
|
||||
if (
|
||||
!installMetadata ||
|
||||
(installMetadata.type !== 'git' &&
|
||||
installMetadata.type !== 'github-release')
|
||||
) {
|
||||
setExtensionUpdateState(ExtensionUpdateState.NOT_UPDATABLE);
|
||||
return;
|
||||
return ExtensionUpdateState.NOT_UPDATABLE;
|
||||
}
|
||||
try {
|
||||
if (installMetadata.type === 'git') {
|
||||
@@ -154,14 +175,12 @@ export async function checkForExtensionUpdate(
|
||||
const remotes = await git.getRemotes(true);
|
||||
if (remotes.length === 0) {
|
||||
console.error('No git remotes found.');
|
||||
setExtensionUpdateState(ExtensionUpdateState.ERROR);
|
||||
return;
|
||||
return ExtensionUpdateState.ERROR;
|
||||
}
|
||||
const remoteUrl = remotes[0].refs.fetch;
|
||||
if (!remoteUrl) {
|
||||
console.error(`No fetch URL found for git remote ${remotes[0].name}.`);
|
||||
setExtensionUpdateState(ExtensionUpdateState.ERROR);
|
||||
return;
|
||||
return ExtensionUpdateState.ERROR;
|
||||
}
|
||||
|
||||
// Determine the ref to check on the remote.
|
||||
@@ -171,8 +190,7 @@ export async function checkForExtensionUpdate(
|
||||
|
||||
if (typeof lsRemoteOutput !== 'string' || lsRemoteOutput.trim() === '') {
|
||||
console.error(`Git ref ${refToCheck} not found.`);
|
||||
setExtensionUpdateState(ExtensionUpdateState.ERROR);
|
||||
return;
|
||||
return ExtensionUpdateState.ERROR;
|
||||
}
|
||||
|
||||
const remoteHash = lsRemoteOutput.split('\t')[0];
|
||||
@@ -182,21 +200,17 @@ export async function checkForExtensionUpdate(
|
||||
console.error(
|
||||
`Unable to parse hash from git ls-remote output "${lsRemoteOutput}"`,
|
||||
);
|
||||
setExtensionUpdateState(ExtensionUpdateState.ERROR);
|
||||
return;
|
||||
return ExtensionUpdateState.ERROR;
|
||||
}
|
||||
if (remoteHash === localHash) {
|
||||
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
|
||||
return;
|
||||
return ExtensionUpdateState.UP_TO_DATE;
|
||||
}
|
||||
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
return;
|
||||
return ExtensionUpdateState.UPDATE_AVAILABLE;
|
||||
} else {
|
||||
const { source, releaseTag } = installMetadata;
|
||||
if (!source) {
|
||||
console.error(`No "source" provided for extension.`);
|
||||
setExtensionUpdateState(ExtensionUpdateState.ERROR);
|
||||
return;
|
||||
return ExtensionUpdateState.ERROR;
|
||||
}
|
||||
const { owner, repo } = parseGitHubRepoForReleases(source);
|
||||
|
||||
@@ -206,30 +220,28 @@ export async function checkForExtensionUpdate(
|
||||
installMetadata.ref,
|
||||
);
|
||||
if (releaseData.tag_name !== releaseTag) {
|
||||
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
return;
|
||||
return ExtensionUpdateState.UPDATE_AVAILABLE;
|
||||
}
|
||||
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
|
||||
return;
|
||||
return ExtensionUpdateState.UP_TO_DATE;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Failed to check for updates for extension "${installMetadata.source}": ${getErrorMessage(error)}`,
|
||||
);
|
||||
setExtensionUpdateState(ExtensionUpdateState.ERROR);
|
||||
return;
|
||||
return ExtensionUpdateState.ERROR;
|
||||
}
|
||||
}
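// Illustrative usage (sketch; how the caller obtains `extension` and
// `manager` is outside this hunk): with the new signature the caller awaits
// the resulting state directly instead of passing a setState callback.
async function exampleCheck(
  extension: Extension,
  manager: ExtensionManager,
): Promise<void> {
  const state = await checkForExtensionUpdate(extension, manager);
  if (state === ExtensionUpdateState.UPDATE_AVAILABLE) {
    console.log(`An update is available for ${extension.name}.`);
  }
}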
|
||||
export interface GitHubDownloadResult {
|
||||
tagName: string;
|
||||
type: 'git' | 'github-release';
|
||||
}
|
||||
|
||||
export async function downloadFromGitHubRelease(
|
||||
installMetadata: ExtensionInstallMetadata,
|
||||
destination: string,
|
||||
): Promise<GitHubDownloadResult> {
|
||||
const { source, ref } = installMetadata;
|
||||
const { owner, repo } = parseGitHubRepoForReleases(source);
|
||||
const { source, ref, marketplace, type } = installMetadata;
|
||||
const { owner, repo } = parseGitHubRepoForReleases(
|
||||
type === 'marketplace' && marketplace
|
||||
? marketplace.marketplaceSource
|
||||
: source,
|
||||
);
|
||||
|
||||
try {
|
||||
const releaseData = await fetchReleaseFromGithub(owner, repo, ref);
|
||||
@@ -276,28 +288,47 @@ export async function downloadFromGitHubRelease(
|
||||
// For regular github releases, the repository is put inside of a top level
|
||||
// directory. In this case we should see exactly two files in the destination
|
||||
// dir, the archive and the directory. If we see that, validate that the
|
||||
// dir has a qwen extension configuration file and then move all files
|
||||
// from the directory up one level into the destination directory.
|
||||
// dir has a qwen extension configuration file (or gemini-extension.json
|
||||
// which will be converted later) and then move all files from the directory
|
||||
// up one level into the destination directory.
|
||||
const entries = await fs.promises.readdir(destination, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
if (entries.length === 2) {
|
||||
const lonelyDir = entries.find((entry) => entry.isDirectory());
|
||||
if (
|
||||
lonelyDir &&
|
||||
fs.existsSync(
|
||||
if (lonelyDir) {
|
||||
const hasQwenConfig = fs.existsSync(
|
||||
path.join(destination, lonelyDir.name, EXTENSIONS_CONFIG_FILENAME),
|
||||
)
|
||||
) {
|
||||
const dirPathToExtract = path.join(destination, lonelyDir.name);
|
||||
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
|
||||
for (const file of extractedDirFiles) {
|
||||
await fs.promises.rename(
|
||||
path.join(dirPathToExtract, file),
|
||||
path.join(destination, file),
|
||||
);
|
||||
);
|
||||
const hasGeminiConfig = fs.existsSync(
|
||||
path.join(destination, lonelyDir.name, 'gemini-extension.json'),
|
||||
);
|
||||
const hasMarketplaceConfig = fs.existsSync(
|
||||
path.join(
|
||||
destination,
|
||||
lonelyDir.name,
|
||||
'.claude-plugin/marketplace.json',
|
||||
),
|
||||
);
|
||||
const hasClaudePluginConfig = fs.existsSync(
|
||||
path.join(destination, lonelyDir.name, '.claude-plugin/plugin.json'),
|
||||
);
|
||||
if (
|
||||
hasQwenConfig ||
|
||||
hasGeminiConfig ||
|
||||
hasMarketplaceConfig ||
|
||||
hasClaudePluginConfig
|
||||
) {
|
||||
const dirPathToExtract = path.join(destination, lonelyDir.name);
|
||||
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
|
||||
for (const file of extractedDirFiles) {
|
||||
await fs.promises.rename(
|
||||
path.join(dirPathToExtract, file),
|
||||
path.join(destination, file),
|
||||
);
|
||||
}
|
||||
await fs.promises.rmdir(dirPathToExtract);
|
||||
}
|
||||
await fs.promises.rmdir(dirPathToExtract);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -313,18 +344,6 @@ export async function downloadFromGitHubRelease(
|
||||
}
|
||||
}
|
||||
|
||||
interface GithubReleaseData {
|
||||
assets: Asset[];
|
||||
tag_name: string;
|
||||
tarball_url?: string;
|
||||
zipball_url?: string;
|
||||
}
|
||||
|
||||
interface Asset {
|
||||
name: string;
|
||||
browser_download_url: string;
|
||||
}
|
||||
|
||||
export function findReleaseAsset(assets: Asset[]): Asset | undefined {
|
||||
const platform = os.platform();
|
||||
const arch = os.arch();
|
||||
3
packages/core/src/extension/index.ts
Normal file
@@ -0,0 +1,3 @@
export * from './extensionManager.js';
|
||||
export * from './variables.js';
|
||||
export * from './github.js';
|
||||
78
packages/core/src/extension/marketplace.test.ts
Normal file
@@ -0,0 +1,78 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseMarketplaceSource } from './marketplace.js';
|
||||
|
||||
describe('Marketplace Installation', () => {
|
||||
describe('parseMarketplaceSource', () => {
|
||||
it('should parse valid marketplace source with http URL', () => {
|
||||
const result = parseMarketplaceSource(
|
||||
'http://example.com/marketplace:my-plugin',
|
||||
);
|
||||
expect(result).toEqual({
|
||||
marketplaceSource: 'http://example.com/marketplace',
|
||||
pluginName: 'my-plugin',
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse valid marketplace source with https URL', () => {
|
||||
const result = parseMarketplaceSource(
|
||||
'https://github.com/example/marketplace:awesome-plugin',
|
||||
);
|
||||
expect(result).toEqual({
|
||||
marketplaceSource: 'https://github.com/example/marketplace',
|
||||
pluginName: 'awesome-plugin',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle plugin names with hyphens', () => {
|
||||
const result = parseMarketplaceSource(
|
||||
'https://example.com:my-super-plugin',
|
||||
);
|
||||
expect(result).toEqual({
|
||||
marketplaceSource: 'https://example.com',
|
||||
pluginName: 'my-super-plugin',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle URLs with ports', () => {
|
||||
const result = parseMarketplaceSource(
|
||||
'https://example.com:8080/marketplace:plugin',
|
||||
);
|
||||
expect(result).toEqual({
|
||||
marketplaceSource: 'https://example.com:8080/marketplace',
|
||||
pluginName: 'plugin',
|
||||
});
|
||||
});
|
||||
|
||||
it('should return null for source without colon separator', () => {
|
||||
const result = parseMarketplaceSource('https://example.com/plugin');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for source without URL', () => {
|
||||
const result = parseMarketplaceSource('not-a-url:plugin');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for source with empty plugin name', () => {
|
||||
const result = parseMarketplaceSource('https://example.com:');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should use last colon as separator', () => {
|
||||
// URLs with ports have colons, should use the last one
|
||||
const result = parseMarketplaceSource(
|
||||
'https://example.com:8080:my-plugin',
|
||||
);
|
||||
expect(result).toEqual({
|
||||
marketplaceSource: 'https://example.com:8080',
|
||||
pluginName: 'my-plugin',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
61
packages/core/src/extension/marketplace.ts
Normal file
@@ -0,0 +1,61 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* This module handles installation of extensions from Claude marketplaces.
|
||||
*
|
||||
* A marketplace URL format: marketplace-url:plugin-name
|
||||
* Example: https://github.com/example/marketplace:my-plugin
|
||||
*/
|
||||
|
||||
import type { ExtensionConfig } from './extensionManager.js';
|
||||
import type { ExtensionInstallMetadata } from '../config/config.js';
|
||||
|
||||
export interface MarketplaceInstallOptions {
|
||||
marketplaceUrl: string;
|
||||
pluginName: string;
|
||||
tempDir: string;
|
||||
requestConsent: (consent: string) => Promise<boolean>;
|
||||
}
|
||||
|
||||
export interface MarketplaceInstallResult {
|
||||
config: ExtensionConfig;
|
||||
sourcePath: string;
|
||||
installMetadata: ExtensionInstallMetadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse marketplace install source string.
|
||||
* Format: marketplace-url:plugin-name
|
||||
*/
|
||||
export function parseMarketplaceSource(source: string): {
|
||||
marketplaceSource: string;
|
||||
pluginName: string;
|
||||
} | null {
|
||||
// Check if source contains a colon separator
|
||||
const lastColonIndex = source.lastIndexOf(':');
|
||||
if (lastColonIndex === -1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Split at the last colon to separate URL from plugin name
|
||||
const marketplaceSource = source.substring(0, lastColonIndex);
|
||||
const pluginName = source.substring(lastColonIndex + 1);
|
||||
|
||||
// Validate that marketplace URL looks like a URL
|
||||
if (
|
||||
!marketplaceSource.startsWith('http://') &&
|
||||
!marketplaceSource.startsWith('https://')
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!pluginName || pluginName.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { marketplaceSource, pluginName };
|
||||
}
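// Illustrative usage (sketch; the URL and plugin name are hypothetical): the
// split happens at the last colon, so URLs that contain a port still parse.
function exampleParse(): void {
  const parsed = parseMarketplaceSource(
    'https://github.com/example/marketplace:my-plugin',
  );
  // => { marketplaceSource: 'https://github.com/example/marketplace',
  //      pluginName: 'my-plugin' }
  console.log(parsed);
}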
|
||||
110
packages/core/src/extension/override.test.ts
Normal file
@@ -0,0 +1,110 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { Override } from './override.js';
|
||||
|
||||
describe('Override', () => {
|
||||
it('should create an override from input', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
expect(override.baseRule).toBe(`/path/to/dir/`);
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(true);
|
||||
});
|
||||
|
||||
it('should create a disable override from input', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.baseRule).toBe(`/path/to/dir/`);
|
||||
expect(override.isDisable).toBe(true);
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create an override from a file rule', () => {
|
||||
const override = Override.fromFileRule('/path/to/dir');
|
||||
expect(override.baseRule).toBe('/path/to/dir');
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create a disable override from a file rule', () => {
|
||||
const override = Override.fromFileRule('!/path/to/dir/');
|
||||
expect(override.isDisable).toBe(true);
|
||||
expect(override.baseRule).toBe('/path/to/dir/');
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create an override with subdirs from a file rule', () => {
|
||||
const override = Override.fromFileRule('/path/to/dir/*');
|
||||
expect(override.baseRule).toBe('/path/to/dir/');
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify conflicting overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/dir', false);
|
||||
expect(override1.conflictsWith(override2)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify non-conflicting overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/another/dir', true);
|
||||
expect(override1.conflictsWith(override2)).toBe(false);
|
||||
});
|
||||
|
||||
it('should correctly identify equal overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/dir', true);
|
||||
expect(override1.isEqualTo(override2)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify unequal overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('!/path/to/dir', true);
|
||||
expect(override1.isEqualTo(override2)).toBe(false);
|
||||
});
|
||||
|
||||
it('should generate the correct regex', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
const regex = override.asRegex();
|
||||
expect(regex.test('/path/to/dir/')).toBe(true);
|
||||
expect(regex.test('/path/to/dir/subdir')).toBe(true);
|
||||
expect(regex.test('/path/to/another/dir')).toBe(false);
|
||||
});
|
||||
|
||||
it('should correctly identify child overrides', () => {
|
||||
const parent = Override.fromInput('/path/to/dir', true);
|
||||
const child = Override.fromInput('/path/to/dir/subdir', false);
|
||||
expect(child.isChildOf(parent)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify child overrides with glob', () => {
|
||||
const parent = Override.fromInput('/path/to/dir/*', true);
|
||||
const child = Override.fromInput('/path/to/dir/subdir', false);
|
||||
expect(child.isChildOf(parent)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify non-child overrides', () => {
|
||||
const parent = Override.fromInput('/path/to/dir', true);
|
||||
const other = Override.fromInput('/path/to/another/dir', false);
|
||||
expect(other.isChildOf(parent)).toBe(false);
|
||||
});
|
||||
|
||||
it('should generate the correct output string', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
expect(override.output()).toBe(`/path/to/dir/*`);
|
||||
});
|
||||
|
||||
it('should generate the correct output string for a disable override', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.output()).toBe(`!/path/to/dir/`);
|
||||
});
|
||||
|
||||
it('should disable a path based on a disable override rule', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.output()).toBe(`!/path/to/dir/`);
|
||||
});
|
||||
});
|
||||
102
packages/core/src/extension/override.ts
Normal file
@@ -0,0 +1,102 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
export interface ExtensionEnablementConfig {
|
||||
overrides: string[];
|
||||
}
|
||||
|
||||
export interface AllExtensionsEnablementConfig {
|
||||
[extensionName: string]: ExtensionEnablementConfig;
|
||||
}
|
||||
|
||||
export class Override {
|
||||
constructor(
|
||||
public baseRule: string,
|
||||
public isDisable: boolean,
|
||||
public includeSubdirs: boolean,
|
||||
) {}
|
||||
|
||||
static fromInput(inputRule: string, includeSubdirs: boolean): Override {
|
||||
const isDisable = inputRule.startsWith('!');
|
||||
let baseRule = isDisable ? inputRule.substring(1) : inputRule;
|
||||
baseRule = ensureLeadingAndTrailingSlash(baseRule);
|
||||
return new Override(baseRule, isDisable, includeSubdirs);
|
||||
}
|
||||
|
||||
static fromFileRule(fileRule: string): Override {
|
||||
const isDisable = fileRule.startsWith('!');
|
||||
let baseRule = isDisable ? fileRule.substring(1) : fileRule;
|
||||
const includeSubdirs = baseRule.endsWith('*');
|
||||
baseRule = includeSubdirs
|
||||
? baseRule.substring(0, baseRule.length - 1)
|
||||
: baseRule;
|
||||
return new Override(baseRule, isDisable, includeSubdirs);
|
||||
}
|
||||
|
||||
conflictsWith(other: Override): boolean {
|
||||
if (this.baseRule === other.baseRule) {
|
||||
return (
|
||||
this.includeSubdirs !== other.includeSubdirs ||
|
||||
this.isDisable !== other.isDisable
|
||||
);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
isEqualTo(other: Override): boolean {
|
||||
return (
|
||||
this.baseRule === other.baseRule &&
|
||||
this.includeSubdirs === other.includeSubdirs &&
|
||||
this.isDisable === other.isDisable
|
||||
);
|
||||
}
|
||||
|
||||
asRegex(): RegExp {
|
||||
return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
|
||||
}
|
||||
|
||||
isChildOf(parent: Override) {
|
||||
if (!parent.includeSubdirs) {
|
||||
return false;
|
||||
}
|
||||
return parent.asRegex().test(this.baseRule);
|
||||
}
|
||||
|
||||
output(): string {
|
||||
return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
|
||||
}
|
||||
|
||||
matchesPath(path: string) {
|
||||
return this.asRegex().test(path);
|
||||
}
|
||||
}
|
||||
|
||||
const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
|
||||
// Normalize separators to forward slashes for consistent matching across platforms.
|
||||
let result = dirPath.replace(/\\/g, '/');
|
||||
if (result.charAt(0) !== '/') {
|
||||
result = '/' + result;
|
||||
}
|
||||
if (result.charAt(result.length - 1) !== '/') {
|
||||
result = result + '/';
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts a glob pattern to a RegExp object.
|
||||
* This is a simplified implementation that supports `*`.
|
||||
*
|
||||
* @param glob The glob pattern to convert.
|
||||
* @returns A RegExp object.
|
||||
*/
|
||||
function globToRegex(glob: string): RegExp {
|
||||
const regexString = glob
|
||||
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
|
||||
.replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group
|
||||
|
||||
return new RegExp(`^${regexString}$`);
|
||||
}
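// Illustrative usage (sketch; the paths are hypothetical): an enable rule
// created with subdirectories round-trips through output()/fromFileRule and
// matches child paths via the generated regex.
function exampleOverride(): void {
  const enableRule = Override.fromInput('/projects/app', true);
  // Serialises as '/projects/app/*'.
  const reloaded = Override.fromFileRule(enableRule.output());
  // true: the glob covers the directory itself and everything beneath it.
  console.log(reloaded.matchesPath('/projects/app/src/'));
}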
|
||||
138
packages/core/src/extension/settings.test.ts
Normal file
@@ -0,0 +1,138 @@
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseEnvFile, generateEnvFile, validateSettings } from './settings.js';
|
||||
import type { ExtensionSetting } from './extensionManager.js';
|
||||
|
||||
describe('Extension Settings', () => {
|
||||
describe('parseEnvFile', () => {
|
||||
it('should parse simple KEY=VALUE pairs', () => {
|
||||
const content = 'API_KEY=abc123\nSERVER_URL=http://example.com';
|
||||
const result = parseEnvFile(content);
|
||||
expect(result).toEqual({
|
||||
API_KEY: 'abc123',
|
||||
SERVER_URL: 'http://example.com',
|
||||
});
|
||||
});
|
||||
|
    it('should skip empty lines and comments', () => {
      const content = `
# This is a comment
API_KEY=secret

# Another comment
DEBUG=true
`;
      const result = parseEnvFile(content);
      expect(result).toEqual({
        API_KEY: 'secret',
        DEBUG: 'true',
      });
    });

    it('should handle quoted values', () => {
      const content = `API_KEY="my secret key"\nPATH='/usr/local/bin'`;
      const result = parseEnvFile(content);
      expect(result).toEqual({
        API_KEY: 'my secret key',
        PATH: '/usr/local/bin',
      });
    });

    it('should ignore invalid lines', () => {
      const content = 'VALID=value\nINVALID LINE\nANOTHER=valid';
      const result = parseEnvFile(content);
      expect(result).toEqual({
        VALID: 'value',
        ANOTHER: 'valid',
      });
    });
  });

  describe('generateEnvFile', () => {
    it('should generate properly formatted .env content', () => {
      const settings = {
        API_KEY: 'secret123',
        DEBUG: 'true',
      };
      const result = generateEnvFile(settings);
      expect(result).toContain('API_KEY=secret123');
      expect(result).toContain('DEBUG=true');
      expect(result).toContain('# Extension Settings');
    });

    it('should quote values with spaces', () => {
      const settings = {
        MESSAGE: 'Hello World',
        PATH: '/usr/local/bin',
      };
      const result = generateEnvFile(settings);
      expect(result).toContain('MESSAGE="Hello World"');
      expect(result).toContain('PATH=/usr/local/bin');
    });
  });

  describe('validateSettings', () => {
    it('should pass validation for valid string settings', () => {
      const settingsConfig: ExtensionSetting[] = [
        {
          name: 'API Key',
          description: 'Your API key for the service',
          envVar: 'API_KEY',
        },
      ];
      const settings = { API_KEY: 'my-key' };
      const errors = validateSettings(settings, settingsConfig);
      expect(errors).toHaveLength(0);
    });

    it('should fail validation for non-string values', () => {
      const settingsConfig: ExtensionSetting[] = [
        {
          name: 'API Key',
          description: 'Your API key for the service',
          envVar: 'API_KEY',
        },
      ];
      // In TypeScript, this would be caught at compile time,
      // but at runtime we check the type
      const settings = { API_KEY: 123 as unknown as string };
      const errors = validateSettings(settings, settingsConfig);
      expect(errors).toHaveLength(1);
      expect(errors[0]).toContain('API Key');
      expect(errors[0]).toContain('string');
    });

    it('should allow undefined/missing settings (all settings are optional)', () => {
      const settingsConfig: ExtensionSetting[] = [
        {
          name: 'Optional Setting',
          description: 'An optional setting',
          envVar: 'OPTIONAL_VAR',
        },
      ];

      const settings = {};
      const errors = validateSettings(settings, settingsConfig);
      expect(errors).toHaveLength(0);
    });

    it('should validate sensitive settings the same way', () => {
      const settingsConfig: ExtensionSetting[] = [
        {
          name: 'Secret Key',
          description: 'Your secret key',
          envVar: 'SECRET_KEY',
          sensitive: true,
        },
      ];

      const validSettings = { SECRET_KEY: 'super-secret' };
      expect(validateSettings(validSettings, settingsConfig)).toHaveLength(0);
    });
  });
});
packages/core/src/extension/settings.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * This module handles extension settings management.
 * Settings are stored in .env files within extension directories.
 */

import * as fs from 'node:fs';
import * as path from 'node:path';
import type { ExtensionSetting } from './extensionManager.js';

/**
 * Parse .env file content into key-value pairs.
 * Simple parser that handles:
 * - KEY=VALUE format
 * - Comments starting with #
 * - Empty lines
 */
export function parseEnvFile(content: string): Record<string, string> {
  const result: Record<string, string> = {};
  const lines = content.split('\n');

  for (const line of lines) {
    const trimmed = line.trim();

    // Skip empty lines and comments
    if (!trimmed || trimmed.startsWith('#')) {
      continue;
    }

    // Parse KEY=VALUE
    const equalIndex = trimmed.indexOf('=');
    if (equalIndex === -1) {
      continue; // Invalid line, skip
    }

    const key = trimmed.substring(0, equalIndex).trim();
    const value = trimmed.substring(equalIndex + 1).trim();

    // Remove quotes if present
    let cleanValue = value;
    if (
      (value.startsWith('"') && value.endsWith('"')) ||
      (value.startsWith("'") && value.endsWith("'"))
    ) {
      cleanValue = value.substring(1, value.length - 1);
    }

    result[key] = cleanValue;
  }

  return result;
}

/**
 * Generate .env file content from key-value pairs.
 */
export function generateEnvFile(settings: Record<string, string>): string {
  const lines: string[] = [];

  lines.push('# Extension Settings');
  lines.push('# Generated by Qwen Code');
  lines.push('');

  for (const [key, value] of Object.entries(settings)) {
    // Quote values that contain spaces
    const quotedValue = value.includes(' ') ? `"${value}"` : value;
    lines.push(`${key}=${quotedValue}`);
  }

  return lines.join('\n') + '\n';
}

/**
 * Load settings from extension .env file.
 */
export async function loadExtensionSettings(
  extensionPath: string,
): Promise<Record<string, string>> {
  const envPath = path.join(extensionPath, '.env');

  try {
    const content = await fs.promises.readFile(envPath, 'utf-8');
    return parseEnvFile(content);
  } catch (error) {
    // If .env file doesn't exist, return empty object
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      return {};
    }
    throw error;
  }
}

/**
 * Save settings to extension .env file.
 */
export async function saveExtensionSettings(
  extensionPath: string,
  settings: Record<string, string>,
): Promise<void> {
  const envPath = path.join(extensionPath, '.env');
  const content = generateEnvFile(settings);
  await fs.promises.writeFile(envPath, content, 'utf-8');
}

/**
 * Validate settings against configuration.
 * Returns array of validation errors (empty if valid).
 *
 * Note: This validates that environment variables are properly set.
 * In Gemini Extension format, all settings are treated as strings.
 */
export function validateSettings(
  settings: Record<string, string>,
  settingsConfig: ExtensionSetting[],
): string[] {
  const errors: string[] = [];

  for (const config of settingsConfig) {
    const value = settings[config.envVar];

    // Basic validation - check if value exists and is not empty
    // Note: All settings are optional in Gemini Extension format
    if (value !== undefined && typeof value !== 'string') {
      errors.push(
        `Setting "${config.name}" (${config.envVar}) must be a string`,
      );
    }
  }

  return errors;
}

/**
 * Merge extension settings into process environment.
 * This allows MCP servers and other extension components to access settings.
 */
export function applySettingsToEnv(settings: Record<string, string>): void {
  for (const [key, value] of Object.entries(settings)) {
    // Only set if not already defined in process.env
    if (process.env[key] === undefined) {
      process.env[key] = value;
    }
  }
}
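For orientation only (not part of this commit): a minimal sketch of how the settings helpers above compose; the function name, directory, setting name, and value below are made-up examples.

// Illustrative sketch only, not part of the diff. The import path, directory,
// and setting values are assumptions; only the exported helpers come from
// settings.ts above.
import {
  loadExtensionSettings,
  saveExtensionSettings,
  validateSettings,
  applySettingsToEnv,
} from './settings.js';

async function configureExtension(extensionDir: string): Promise<void> {
  // Read the existing .env (returns {} if the file does not exist yet).
  const settings = await loadExtensionSettings(extensionDir);
  settings['API_KEY'] = 'example-value'; // placeholder value

  // All settings are optional strings; validation only reports type errors.
  const errors = validateSettings(settings, [
    { name: 'API Key', description: 'Key for the service', envVar: 'API_KEY' },
  ]);
  if (errors.length > 0) {
    throw new Error(errors.join('\n'));
  }

  // Persist to <extensionDir>/.env, then expose to child processes (e.g. MCP servers).
  await saveExtensionSettings(extensionDir, settings);
  applySettingsToEnv(settings);
}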
packages/core/src/extension/storage.test.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { ExtensionStorage } from './storage.js';
import * as os from 'node:os';
import * as path from 'node:path';
import * as fs from 'node:fs';
import {
  EXTENSION_SETTINGS_FILENAME,
  EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import { Storage } from '../config/storage.js';

vi.mock('node:os');
vi.mock('node:fs', async (importOriginal) => {
  const actual = await importOriginal<typeof fs>();
  return {
    ...actual,
    promises: {
      ...actual.promises,
      mkdtemp: vi.fn(),
    },
  };
});
vi.mock('@google/gemini-cli-core');

describe('ExtensionStorage', () => {
  const mockHomeDir = '/mock/home';
  const extensionName = 'test-extension';
  let storage: ExtensionStorage;

  beforeEach(() => {
    vi.mocked(os.homedir).mockReturnValue(mockHomeDir);
    vi.mocked(Storage).mockImplementation(
      () =>
        ({
          getExtensionsDir: () =>
            path.join(mockHomeDir, '.gemini', 'extensions'),
        }) as any, // eslint-disable-line @typescript-eslint/no-explicit-any
    );
    storage = new ExtensionStorage(extensionName);
  });

  afterEach(() => {
    vi.restoreAllMocks();
  });

  it('should return the correct extension directory', () => {
    const expectedDir = path.join(
      mockHomeDir,
      '.gemini',
      'extensions',
      extensionName,
    );
    expect(storage.getExtensionDir()).toBe(expectedDir);
  });

  it('should return the correct config path', () => {
    const expectedPath = path.join(
      mockHomeDir,
      '.gemini',
      'extensions',
      extensionName,
      EXTENSIONS_CONFIG_FILENAME,
    );
    expect(storage.getConfigPath()).toBe(expectedPath);
  });

  it('should return the correct env file path', () => {
    const expectedPath = path.join(
      mockHomeDir,
      '.gemini',
      'extensions',
      extensionName,
      EXTENSION_SETTINGS_FILENAME,
    );
    expect(storage.getEnvFilePath()).toBe(expectedPath);
  });

  it('should return the correct user extensions directory', () => {
    const expectedDir = path.join(mockHomeDir, '.gemini', 'extensions');
    expect(ExtensionStorage.getUserExtensionsDir()).toBe(expectedDir);
  });

  it('should create a temporary directory', async () => {
    const mockTmpDir = '/tmp/gemini-extension-123';
    vi.mocked(fs.promises.mkdtemp).mockResolvedValue(mockTmpDir);
    vi.mocked(os.tmpdir).mockReturnValue('/tmp');

    const result = await ExtensionStorage.createTmpDir();

    expect(fs.promises.mkdtemp).toHaveBeenCalledWith(
      path.join('/tmp', 'gemini-extension'),
    );
    expect(result).toBe(mockTmpDir);
  });
});
packages/core/src/extension/storage.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
import { Storage } from '../config/storage.js';
import path from 'node:path';
import * as os from 'node:os';
import {
  EXTENSION_SETTINGS_FILENAME,
  EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import * as fs from 'node:fs';

export class ExtensionStorage {
  private readonly extensionName: string;

  constructor(extensionName: string) {
    this.extensionName = extensionName;
  }

  getExtensionDir(): string {
    return path.join(
      ExtensionStorage.getUserExtensionsDir(),
      this.extensionName,
    );
  }

  getConfigPath(): string {
    return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
  }

  getEnvFilePath(): string {
    return path.join(this.getExtensionDir(), EXTENSION_SETTINGS_FILENAME);
  }

  static getUserExtensionsDir(): string {
    const storage = new Storage(os.homedir());
    return storage.getExtensionsDir();
  }

  static async createTmpDir(): Promise<string> {
    return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
  }
}
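For orientation only (not part of this commit): how the ExtensionStorage paths above compose; the extension name is a made-up example.

// Illustrative sketch only, not part of the diff; 'my-extension' is an
// assumed name and the commented paths just restate the joins shown above.
import { ExtensionStorage } from './storage.js';

const store = new ExtensionStorage('my-extension');
console.log(store.getExtensionDir()); // <user extensions dir>/my-extension
console.log(store.getConfigPath());   // .../my-extension/qwen-extension.json
console.log(store.getEnvFilePath());  // .../my-extension/.env
// Temp dir prefixed 'qwen-extension' under os.tmpdir():
const tmpDir = await ExtensionStorage.createTmpDir();
console.log(tmpDir);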
@@ -17,7 +17,7 @@ export interface VariableSchema {
 
 export interface LoadExtensionContext {
   extensionDir: string;
-  workspaceDir: string;
+  workspaceDir?: string;
 }
 
 const PATH_SEPARATOR_DEFINITION = {
@@ -30,6 +30,10 @@ export const VARIABLE_SCHEMA = {
     type: 'string',
     description: 'The path of the extension in the filesystem.',
   },
+  CLAUDE_PLUGIN_ROOT: {
+    type: 'string',
+    description: 'The path of the extension in the filesystem.',
+  },
   workspacePath: {
     type: 'string',
     description: 'The absolute path of the current workspace.',
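Illustrative only (not part of this commit): with workspaceDir now optional, an extension can be loaded without an active workspace; the directory below is a made-up example.

// Illustrative sketch only; the interface shape comes from the hunk above,
// everything else is assumed.
import type { LoadExtensionContext } from './variableSchema.js';

const ctx: LoadExtensionContext = {
  extensionDir: '/home/user/.qwen/extensions/my-extension', // example path
  // workspaceDir omitted: no workspace is open
};
console.log(ctx);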
@@ -5,6 +5,13 @@
 */

import { type VariableSchema, VARIABLE_SCHEMA } from './variableSchema.js';
import path from 'node:path';
import { QWEN_DIR } from '../config/storage.js';

export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';
export const EXTENSION_SETTINGS_FILENAME = '.env';

export type JsonObject = { [key: string]: JsonValue };
export type JsonArray = JsonValue[];
@@ -76,6 +76,8 @@ export * from './utils/subagentGenerator.js';
export * from './utils/projectSummary.js';
export * from './utils/promptIdContext.js';
export * from './utils/thoughtUtils.js';
export * from './utils/toml-to-markdown-converter.js';
export * from './utils/yaml-parser.js';

// Config resolution utilities
export * from './utils/configResolver.js';
@@ -109,6 +111,9 @@ export * from './subagents/index.js';
// Export skills
export * from './skills/index.js';

// Export extension
export * from './extension/index.js';

// Export prompt logic
export * from './prompts/mcp-prompts.js';

@@ -141,6 +146,7 @@ export type {
  OAuthCredentials,
} from './mcp/token-storage/types.js';
export { MCPOAuthTokenStorage } from './mcp/oauth-token-storage.js';
export { KeychainTokenStorage } from './mcp/token-storage/keychain-token-storage.js';
export type { MCPOAuthConfig } from './mcp/oauth-provider.js';
export type {
  OAuthAuthorizationServerMetadata,
@@ -22,6 +22,7 @@ interface Keytar {
 }
 
 const KEYCHAIN_TEST_PREFIX = '__keychain_test__';
+const SECRET_PREFIX = '__secret__';
 
 export class KeychainTokenStorage extends BaseTokenStorage {
   private keychainAvailable: boolean | null = null;
@@ -137,6 +138,7 @@ export class KeychainTokenStorage extends BaseTokenStorage {
       const credentials = await keytar.findCredentials(this.serviceName);
       return credentials
         .filter((cred) => !cred.account.startsWith(KEYCHAIN_TEST_PREFIX))
+        .filter((cred) => !cred.account.startsWith(SECRET_PREFIX))
         .map((cred: { account: string }) => cred.account);
     } catch (error) {
       console.error('Failed to list servers from keychain:', error);
@@ -156,9 +158,9 @@ export class KeychainTokenStorage extends BaseTokenStorage {
 
     const result = new Map<string, OAuthCredentials>();
     try {
-      const credentials = (
-        await keytar.findCredentials(this.serviceName)
-      ).filter((c) => !c.account.startsWith(KEYCHAIN_TEST_PREFIX));
+      const credentials = (await keytar.findCredentials(this.serviceName))
+        .filter((c) => !c.account.startsWith(KEYCHAIN_TEST_PREFIX))
+        .filter((c) => !c.account.startsWith(SECRET_PREFIX));
 
       for (const cred of credentials) {
         try {
@@ -248,4 +250,62 @@ export class KeychainTokenStorage extends BaseTokenStorage {
   async isAvailable(): Promise<boolean> {
     return this.checkKeychainAvailability();
   }
+
+  async setSecret(key: string, value: string): Promise<void> {
+    if (!(await this.checkKeychainAvailability())) {
+      throw new Error('Keychain is not available');
+    }
+    const keytar = await this.getKeytar();
+    if (!keytar) {
+      throw new Error('Keytar module not available');
+    }
+    await keytar.setPassword(this.serviceName, `${SECRET_PREFIX}${key}`, value);
+  }
+
+  async getSecret(key: string): Promise<string | null> {
+    if (!(await this.checkKeychainAvailability())) {
+      throw new Error('Keychain is not available');
+    }
+    const keytar = await this.getKeytar();
+    if (!keytar) {
+      throw new Error('Keytar module not available');
+    }
+    return keytar.getPassword(this.serviceName, `${SECRET_PREFIX}${key}`);
+  }
+
+  async deleteSecret(key: string): Promise<void> {
+    if (!(await this.checkKeychainAvailability())) {
+      throw new Error('Keychain is not available');
+    }
+    const keytar = await this.getKeytar();
+    if (!keytar) {
+      throw new Error('Keytar module not available');
+    }
+    const deleted = await keytar.deletePassword(
+      this.serviceName,
+      `${SECRET_PREFIX}${key}`,
+    );
+    if (!deleted) {
+      throw new Error(`No secret found for key: ${key}`);
+    }
+  }
+
+  async listSecrets(): Promise<string[]> {
+    if (!(await this.checkKeychainAvailability())) {
+      throw new Error('Keychain is not available');
+    }
+    const keytar = await this.getKeytar();
+    if (!keytar) {
+      throw new Error('Keytar module not available');
+    }
+    try {
+      const credentials = await keytar.findCredentials(this.serviceName);
+      return credentials
+        .filter((cred) => cred.account.startsWith(SECRET_PREFIX))
+        .map((cred) => cred.account.substring(SECRET_PREFIX.length));
+    } catch (error) {
+      console.error('Failed to list secrets from keychain:', error);
+      return [];
+    }
+  }
 }
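Illustrative only (not part of this commit): how a caller might exercise the new secret methods; the key names are made up and obtaining a KeychainTokenStorage instance is assumed to happen elsewhere.

// Illustrative sketch only, not part of the diff; only the method names and
// their behavior (prefixing, errors) come from the hunk above.
import type { KeychainTokenStorage } from './keychain-token-storage.js';

declare const storage: KeychainTokenStorage; // constructed elsewhere (assumed)

async function storeExtensionSecret(): Promise<void> {
  if (!(await storage.isAvailable())) return;
  await storage.setSecret('my-extension/API_KEY', 'super-secret');
  const value = await storage.getSecret('my-extension/API_KEY'); // 'super-secret'
  console.log(value, await storage.listSecrets()); // keys with '__secret__' prefix stripped
  await storage.deleteSecret('my-extension/API_KEY'); // throws if nothing was stored
}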
Some files were not shown because too many files have changed in this diff.