Compare commits


8 Commits

Author    SHA1        Message                                      Date
LaZzyMan  b37ede07e8  fix/gemini extension install error           2026-01-14 17:48:25 +08:00
LaZzyMan  0a88dd7861  fix: fix tests                               2026-01-14 16:50:59 +08:00
LaZzyMan  70991e474f  fix/lint error                               2026-01-14 15:42:50 +08:00
LaZzyMan  551e546974  feat: move extension to core package         2026-01-14 15:30:27 +08:00
LaZzyMan  74013bd8b2  feat: settings extension                     2026-01-08 13:49:59 +08:00
LaZzyMan  18713ef2b0  feat: install from gemini                    2026-01-07 19:17:34 +08:00
LaZzyMan  50dac93c80  feat: migrate command format                 2026-01-07 13:43:00 +08:00
LaZzyMan  22504b0a5b  feat: add extension for gemini and claude    2026-01-07 11:06:17 +08:00
99 changed files with 8730 additions and 5069 deletions


@@ -34,8 +34,7 @@ on:
default: false
concurrency:
# Serialize all release workflows (CLI + SDK) to avoid racing on `main` pushes.
group: 'release-main'
group: '${{ github.workflow }}'
cancel-in-progress: false
jobs:
@@ -51,6 +50,7 @@ jobs:
packages: 'write'
id-token: 'write'
issues: 'write'
pull-requests: 'write'
outputs:
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
@@ -128,13 +128,12 @@ jobs:
IS_PREVIEW: '${{ steps.vars.outputs.is_preview }}'
MANUAL_VERSION: '${{ inputs.version }}'
- name: 'Set SDK package version'
- name: 'Set SDK package version (local only)'
env:
RELEASE_VERSION: '${{ steps.version.outputs.RELEASE_VERSION }}'
run: |-
# Ensure the package version matches the computed release version.
# This is required for nightly/preview because npm does not allow re-publishing the same version.
# Using --no-git-tag-version because we create tags via GitHub Release, not npm.
npm version -w @qwen-code/sdk "${RELEASE_VERSION}" --no-git-tag-version --allow-same-version
- name: 'Build CLI Bundle'
@@ -169,40 +168,37 @@ jobs:
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
- name: 'Create and switch to a release branch (stable only)'
- name: 'Build SDK'
working-directory: 'packages/sdk-typescript'
run: |-
npm run build
- name: 'Publish @qwen-code/sdk'
working-directory: 'packages/sdk-typescript'
run: |-
npm publish --access public --tag=${{ steps.version.outputs.NPM_TAG }} ${{ steps.vars.outputs.is_dry_run == 'true' && '--dry-run' || '' }}
env:
NODE_AUTH_TOKEN: '${{ secrets.NPM_TOKEN }}'
- name: 'Create and switch to a release branch'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
id: 'release_branch'
env:
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
run: |-
set -euo pipefail
BRANCH_NAME="release/sdk-typescript/${RELEASE_TAG}"
# Make reruns idempotent: reuse an existing remote branch if it already exists.
if git show-ref --verify --quiet "refs/heads/${BRANCH_NAME}"; then
git switch "${BRANCH_NAME}"
elif git ls-remote --exit-code --heads origin "${BRANCH_NAME}" >/dev/null 2>&1; then
git fetch origin "${BRANCH_NAME}:${BRANCH_NAME}"
git switch "${BRANCH_NAME}"
else
git switch -c "${BRANCH_NAME}"
fi
git switch -c "${BRANCH_NAME}"
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_OUTPUT}"
- name: 'Build SDK'
working-directory: 'packages/sdk-typescript'
run: |-
npm run build
- name: 'Commit and Push package version to release branch (stable only)'
- name: 'Commit and Push package version (stable only)'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
env:
BRANCH_NAME: '${{ steps.release_branch.outputs.BRANCH_NAME }}'
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
run: |-
# Only persist version bumps after a successful publish.
git add packages/sdk-typescript/package.json package-lock.json
if git diff --staged --quiet; then
echo "No version changes to commit"
@@ -212,47 +208,9 @@ jobs:
echo "Pushing release branch to remote..."
git push --set-upstream origin "${BRANCH_NAME}" --follow-tags
- name: 'Check if @qwen-code/sdk version is already published (rerun safety)'
id: 'npm_check'
env:
RELEASE_VERSION: '${{ steps.version.outputs.RELEASE_VERSION }}'
run: |-
set -euo pipefail
if npm view "@qwen-code/sdk@${RELEASE_VERSION}" version >/dev/null 2>&1; then
echo "already_published=true" >> "${GITHUB_OUTPUT}"
echo "@qwen-code/sdk@${RELEASE_VERSION} already exists on npm."
else
echo "already_published=false" >> "${GITHUB_OUTPUT}"
fi
- name: 'Publish @qwen-code/sdk'
working-directory: 'packages/sdk-typescript'
if: |-
${{ steps.vars.outputs.is_dry_run == 'true' || steps.npm_check.outputs.already_published != 'true' }}
run: |-
npm publish --access public --tag=${{ steps.version.outputs.NPM_TAG }} ${{ steps.vars.outputs.is_dry_run == 'true' && '--dry-run' || '' }}
env:
NODE_AUTH_TOKEN: '${{ secrets.NPM_TOKEN }}'
- name: 'Check if GitHub Release already exists (rerun safety)'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' }}
id: 'gh_release_check'
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
run: |-
set -euo pipefail
if gh release view "sdk-typescript-${RELEASE_TAG}" >/dev/null 2>&1; then
echo "already_exists=true" >> "${GITHUB_OUTPUT}"
echo "GitHub Release sdk-typescript-${RELEASE_TAG} already exists."
else
echo "already_exists=false" >> "${GITHUB_OUTPUT}"
fi
- name: 'Create GitHub Release and Tag'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' && steps.gh_release_check.outputs.already_exists != 'true' }}
${{ steps.vars.outputs.is_dry_run == 'false' }}
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
RELEASE_BRANCH: '${{ steps.release_branch.outputs.BRANCH_NAME }}'
@@ -278,27 +236,48 @@ jobs:
--generate-notes \
${PRERELEASE_FLAG}
- name: 'Create release PR for SDK version bump'
- name: 'Create PR to merge release branch into main'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
id: 'pr'
env:
GH_TOKEN: '${{ secrets.CI_BOT_PAT }}'
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
RELEASE_BRANCH: '${{ steps.release_branch.outputs.BRANCH_NAME }}'
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
run: |-
set -euo pipefail
pr_exists=$(gh pr list --head "${RELEASE_BRANCH}" --state open --json number --jq 'length')
if [[ "${pr_exists}" != "0" ]]; then
echo "Open PR already exists for ${RELEASE_BRANCH}; skipping creation."
exit 0
pr_url="$(gh pr list --head "${RELEASE_BRANCH}" --base main --json url --jq '.[0].url')"
if [[ -z "${pr_url}" ]]; then
pr_url="$(gh pr create \
--base main \
--head "${RELEASE_BRANCH}" \
--title "chore(release): sdk-typescript ${RELEASE_TAG}" \
--body "Automated release PR for sdk-typescript ${RELEASE_TAG}.")"
fi
gh pr create \
--base main \
--head "${RELEASE_BRANCH}" \
--title "chore(release): sdk-typescript ${RELEASE_TAG}" \
--body "Automated SDK version bump for ${RELEASE_TAG}."
echo "PR_URL=${pr_url}" >> "${GITHUB_OUTPUT}"
- name: 'Wait for CI checks to complete'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
PR_URL: '${{ steps.pr.outputs.PR_URL }}'
run: |-
set -euo pipefail
echo "Waiting for CI checks to complete..."
gh pr checks "${PR_URL}" --watch --interval 30
- name: 'Enable auto-merge for release PR'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
PR_URL: '${{ steps.pr.outputs.PR_URL }}'
run: |-
set -euo pipefail
gh pr merge "${PR_URL}" --merge --auto
- name: 'Create Issue on Failure'
if: |-


@@ -38,11 +38,6 @@ on:
type: 'boolean'
default: false
concurrency:
# Serialize all release workflows (CLI + SDK) to avoid racing on `main` pushes.
group: 'release-main'
cancel-in-progress: false
jobs:
release:
runs-on: 'ubuntu-latest'
@@ -155,19 +150,8 @@ jobs:
env:
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
run: |-
set -euo pipefail
BRANCH_NAME="release/${RELEASE_TAG}"
# Make reruns idempotent: reuse an existing remote branch if it already exists.
if git show-ref --verify --quiet "refs/heads/${BRANCH_NAME}"; then
git switch "${BRANCH_NAME}"
elif git ls-remote --exit-code --heads origin "${BRANCH_NAME}" >/dev/null 2>&1; then
git fetch origin "${BRANCH_NAME}:${BRANCH_NAME}"
git switch "${BRANCH_NAME}"
else
git switch -c "${BRANCH_NAME}"
fi
git switch -c "${BRANCH_NAME}"
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_OUTPUT}"
- name: 'Update package versions'
@@ -207,47 +191,16 @@ jobs:
registry-url: 'https://registry.npmjs.org'
scope: '@qwen-code'
- name: 'Check if @qwen-code/qwen-code version is already published (rerun safety)'
id: 'npm_check'
env:
RELEASE_VERSION: '${{ steps.version.outputs.RELEASE_VERSION }}'
run: |-
set -euo pipefail
if npm view "@qwen-code/qwen-code@${RELEASE_VERSION}" version >/dev/null 2>&1; then
echo "already_published=true" >> "${GITHUB_OUTPUT}"
echo "@qwen-code/qwen-code@${RELEASE_VERSION} already exists on npm."
else
echo "already_published=false" >> "${GITHUB_OUTPUT}"
fi
- name: 'Publish @qwen-code/qwen-code'
working-directory: 'dist'
if: |-
${{ steps.vars.outputs.is_dry_run == 'true' || steps.npm_check.outputs.already_published != 'true' }}
run: |-
npm publish --access public --tag=${{ steps.version.outputs.NPM_TAG }} ${{ steps.vars.outputs.is_dry_run == 'true' && '--dry-run' || '' }}
env:
NODE_AUTH_TOKEN: '${{ secrets.NPM_TOKEN }}'
- name: 'Check if GitHub Release already exists (rerun safety)'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' }}
id: 'gh_release_check'
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
run: |-
set -euo pipefail
if gh release view "${RELEASE_TAG}" >/dev/null 2>&1; then
echo "already_exists=true" >> "${GITHUB_OUTPUT}"
echo "GitHub Release ${RELEASE_TAG} already exists."
else
echo "already_exists=false" >> "${GITHUB_OUTPUT}"
fi
- name: 'Create GitHub Release and Tag'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' && steps.gh_release_check.outputs.already_exists != 'true' }}
${{ steps.vars.outputs.is_dry_run == 'false' }}
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
RELEASE_BRANCH: '${{ steps.release_branch.outputs.BRANCH_NAME }}'
@@ -261,34 +214,12 @@ jobs:
--notes-start-tag "$PREVIOUS_RELEASE_TAG" \
--generate-notes
- name: 'Create release PR for version bump'
if: |-
${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
env:
GH_TOKEN: '${{ secrets.CI_BOT_PAT }}'
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
RELEASE_BRANCH: '${{ steps.release_branch.outputs.BRANCH_NAME }}'
run: |-
set -euo pipefail
pr_exists=$(gh pr list --head "${RELEASE_BRANCH}" --state open --json number --jq 'length')
if [[ "${pr_exists}" != "0" ]]; then
echo "Open PR already exists for ${RELEASE_BRANCH}; skipping creation."
exit 0
fi
gh pr create \
--base main \
--head "${RELEASE_BRANCH}" \
--title "chore(release): ${RELEASE_TAG}" \
--body "Automated version bump for ${RELEASE_TAG}."
- name: 'Create Issue on Failure'
if: |-
${{ failure() }}
env:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
RELEASE_TAG: "${{ steps.version.outputs.RELEASE_TAG || 'N/A' }}"
RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }} || "N/A"'
DETAILS_URL: '${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
run: |-
gh issue create \

.gitignore (1 changed line)

@@ -23,6 +23,7 @@ package-lock.json
.idea
*.iml
.cursor
.qoder
# OS metadata
.DS_Store

package-lock.json (41 changed lines)

@@ -3875,6 +3875,17 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/prompts": {
"version": "2.4.9",
"resolved": "https://registry.npmjs.org/@types/prompts/-/prompts-2.4.9.tgz",
"integrity": "sha512-qTxFi6Buiu8+50/+3DGIWLHM6QuWsEKugJnnP6iv2Mc4ncxE4A/OJkjuVOA+5X0X1S/nq5VJRa8Lu+nwcvbrKA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"kleur": "^3.0.3"
}
},
"node_modules/@types/prop-types": {
"version": "15.7.15",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
@@ -10984,6 +10995,15 @@
"json-buffer": "3.0.1"
}
},
"node_modules/kleur": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz",
"integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==",
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/ky": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/ky/-/ky-1.8.1.tgz",
@@ -13393,6 +13413,19 @@
"dev": true,
"license": "MIT"
},
"node_modules/prompts": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz",
"integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==",
"license": "MIT",
"dependencies": {
"kleur": "^3.0.3",
"sisteransi": "^1.0.5"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/prop-types": {
"version": "15.8.1",
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz",
@@ -14753,6 +14786,12 @@
"url": "https://github.com/steveukx/git-js?sponsor=1"
}
},
"node_modules/sisteransi": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
"integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==",
"license": "MIT"
},
"node_modules/slash": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
@@ -17338,6 +17377,7 @@
"ink-spinner": "^5.0.0",
"lowlight": "^3.3.0",
"open": "^10.1.2",
"prompts": "^2.4.2",
"qrcode-terminal": "^0.12.0",
"react": "^19.1.0",
"read-package-up": "^11.0.0",
@@ -17366,6 +17406,7 @@
"@types/diff": "^7.0.2",
"@types/dotenv": "^6.1.1",
"@types/node": "^20.11.24",
"@types/prompts": "^2.4.9",
"@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6",
"@types/semver": "^7.7.0",


@@ -46,6 +46,7 @@
"comment-json": "^4.2.5",
"diff": "^7.0.0",
"dotenv": "^17.1.0",
"prompts": "^2.4.2",
"fzf": "^0.5.2",
"glob": "^10.5.0",
"highlight.js": "^11.11.1",
@@ -79,6 +80,7 @@
"@types/command-exists": "^1.2.3",
"@types/diff": "^7.0.2",
"@types/dotenv": "^6.1.1",
"@types/prompts": "^2.4.9",
"@types/node": "^20.11.24",
"@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6",


@@ -27,10 +27,8 @@ import { Readable, Writable } from 'node:stream';
import type { LoadedSettings } from '../config/settings.js';
import { SettingScope } from '../config/settings.js';
import { z } from 'zod';
import { ExtensionStorage, type Extension } from '../config/extension.js';
import type { CliArgs } from '../config/config.js';
import { loadCliConfig } from '../config/config.js';
import { ExtensionEnablementManager } from '../config/extensions/extensionEnablement.js';
// Import the modular Session class
import { Session } from './session/Session.js';
@@ -38,7 +36,6 @@ import { Session } from './session/Session.js';
export async function runAcpAgent(
config: Config,
settings: LoadedSettings,
extensions: Extension[],
argv: CliArgs,
) {
const stdout = Writable.toWeb(process.stdout) as WritableStream;
@@ -51,8 +48,7 @@ export async function runAcpAgent(
console.debug = console.error;
new acp.AgentSideConnection(
(client: acp.Client) =>
new GeminiAgent(config, settings, extensions, argv, client),
(client: acp.Client) => new GeminiAgent(config, settings, argv, client),
stdout,
stdin,
);
@@ -65,7 +61,6 @@ class GeminiAgent {
constructor(
private config: Config,
private settings: LoadedSettings,
private extensions: Extension[],
private argv: CliArgs,
private client: acp.Client,
) {}
@@ -215,16 +210,7 @@ class GeminiAgent {
continue: false,
};
const config = await loadCliConfig(
settings,
this.extensions,
new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
this.argv.extensions,
),
argvForSession,
cwd,
);
const config = await loadCliConfig(settings, argvForSession, cwd);
await config.initialize();
return config;


@@ -0,0 +1,87 @@
import type { ConfirmationRequest } from '../../ui/types.js';
/**
* Requests consent from the user to perform an action, by reading a Y/n
* character from stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param consentDescription The description of the thing they will be consenting to.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentNonInteractive(
consentDescription: string,
): Promise<boolean> {
console.info(consentDescription);
const result = await promptForConsentNonInteractive(
'Do you want to continue? [Y/n]: ',
);
return result;
}
/**
* Requests consent from the user to perform an action, in interactive mode.
*
* This should not be called from non-interactive mode as it will not work.
*
* @param consentDescription The description of the thing they will be consenting to.
* @param addExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentInteractive(
consentDescription: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return promptForConsentInteractive(
consentDescription + '\n\nDo you want to continue?',
addExtensionUpdateConfirmationRequest,
);
}
/**
* Asks users a prompt and awaits for a y/n response on stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param prompt A yes/no prompt to ask the user
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
*/
async function promptForConsentNonInteractive(
prompt: string,
): Promise<boolean> {
const readline = await import('node:readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return new Promise((resolve) => {
rl.question(prompt, (answer) => {
rl.close();
resolve(['y', ''].includes(answer.trim().toLowerCase()));
});
});
}
/**
* Asks users an interactive yes/no prompt.
*
* This should not be called from non-interactive mode as it will break the CLI.
*
* @param prompt A markdown prompt to ask the user
* @param addExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
* @returns Whether or not the user answers yes.
*/
async function promptForConsentInteractive(
prompt: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return new Promise<boolean>((resolve) => {
addExtensionUpdateConfirmationRequest({
prompt,
onConfirm: (resolvedConfirmed) => {
resolve(resolvedConfirmed);
},
});
});
}
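As a rough usage sketch (not part of this diff), a caller in non-interactive mode passes a human-readable description and branches on the boolean result; the description text below is made up:

```typescript
import { requestConsentNonInteractive } from './consent.js';

// Hypothetical consent prompt: prints the description, then asks
// "Do you want to continue? [Y/n]: " and resolves true on "y" or Enter.
const consented = await requestConsentNonInteractive(
  'Installing extension "example" will run one local MCP server.',
);
if (!consented) {
  process.exit(1);
}
```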


@@ -5,21 +5,22 @@
*/
import { type CommandModule } from 'yargs';
import { disableExtension } from '../../config/extension.js';
import { SettingScope } from '../../config/settings.js';
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';
interface DisableArgs {
name: string;
scope?: string;
}
export function handleDisable(args: DisableArgs) {
export async function handleDisable(args: DisableArgs) {
const extensionManager = await getExtensionManager();
try {
if (args.scope?.toLowerCase() === 'workspace') {
disableExtension(args.name, SettingScope.Workspace);
extensionManager.disableExtension(args.name, SettingScope.Workspace);
} else {
disableExtension(args.name, SettingScope.User);
extensionManager.disableExtension(args.name, SettingScope.User);
}
console.log(
`Extension "${args.name}" successfully disabled for scope "${args.scope}".`,
@@ -61,8 +62,8 @@ export const disableCommand: CommandModule = {
}
return true;
}),
handler: (argv) => {
handleDisable({
handler: async (argv) => {
await handleDisable({
name: argv['name'] as string,
scope: argv['scope'] as string,
});


@@ -6,20 +6,22 @@
import { type CommandModule } from 'yargs';
import { FatalConfigError, getErrorMessage } from '@qwen-code/qwen-code-core';
import { enableExtension } from '../../config/extension.js';
import { SettingScope } from '../../config/settings.js';
import { getExtensionManager } from './utils.js';
interface EnableArgs {
name: string;
scope?: string;
}
export function handleEnable(args: EnableArgs) {
export async function handleEnable(args: EnableArgs) {
const extensionManager = await getExtensionManager();
try {
if (args.scope?.toLowerCase() === 'workspace') {
enableExtension(args.name, SettingScope.Workspace);
extensionManager.enableExtension(args.name, SettingScope.Workspace);
} else {
enableExtension(args.name, SettingScope.User);
extensionManager.enableExtension(args.name, SettingScope.User);
}
if (args.scope) {
console.log(
@@ -66,8 +68,8 @@ export const enableCommand: CommandModule = {
}
return true;
}),
handler: (argv) => {
handleEnable({
handler: async (argv) => {
await handleEnable({
name: argv['name'] as string,
scope: argv['scope'] as string,
});


@@ -5,58 +5,64 @@
*/
import type { CommandModule } from 'yargs';
import {
installExtension,
requestConsentNonInteractive,
} from '../../config/extension.js';
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
ExtensionManager,
parseInstallSource,
} from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import { stat } from 'node:fs/promises';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
import { loadSettings } from '../../config/settings.js';
import { requestConsentNonInteractive } from './consent.js';
interface InstallArgs {
source: string;
ref?: string;
autoUpdate?: boolean;
allowPreRelease?: boolean;
consent?: boolean;
}
export async function handleInstall(args: InstallArgs) {
try {
let installMetadata: ExtensionInstallMetadata;
const { source } = args;
const installMetadata = await parseInstallSource(args.source);
if (
source.startsWith('http://') ||
source.startsWith('https://') ||
source.startsWith('git@') ||
source.startsWith('sso://')
installMetadata.type !== 'git' &&
installMetadata.type !== 'github-release'
) {
installMetadata = {
source,
type: 'git',
ref: args.ref,
autoUpdate: args.autoUpdate,
};
} else {
if (args.ref || args.autoUpdate) {
throw new Error(
'--ref and --auto-update are not applicable for local extensions.',
'--ref and --auto-update are not applicable for marketplace extensions.',
);
}
try {
await stat(source);
installMetadata = {
source,
type: 'local',
};
} catch {
throw new Error('Install source not found.');
}
}
const name = await installExtension(
installMetadata,
requestConsentNonInteractive,
const requestConsent = args.consent
? () => Promise.resolve(true)
: requestConsentNonInteractive;
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
isWorkspaceTrusted: !!isWorkspaceTrusted(
loadSettings(workspaceDir).merged,
),
requestConsent,
});
await extensionManager.refreshCache();
const extension = await extensionManager.installExtension(
{
...installMetadata,
ref: args.ref,
autoUpdate: args.autoUpdate,
allowPreRelease: args.allowPreRelease,
},
requestConsent,
);
console.log(
`Extension "${extension.name}" installed successfully and enabled.`,
);
console.log(`Extension "${name}" installed successfully and enabled.`);
} catch (error) {
console.error(getErrorMessage(error));
process.exit(1);
@@ -65,11 +71,13 @@ export async function handleInstall(args: InstallArgs) {
export const installCommand: CommandModule = {
command: 'install <source>',
describe: 'Installs an extension from a git repository URL or a local path.',
describe:
'Installs an extension from a git repository URL, local path, or claude marketplace (marketplace-url:plugin-name).',
builder: (yargs) =>
yargs
.positional('source', {
describe: 'The github URL or local path of the extension to install.',
describe:
'The github URL, local path, or marketplace source (marketplace-url:plugin-name) of the extension to install.',
type: 'string',
demandOption: true,
})
@@ -81,6 +89,16 @@ export const installCommand: CommandModule = {
describe: 'Enable auto-update for this extension.',
type: 'boolean',
})
.option('pre-release', {
describe: 'Enable pre-release versions for this extension.',
type: 'boolean',
})
.option('consent', {
describe:
'Acknowledge the security risks of installing an extension and skip the confirmation prompt.',
type: 'boolean',
default: false,
})
.check((argv) => {
if (!argv.source) {
throw new Error('The source argument must be provided.');
@@ -92,6 +110,8 @@ export const installCommand: CommandModule = {
source: argv['source'] as string,
ref: argv['ref'] as string | undefined,
autoUpdate: argv['auto-update'] as boolean | undefined,
allowPreRelease: argv['pre-release'] as boolean | undefined,
consent: argv['consent'] as boolean | undefined,
});
},
};
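For orientation, a minimal sketch of the three source formats the updated `install` command describes (git URL, local path, marketplace source). All sources below are hypothetical, and the calls go through the `handleInstall` function exported from this file:

```typescript
import { handleInstall } from './install.js';

// Git repository URL, with auto-update enabled.
await handleInstall({ source: 'https://github.com/example/qwen-extension', autoUpdate: true });

// Local path, skipping the confirmation prompt (equivalent to passing --consent).
await handleInstall({ source: './my-local-extension', consent: true });

// Claude marketplace source in the documented "marketplace-url:plugin-name" form.
await handleInstall({ source: 'https://example.com/marketplace:my-plugin' });
```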


@@ -5,13 +5,10 @@
*/
import type { CommandModule } from 'yargs';
import {
installExtension,
requestConsentNonInteractive,
} from '../../config/extension.js';
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
import { type ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import { requestConsentNonInteractive } from './consent.js';
import { getExtensionManager } from './utils.js';
interface InstallArgs {
path: string;
@@ -23,12 +20,14 @@ export async function handleLink(args: InstallArgs) {
source: args.path,
type: 'link',
};
const extensionName = await installExtension(
const extensionManager = await getExtensionManager();
const extension = await extensionManager.installExtension(
installMetadata,
requestConsentNonInteractive,
);
console.log(
`Extension "${extensionName}" linked successfully and enabled.`,
`Extension "${extension.name}" linked successfully and enabled.`,
);
} catch (error) {
console.error(getErrorMessage(error));


@@ -5,19 +5,23 @@
*/
import type { CommandModule } from 'yargs';
import { loadUserExtensions, toOutputString } from '../../config/extension.js';
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';
export async function handleList() {
try {
const extensions = loadUserExtensions();
const extensionManager = await getExtensionManager();
const extensions = extensionManager.getLoadedExtensions();
if (extensions.length === 0) {
console.log('No extensions installed.');
return;
}
console.log(
extensions
.map((extension, _): string => toOutputString(extension, process.cwd()))
.map((extension, _): string =>
extensionManager.toOutputString(extension, process.cwd()),
)
.join('\n\n'),
);
} catch (error) {


@@ -5,8 +5,11 @@
*/
import type { CommandModule } from 'yargs';
import { uninstallExtension } from '../../config/extension.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { requestConsentNonInteractive } from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
import { loadSettings } from '../../config/settings.js';
interface UninstallArgs {
name: string; // can be extension name or source URL.
@@ -14,7 +17,16 @@ interface UninstallArgs {
export async function handleUninstall(args: UninstallArgs) {
try {
await uninstallExtension(args.name);
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
requestConsent: requestConsentNonInteractive,
isWorkspaceTrusted: !!isWorkspaceTrusted(
loadSettings(workspaceDir).merged,
),
});
await extensionManager.refreshCache();
await extensionManager.uninstallExtension(args.name, false);
console.log(`Extension "${args.name}" successfully uninstalled.`);
} catch (error) {
console.error(getErrorMessage(error));


@@ -5,22 +5,13 @@
*/
import type { CommandModule } from 'yargs';
import {
loadExtensions,
annotateActiveExtensions,
ExtensionStorage,
requestConsentNonInteractive,
} from '../../config/extension.js';
import {
updateAllUpdatableExtensions,
type ExtensionUpdateInfo,
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { checkForExtensionUpdate } from '../../config/extensions/github.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import {
checkForExtensionUpdate,
type ExtensionUpdateInfo,
} from '@qwen-code/qwen-code-core';
import { getExtensionManager } from './utils.js';
interface UpdateArgs {
name?: string;
@@ -31,19 +22,9 @@ const updateOutput = (info: ExtensionUpdateInfo) =>
`Extension "${info.name}" successfully updated: ${info.originalVersion}${info.updatedVersion}.`;
export async function handleUpdate(args: UpdateArgs) {
const workingDir = process.cwd();
const extensionEnablementManager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
// Force enable named extensions, otherwise we will only update the enabled
// ones.
args.name ? [args.name] : [],
);
const allExtensions = loadExtensions(extensionEnablementManager);
const extensions = annotateActiveExtensions(
allExtensions,
workingDir,
extensionEnablementManager,
);
const extensionManager = await getExtensionManager();
const extensions = extensionManager.getLoadedExtensions();
if (args.name) {
try {
const extension = extensions.find(
@@ -53,25 +34,23 @@ export async function handleUpdate(args: UpdateArgs) {
console.log(`Extension "${args.name}" not found.`);
return;
}
let updateState: ExtensionUpdateState | undefined;
if (!extension.installMetadata) {
console.log(
`Unable to install extension "${args.name}" due to missing install metadata`,
);
return;
}
await checkForExtensionUpdate(extension, (newState) => {
updateState = newState;
});
const updateState = await checkForExtensionUpdate(
extension,
extensionManager,
);
if (updateState !== ExtensionUpdateState.UPDATE_AVAILABLE) {
console.log(`Extension "${args.name}" is already up to date.`);
return;
}
// TODO(chrstnb): we should list extensions if the requested extension is not installed.
const updatedExtensionInfo = (await updateExtension(
const updatedExtensionInfo = (await extensionManager.updateExtension(
extension,
workingDir,
requestConsentNonInteractive,
updateState,
() => {},
))!;
@@ -92,18 +71,15 @@ export async function handleUpdate(args: UpdateArgs) {
if (args.all) {
try {
const extensionState = new Map();
await checkForAllExtensionUpdates(extensions, (action) => {
if (action.type === 'SET_STATE') {
extensionState.set(action.payload.name, {
status: action.payload.state,
await extensionManager.checkForAllExtensionUpdates(
(extensionName, state) => {
extensionState.set(extensionName, {
status: state,
processed: true, // No need to process as we will force the update.
});
}
});
let updateInfos = await updateAllUpdatableExtensions(
workingDir,
requestConsentNonInteractive,
extensions,
},
);
let updateInfos = await extensionManager.updateAllUpdatableExtensions(
extensionState,
() => {},
);


@@ -0,0 +1,21 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { loadSettings } from '../../config/settings.js';
import { requestConsentNonInteractive } from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
export async function getExtensionManager(): Promise<ExtensionManager> {
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
requestConsent: requestConsentNonInteractive,
isWorkspaceTrusted: !!isWorkspaceTrusted(loadSettings(workspaceDir).merged),
});
await extensionManager.refreshCache();
return extensionManager;
}
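A minimal sketch of how the subcommand handlers in this change consume the helper (mirroring the disable/enable handlers earlier in the diff; the extension name is hypothetical):

```typescript
import { SettingScope } from '../../config/settings.js';
import { getExtensionManager } from './utils.js';

// Build the shared ExtensionManager (refreshes its cache once), then delegate to it.
const extensionManager = await getExtensionManager();
extensionManager.disableExtension('my-extension', SettingScope.User);
```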


@@ -7,7 +7,8 @@
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import { listMcpServers } from './list.js';
import { loadSettings } from '../../config/settings.js';
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
import { loadExtensions } from '../../config/extension.js';
import { ExtensionStorage } from '../../config/extensions/storage.js';
import { createTransport } from '@qwen-code/qwen-code-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';


@@ -8,10 +8,13 @@
import type { CommandModule } from 'yargs';
import { loadSettings } from '../../config/settings.js';
import type { MCPServerConfig } from '@qwen-code/qwen-code-core';
import { MCPServerStatus, createTransport } from '@qwen-code/qwen-code-core';
import {
MCPServerStatus,
createTransport,
ExtensionManager,
} from '@qwen-code/qwen-code-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
const COLOR_GREEN = '\u001b[32m';
const COLOR_YELLOW = '\u001b[33m';
@@ -22,22 +25,27 @@ async function getMcpServersFromConfig(): Promise<
Record<string, MCPServerConfig>
> {
const settings = loadSettings();
const extensions = loadExtensions(
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
);
const extensionManager = new ExtensionManager({
isWorkspaceTrusted: !!isWorkspaceTrusted(settings.merged),
telemetrySettings: settings.merged.telemetry,
});
await extensionManager.refreshCache();
const extensions = extensionManager.getLoadedExtensions();
const mcpServers = { ...(settings.merged.mcpServers || {}) };
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
if (extension.isActive) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
}
return mcpServers;
}


@@ -16,7 +16,8 @@ import {
} from '@qwen-code/qwen-code-core';
import { loadCliConfig, parseArguments, type CliArgs } from './config.js';
import type { Settings } from './settings.js';
import { ExtensionStorage, type Extension } from './extension.js';
import type { Extension } from './extension.js';
import { ExtensionStorage } from './extensions/storage.js';
import * as ServerConfig from '@qwen-code/qwen-code-core';
import { isWorkspaceTrusted } from './trustedFolders.js';
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';


@@ -9,7 +9,6 @@ import {
AuthType,
Config,
DEFAULT_QWEN_EMBEDDING_MODEL,
DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
EditTool,
FileDiscoveryService,
getCurrentGeminiMdFilename,
@@ -25,7 +24,6 @@ import {
SessionService,
type ResumedSessionData,
type FileFilteringOptions,
type MCPServerConfig,
} from '@qwen-code/qwen-code-core';
import { extensionsCommand } from '../commands/extensions.js';
import type { Settings } from './settings.js';
@@ -37,14 +35,11 @@ import { homedir } from 'node:os';
import { resolvePath } from '../utils/resolvePath.js';
import { getCliVersion } from '../utils/version.js';
import type { Extension } from './extension.js';
import { annotateActiveExtensions } from './extension.js';
import { loadSandboxConfig } from './sandboxConfig.js';
import { appEvents } from '../utils/events.js';
import { mcpCommand } from '../commands/mcp.js';
import { isWorkspaceTrusted } from './trustedFolders.js';
import type { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
import { buildWebSearchConfig } from './webSearch.js';
// Simple console logger for now - replace with actual logger if available
@@ -162,7 +157,7 @@ function normalizeOutputFormat(
return OutputFormat.TEXT;
}
export async function parseArguments(settings: Settings): Promise<CliArgs> {
export async function parseArguments(): Promise<CliArgs> {
const rawArgv = hideBin(process.argv);
const yargsInstance = yargs(rawArgv)
.locale('en')
@@ -537,11 +532,9 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
}),
)
// Register MCP subcommands
.command(mcpCommand);
if (settings?.experimental?.extensionManagement ?? true) {
yargsInstance.command(extensionsCommand);
}
.command(mcpCommand)
// Register Extension subcommands
.command(extensionsCommand);
yargsInstance
.version(await getCliVersion()) // This will enable the --version flag based on package.json
@@ -605,11 +598,11 @@ export async function loadHierarchicalGeminiMemory(
includeDirectoriesToReadGemini: readonly string[] = [],
debugMode: boolean,
fileService: FileDiscoveryService,
settings: Settings,
extensionContextFilePaths: string[] = [],
folderTrust: boolean,
memoryImportFormat: 'flat' | 'tree' = 'tree',
fileFilteringOptions?: FileFilteringOptions,
maxDirs: number = 200,
): Promise<{ memoryContent: string; fileCount: number }> {
// FIX: Use real, canonical paths for a reliable comparison to handle symlinks.
const realCwd = fs.realpathSync(path.resolve(currentWorkingDirectory));
@@ -636,7 +629,7 @@ export async function loadHierarchicalGeminiMemory(
folderTrust,
memoryImportFormat,
fileFilteringOptions,
settings.context?.discoveryMaxDirs,
maxDirs,
);
}
@@ -651,30 +644,17 @@ export function isDebugMode(argv: CliArgs): boolean {
export async function loadCliConfig(
settings: Settings,
extensions: Extension[],
extensionEnablementManager: ExtensionEnablementManager,
argv: CliArgs,
cwd: string = process.cwd(),
overrideExtensions?: string[],
): Promise<Config> {
const debugMode = isDebugMode(argv);
const memoryImportFormat = settings.context?.importFormat || 'tree';
const ideMode = settings.ide?.enabled ?? false;
const folderTrust = settings.security?.folderTrust?.enabled ?? false;
const trustedFolder = isWorkspaceTrusted(settings)?.isTrusted ?? true;
const allExtensions = annotateActiveExtensions(
extensions,
cwd,
extensionEnablementManager,
);
const activeExtensions = extensions.filter(
(_, i) => allExtensions[i].isActive,
);
// Set the context filename in the server's memoryTool module BEFORE loading memory
// TODO(b/343434939): This is a bit of a hack. The contextFileName should ideally be passed
// directly to the Config constructor in core, and have core handle setGeminiMdFilename.
@@ -686,51 +666,27 @@ export async function loadCliConfig(
setServerGeminiMdFilename(getCurrentGeminiMdFilename());
}
const extensionContextFilePaths = activeExtensions.flatMap(
(e) => e.contextFiles,
);
// Automatically load output-language.md if it exists
const outputLanguageFilePath = path.join(
let outputLanguageFilePath: string | undefined = path.join(
Storage.getGlobalQwenDir(),
'output-language.md',
);
if (fs.existsSync(outputLanguageFilePath)) {
extensionContextFilePaths.push(outputLanguageFilePath);
if (debugMode) {
logger.debug(
`Found output-language.md, adding to context files: ${outputLanguageFilePath}`,
);
}
} else {
outputLanguageFilePath = undefined;
}
const fileService = new FileDiscoveryService(cwd);
const fileFiltering = {
...DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
...settings.context?.fileFiltering,
};
const includeDirectories = (settings.context?.includeDirectories || [])
.map(resolvePath)
.concat((argv.includeDirectories || []).map(resolvePath));
// Call the (now wrapper) loadHierarchicalGeminiMemory which calls the server's version
const { memoryContent, fileCount } = await loadHierarchicalGeminiMemory(
cwd,
settings.context?.loadMemoryFromIncludeDirectories
? includeDirectories
: [],
debugMode,
fileService,
settings,
extensionContextFilePaths,
trustedFolder,
memoryImportFormat,
fileFiltering,
);
let mcpServers = mergeMcpServers(settings, activeExtensions);
const question = argv.promptInteractive || argv.prompt || '';
const inputFormat: InputFormat =
(argv.inputFormat as InputFormat | undefined) ?? InputFormat.TEXT;
@@ -844,38 +800,18 @@ export async function loadCliConfig(
const excludeTools = mergeExcludeTools(
settings,
activeExtensions,
extraExcludes.length > 0 ? extraExcludes : undefined,
argv.excludeTools,
);
const blockedMcpServers: Array<{ name: string; extensionName: string }> = [];
if (!argv.allowedMcpServerNames) {
if (settings.mcp?.allowed) {
mcpServers = allowedMcpServers(
mcpServers,
settings.mcp.allowed,
blockedMcpServers,
);
}
if (settings.mcp?.excluded) {
const excludedNames = new Set(settings.mcp.excluded.filter(Boolean));
if (excludedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) => !excludedNames.has(key)),
);
}
}
}
if (argv.allowedMcpServerNames) {
mcpServers = allowedMcpServers(
mcpServers,
argv.allowedMcpServerNames,
blockedMcpServers,
);
}
const allowedMcpServers = argv.allowedMcpServerNames
? new Set(argv.allowedMcpServerNames.filter(Boolean))
: settings.mcp?.allowed
? new Set(settings.mcp.allowed.filter(Boolean))
: undefined;
const excludedMcpServers = settings.mcp?.excluded
? new Set(settings.mcp.excluded.filter(Boolean))
: undefined;
const selectedAuthType =
(argv.authType as AuthType | undefined) ||
@@ -943,6 +879,8 @@ export async function loadCliConfig(
includeDirectories,
loadMemoryFromIncludeDirectories:
settings.context?.loadMemoryFromIncludeDirectories || false,
importFormat: settings.context?.importFormat || 'tree',
discoveryMaxDirs: settings.context?.discoveryMaxDirs || 200,
debugMode,
question,
fullContext: argv.allFiles || false,
@@ -952,9 +890,13 @@ export async function loadCliConfig(
toolDiscoveryCommand: settings.tools?.discoveryCommand,
toolCallCommand: settings.tools?.callCommand,
mcpServerCommand: settings.mcp?.serverCommand,
mcpServers,
userMemory: memoryContent,
geminiMdFileCount: fileCount,
mcpServers: settings.mcpServers || {},
allowedMcpServers: allowedMcpServers
? Array.from(allowedMcpServers)
: undefined,
excludedMcpServers: excludedMcpServers
? Array.from(excludedMcpServers)
: undefined,
approvalMode,
showMemoryUsage:
argv.showMemoryUsage || settings.ui?.showMemoryUsage || false,
@@ -977,15 +919,14 @@ export async function loadCliConfig(
fileDiscoveryService: fileService,
bugCommand: settings.advanced?.bugCommand,
model: resolvedModel,
extensionContextFilePaths,
outputLanguageFilePath,
sessionTokenLimit: settings.model?.sessionTokenLimit ?? -1,
maxSessionTurns:
argv.maxSessionTurns ?? settings.model?.maxSessionTurns ?? -1,
experimentalZedIntegration: argv.experimentalAcp || false,
experimentalSkills: argv.experimentalSkills || false,
listExtensions: argv.listExtensions || false,
extensions: allExtensions,
blockedMcpServers,
overrideExtensions: overrideExtensions || argv.extensions,
noBrowser: !!process.env['NO_BROWSER'],
authType: selectedAuthType,
inputFormat,
@@ -1040,61 +981,8 @@ export async function loadCliConfig(
});
}
function allowedMcpServers(
mcpServers: { [x: string]: MCPServerConfig },
allowMCPServers: string[],
blockedMcpServers: Array<{ name: string; extensionName: string }>,
) {
const allowedNames = new Set(allowMCPServers.filter(Boolean));
if (allowedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key, server]) => {
const isAllowed = allowedNames.has(key);
if (!isAllowed) {
blockedMcpServers.push({
name: key,
extensionName: server.extensionName || '',
});
}
return isAllowed;
}),
);
} else {
blockedMcpServers.push(
...Object.entries(mcpServers).map(([key, server]) => ({
name: key,
extensionName: server.extensionName || '',
})),
);
mcpServers = {};
}
return mcpServers;
}
function mergeMcpServers(settings: Settings, extensions: Extension[]) {
const mcpServers = { ...(settings.mcpServers || {}) };
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
logger.warn(
`Skipping extension MCP config for server with key "${key}" as it already exists.`,
);
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
return mcpServers;
}
function mergeExcludeTools(
settings: Settings,
extensions: Extension[],
extraExcludes?: string[] | undefined,
cliExcludeTools?: string[] | undefined,
): string[] {
@@ -1103,10 +991,5 @@ function mergeExcludeTools(
...(settings.tools?.exclude || []),
...(extraExcludes || []),
]);
for (const extension of extensions) {
for (const tool of extension.config.excludeTools || []) {
allExcludeTools.add(tool);
}
}
return [...allExcludeTools];
}

File diff suppressed because it is too large.


@@ -1,786 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import type {
MCPServerConfig,
GeminiCLIExtension,
ExtensionInstallMetadata,
} from '@qwen-code/qwen-code-core';
import {
QWEN_DIR,
Storage,
Config,
ExtensionInstallEvent,
ExtensionUninstallEvent,
ExtensionDisableEvent,
ExtensionEnableEvent,
logExtensionEnable,
logExtensionInstallEvent,
logExtensionUninstall,
logExtensionDisable,
} from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { SettingScope, loadSettings } from '../config/settings.js';
import { getErrorMessage } from '../utils/errors.js';
import { recursivelyHydrateStrings } from './extensions/variables.js';
import { isWorkspaceTrusted } from './trustedFolders.js';
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
import {
cloneFromGit,
downloadFromGitHubRelease,
} from './extensions/github.js';
import type { LoadExtensionContext } from './extensions/variableSchema.js';
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
import chalk from 'chalk';
import type { ConfirmationRequest } from '../ui/types.js';
export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';
export interface Extension {
path: string;
config: ExtensionConfig;
contextFiles: string[];
installMetadata?: ExtensionInstallMetadata | undefined;
}
export interface ExtensionConfig {
name: string;
version: string;
mcpServers?: Record<string, MCPServerConfig>;
contextFileName?: string | string[];
excludeTools?: string[];
}
export interface ExtensionUpdateInfo {
name: string;
originalVersion: string;
updatedVersion: string;
}
export class ExtensionStorage {
private readonly extensionName: string;
constructor(extensionName: string) {
this.extensionName = extensionName;
}
getExtensionDir(): string {
return path.join(
ExtensionStorage.getUserExtensionsDir(),
this.extensionName,
);
}
getConfigPath(): string {
return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
}
static getUserExtensionsDir(): string {
const storage = new Storage(os.homedir());
return storage.getExtensionsDir();
}
static async createTmpDir(): Promise<string> {
return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
}
}
export function getWorkspaceExtensions(workspaceDir: string): Extension[] {
// If the workspace dir is the user extensions dir, there are no workspace extensions.
if (path.resolve(workspaceDir) === path.resolve(os.homedir())) {
return [];
}
return loadExtensionsFromDir(workspaceDir);
}
export async function copyExtension(
source: string,
destination: string,
): Promise<void> {
await fs.promises.cp(source, destination, { recursive: true });
}
export async function performWorkspaceExtensionMigration(
extensions: Extension[],
requestConsent: (consent: string) => Promise<boolean>,
): Promise<string[]> {
const failedInstallNames: string[] = [];
for (const extension of extensions) {
try {
const installMetadata: ExtensionInstallMetadata = {
source: extension.path,
type: 'local',
};
await installExtension(installMetadata, requestConsent);
} catch (_) {
failedInstallNames.push(extension.config.name);
}
}
return failedInstallNames;
}
function getTelemetryConfig(cwd: string) {
const settings = loadSettings(cwd);
const config = new Config({
telemetry: settings.merged.telemetry,
interactive: false,
targetDir: cwd,
cwd,
model: '',
debugMode: false,
});
return config;
}
export function loadExtensions(
extensionEnablementManager: ExtensionEnablementManager,
workspaceDir: string = process.cwd(),
): Extension[] {
const settings = loadSettings(workspaceDir).merged;
const allExtensions = [...loadUserExtensions()];
if (
(isWorkspaceTrusted(settings) ?? true) &&
// Default management setting to true
!(settings.experimental?.extensionManagement ?? true)
) {
allExtensions.push(...getWorkspaceExtensions(workspaceDir));
}
const uniqueExtensions = new Map<string, Extension>();
for (const extension of allExtensions) {
if (
!uniqueExtensions.has(extension.config.name) &&
extensionEnablementManager.isEnabled(extension.config.name, workspaceDir)
) {
uniqueExtensions.set(extension.config.name, extension);
}
}
return Array.from(uniqueExtensions.values());
}
export function loadUserExtensions(): Extension[] {
const userExtensions = loadExtensionsFromDir(os.homedir());
const uniqueExtensions = new Map<string, Extension>();
for (const extension of userExtensions) {
if (!uniqueExtensions.has(extension.config.name)) {
uniqueExtensions.set(extension.config.name, extension);
}
}
return Array.from(uniqueExtensions.values());
}
export function loadExtensionsFromDir(dir: string): Extension[] {
const storage = new Storage(dir);
const extensionsDir = storage.getExtensionsDir();
if (!fs.existsSync(extensionsDir)) {
return [];
}
const extensions: Extension[] = [];
for (const subdir of fs.readdirSync(extensionsDir)) {
const extensionDir = path.join(extensionsDir, subdir);
const extension = loadExtension({ extensionDir, workspaceDir: dir });
if (extension != null) {
extensions.push(extension);
}
}
return extensions;
}
export function loadExtension(context: LoadExtensionContext): Extension | null {
const { extensionDir, workspaceDir } = context;
if (!fs.statSync(extensionDir).isDirectory()) {
return null;
}
const installMetadata = loadInstallMetadata(extensionDir);
let effectiveExtensionPath = extensionDir;
if (installMetadata?.type === 'link') {
effectiveExtensionPath = installMetadata.source;
}
try {
let config = loadExtensionConfig({
extensionDir: effectiveExtensionPath,
workspaceDir,
});
config = resolveEnvVarsInObject(config);
if (config.mcpServers) {
config.mcpServers = Object.fromEntries(
Object.entries(config.mcpServers).map(([key, value]) => [
key,
filterMcpConfig(value),
]),
);
}
const contextFiles = getContextFileNames(config)
.map((contextFileName) =>
path.join(effectiveExtensionPath, contextFileName),
)
.filter((contextFilePath) => fs.existsSync(contextFilePath));
return {
path: effectiveExtensionPath,
config,
contextFiles,
installMetadata,
};
} catch (e) {
console.error(
`Warning: Skipping extension in ${effectiveExtensionPath}: ${getErrorMessage(
e,
)}`,
);
return null;
}
}
export function loadExtensionByName(
name: string,
workspaceDir: string = process.cwd(),
): Extension | null {
const userExtensionsDir = ExtensionStorage.getUserExtensionsDir();
if (!fs.existsSync(userExtensionsDir)) {
return null;
}
for (const subdir of fs.readdirSync(userExtensionsDir)) {
const extensionDir = path.join(userExtensionsDir, subdir);
if (!fs.statSync(extensionDir).isDirectory()) {
continue;
}
const extension = loadExtension({ extensionDir, workspaceDir });
if (
extension &&
extension.config.name.toLowerCase() === name.toLowerCase()
) {
return extension;
}
}
return null;
}
function filterMcpConfig(original: MCPServerConfig): MCPServerConfig {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { trust, ...rest } = original;
return Object.freeze(rest);
}
export function loadInstallMetadata(
extensionDir: string,
): ExtensionInstallMetadata | undefined {
const metadataFilePath = path.join(extensionDir, INSTALL_METADATA_FILENAME);
try {
const configContent = fs.readFileSync(metadataFilePath, 'utf-8');
const metadata = JSON.parse(configContent) as ExtensionInstallMetadata;
return metadata;
} catch (_e) {
return undefined;
}
}
function getContextFileNames(config: ExtensionConfig): string[] {
if (!config.contextFileName) {
return ['QWEN.md'];
} else if (!Array.isArray(config.contextFileName)) {
return [config.contextFileName];
}
return config.contextFileName;
}
/**
* Returns an annotated list of extensions. If an extension is listed in enabledExtensionNames, it will be active.
* If enabledExtensionNames is empty, an extension is active unless it is disabled.
* @param extensions The base list of extensions.
* @param enabledExtensionNames The names of explicitly enabled extensions.
* @param workspaceDir The current workspace directory.
*/
export function annotateActiveExtensions(
extensions: Extension[],
workspaceDir: string,
manager: ExtensionEnablementManager,
): GeminiCLIExtension[] {
manager.validateExtensionOverrides(extensions);
return extensions.map((extension) => ({
name: extension.config.name,
version: extension.config.version,
isActive: manager.isEnabled(extension.config.name, workspaceDir),
path: extension.path,
installMetadata: extension.installMetadata,
}));
}
/**
* Requests consent from the user to perform an action, by reading a Y/n
* character from stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param consentDescription The description of the thing they will be consenting to.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentNonInteractive(
consentDescription: string,
): Promise<boolean> {
console.info(consentDescription);
const result = await promptForConsentNonInteractive(
'Do you want to continue? [Y/n]: ',
);
return result;
}
/**
* Requests consent from the user to perform an action, in interactive mode.
*
* This should not be called from non-interactive mode as it will not work.
*
* @param consentDescription The description of the thing they will be consenting to.
* @param setExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentInteractive(
consentDescription: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return await promptForConsentInteractive(
consentDescription + '\n\nDo you want to continue?',
addExtensionUpdateConfirmationRequest,
);
}
/**
* Asks users a prompt and awaits for a y/n response on stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param prompt A yes/no prompt to ask the user
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
*/
async function promptForConsentNonInteractive(
prompt: string,
): Promise<boolean> {
const readline = await import('node:readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return new Promise((resolve) => {
rl.question(prompt, (answer) => {
rl.close();
resolve(['y', ''].includes(answer.trim().toLowerCase()));
});
});
}
/**
* Asks users an interactive yes/no prompt.
*
* This should not be called from non-interactive mode as it will break the CLI.
*
* @param prompt A markdown prompt to ask the user
* @param setExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
* @returns Whether or not the user answers yes.
*/
async function promptForConsentInteractive(
prompt: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return await new Promise<boolean>((resolve) => {
addExtensionUpdateConfirmationRequest({
prompt,
onConfirm: (resolvedConfirmed) => {
resolve(resolvedConfirmed);
},
});
});
}
export async function installExtension(
installMetadata: ExtensionInstallMetadata,
requestConsent: (consent: string) => Promise<boolean>,
cwd: string = process.cwd(),
previousExtensionConfig?: ExtensionConfig,
): Promise<string> {
const telemetryConfig = getTelemetryConfig(cwd);
let newExtensionConfig: ExtensionConfig | null = null;
let localSourcePath: string | undefined;
try {
const settings = loadSettings(cwd).merged;
if (!isWorkspaceTrusted(settings)) {
throw new Error(
`Could not install extension from untrusted folder at ${installMetadata.source}`,
);
}
const extensionsDir = ExtensionStorage.getUserExtensionsDir();
await fs.promises.mkdir(extensionsDir, { recursive: true });
if (
!path.isAbsolute(installMetadata.source) &&
(installMetadata.type === 'local' || installMetadata.type === 'link')
) {
installMetadata.source = path.resolve(cwd, installMetadata.source);
}
let tempDir: string | undefined;
if (
installMetadata.type === 'git' ||
installMetadata.type === 'github-release'
) {
tempDir = await ExtensionStorage.createTmpDir();
try {
const result = await downloadFromGitHubRelease(
installMetadata,
tempDir,
);
installMetadata.type = result.type;
installMetadata.releaseTag = result.tagName;
} catch (_error) {
await cloneFromGit(installMetadata, tempDir);
installMetadata.type = 'git';
}
localSourcePath = tempDir;
} else if (
installMetadata.type === 'local' ||
installMetadata.type === 'link'
) {
localSourcePath = installMetadata.source;
} else {
throw new Error(`Unsupported install type: ${installMetadata.type}`);
}
try {
newExtensionConfig = loadExtensionConfig({
extensionDir: localSourcePath,
workspaceDir: cwd,
});
const newExtensionName = newExtensionConfig.name;
const extensionStorage = new ExtensionStorage(newExtensionName);
const destinationPath = extensionStorage.getExtensionDir();
const installedExtensions = loadUserExtensions();
if (
installedExtensions.some(
(installed) => installed.config.name === newExtensionName,
)
) {
throw new Error(
`Extension "${newExtensionName}" is already installed. Please uninstall it first.`,
);
}
await maybeRequestConsentOrFail(
newExtensionConfig,
requestConsent,
previousExtensionConfig,
);
await fs.promises.mkdir(destinationPath, { recursive: true });
if (
installMetadata.type === 'local' ||
installMetadata.type === 'git' ||
installMetadata.type === 'github-release'
) {
await copyExtension(localSourcePath, destinationPath);
}
const metadataString = JSON.stringify(installMetadata, null, 2);
const metadataPath = path.join(
destinationPath,
INSTALL_METADATA_FILENAME,
);
await fs.promises.writeFile(metadataPath, metadataString);
} finally {
if (tempDir) {
await fs.promises.rm(tempDir, { recursive: true, force: true });
}
}
logExtensionInstallEvent(
telemetryConfig,
new ExtensionInstallEvent(
newExtensionConfig!.name,
newExtensionConfig!.version,
installMetadata.source,
'success',
),
);
enableExtension(newExtensionConfig!.name, SettingScope.User);
return newExtensionConfig!.name;
} catch (error) {
// Attempt to load config from the source path even if installation fails
// to get the name and version for logging.
if (!newExtensionConfig && localSourcePath) {
try {
newExtensionConfig = loadExtensionConfig({
extensionDir: localSourcePath,
workspaceDir: cwd,
});
} catch {
// Ignore error, this is just for logging.
}
}
logExtensionInstallEvent(
telemetryConfig,
new ExtensionInstallEvent(
newExtensionConfig?.name ?? '',
newExtensionConfig?.version ?? '',
installMetadata.source,
'error',
),
);
throw error;
}
}
/**
 * Builds a consent string for installing an extension based on its
* extensionConfig.
*/
function extensionConsentString(extensionConfig: ExtensionConfig): string {
const output: string[] = [];
const mcpServerEntries = Object.entries(extensionConfig.mcpServers || {});
output.push(`Installing extension "${extensionConfig.name}".`);
output.push(
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**',
);
if (mcpServerEntries.length) {
output.push('This extension will run the following MCP servers:');
for (const [key, mcpServer] of mcpServerEntries) {
const isLocal = !!mcpServer.command;
const source =
mcpServer.httpUrl ??
`${mcpServer.command || ''}${mcpServer.args ? ' ' + mcpServer.args.join(' ') : ''}`;
output.push(` * ${key} (${isLocal ? 'local' : 'remote'}): ${source}`);
}
}
if (extensionConfig.contextFileName) {
output.push(
`This extension will append info to your QWEN.md context using ${extensionConfig.contextFileName}`,
);
}
if (extensionConfig.excludeTools) {
output.push(
`This extension will exclude the following core tools: ${extensionConfig.excludeTools}`,
);
}
return output.join('\n');
}
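/*
 * For illustration (a hypothetical config, not from this codebase): an
 * extension named "my-ext" declaring one stdio MCP server
 * `{ search: { command: 'node', args: ['server.js'] } }` would produce a
 * consent string roughly like:
 *
 *   Installing extension "my-ext".
 *   **Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**
 *   This extension will run the following MCP servers:
 *    * search (local): node server.js
 */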
/**
* Requests consent from the user to install an extension (extensionConfig), if
* there is any difference between the consent string for `extensionConfig` and
* `previousExtensionConfig`.
*
* Always requests consent if previousExtensionConfig is null.
*
* Throws if the user does not consent.
*/
async function maybeRequestConsentOrFail(
extensionConfig: ExtensionConfig,
requestConsent: (consent: string) => Promise<boolean>,
previousExtensionConfig?: ExtensionConfig,
) {
const extensionConsent = extensionConsentString(extensionConfig);
if (previousExtensionConfig) {
const previousExtensionConsent = extensionConsentString(
previousExtensionConfig,
);
if (previousExtensionConsent === extensionConsent) {
return;
}
}
if (!(await requestConsent(extensionConsent))) {
throw new Error(`Installation cancelled for "${extensionConfig.name}".`);
}
}
export function validateName(name: string) {
if (!/^[a-zA-Z0-9-]+$/.test(name)) {
throw new Error(
`Invalid extension name: "${name}". Only letters (a-z, A-Z), numbers (0-9), and dashes (-) are allowed.`,
);
}
}
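// e.g. validateName('context7') and validateName('my-ext-2') pass, while
// validateName('my ext!') throws because spaces and '!' are not allowed.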
export function loadExtensionConfig(
context: LoadExtensionContext,
): ExtensionConfig {
const { extensionDir, workspaceDir } = context;
const configFilePath = path.join(extensionDir, EXTENSIONS_CONFIG_FILENAME);
if (!fs.existsSync(configFilePath)) {
throw new Error(`Configuration file not found at ${configFilePath}`);
}
try {
const configContent = fs.readFileSync(configFilePath, 'utf-8');
const config = recursivelyHydrateStrings(JSON.parse(configContent), {
extensionPath: extensionDir,
workspacePath: workspaceDir,
'/': path.sep,
pathSeparator: path.sep,
}) as unknown as ExtensionConfig;
if (!config.name || !config.version) {
throw new Error(
`Invalid configuration in ${configFilePath}: missing ${!config.name ? '"name"' : '"version"'}`,
);
}
validateName(config.name);
return config;
} catch (e) {
throw new Error(
`Failed to load extension config from ${configFilePath}: ${getErrorMessage(
e,
)}`,
);
}
}
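/*
 * Note on variable hydration (a sketch of the assumed `${...}` placeholder
 * syntax handled by `recursivelyHydrateStrings`): a config value such as
 * "${extensionPath}${/}dist${/}server.js" would be expanded against the
 * extension's install directory and the platform path separator before the
 * config is validated and returned.
 */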
export async function uninstallExtension(
extensionIdentifier: string,
cwd: string = process.cwd(),
): Promise<void> {
const telemetryConfig = getTelemetryConfig(cwd);
const installedExtensions = loadUserExtensions();
const extensionName = installedExtensions.find(
(installed) =>
installed.config.name.toLowerCase() ===
extensionIdentifier.toLowerCase() ||
installed.installMetadata?.source.toLowerCase() ===
extensionIdentifier.toLowerCase(),
)?.config.name;
if (!extensionName) {
    throw new Error(`Extension "${extensionIdentifier}" not found.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
[extensionName],
);
manager.remove(extensionName);
const storage = new ExtensionStorage(extensionName);
await fs.promises.rm(storage.getExtensionDir(), {
recursive: true,
force: true,
});
logExtensionUninstall(
telemetryConfig,
new ExtensionUninstallEvent(extensionName, 'success'),
);
}
export function toOutputString(
extension: Extension,
workspaceDir: string,
): string {
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
);
const userEnabled = manager.isEnabled(extension.config.name, os.homedir());
const workspaceEnabled = manager.isEnabled(
extension.config.name,
workspaceDir,
);
const status = workspaceEnabled ? chalk.green('✓') : chalk.red('✗');
let output = `${status} ${extension.config.name} (${extension.config.version})`;
output += `\n Path: ${extension.path}`;
if (extension.installMetadata) {
output += `\n Source: ${extension.installMetadata.source} (Type: ${extension.installMetadata.type})`;
if (extension.installMetadata.ref) {
output += `\n Ref: ${extension.installMetadata.ref}`;
}
if (extension.installMetadata.releaseTag) {
output += `\n Release tag: ${extension.installMetadata.releaseTag}`;
}
}
output += `\n Enabled (User): ${userEnabled}`;
output += `\n Enabled (Workspace): ${workspaceEnabled}`;
if (extension.contextFiles.length > 0) {
output += `\n Context files:`;
extension.contextFiles.forEach((contextFile) => {
output += `\n ${contextFile}`;
});
}
if (extension.config.mcpServers) {
output += `\n MCP servers:`;
Object.keys(extension.config.mcpServers).forEach((key) => {
output += `\n ${key}`;
});
}
if (extension.config.excludeTools) {
output += `\n Excluded tools:`;
extension.config.excludeTools.forEach((tool) => {
output += `\n ${tool}`;
});
}
return output;
}
export function disableExtension(
name: string,
scope: SettingScope,
cwd: string = process.cwd(),
) {
const config = getTelemetryConfig(cwd);
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
throw new Error('System and SystemDefaults scopes are not supported.');
}
const extension = loadExtensionByName(name, cwd);
if (!extension) {
throw new Error(`Extension with name ${name} does not exist.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
[name],
);
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
manager.disable(name, true, scopePath);
logExtensionDisable(config, new ExtensionDisableEvent(name, scope));
}
export function enableExtension(
name: string,
scope: SettingScope,
cwd: string = process.cwd(),
) {
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
throw new Error('System and SystemDefaults scopes are not supported.');
}
const extension = loadExtensionByName(name, cwd);
if (!extension) {
throw new Error(`Extension with name ${name} does not exist.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
);
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
manager.enable(name, true, scopePath);
const config = getTelemetryConfig(cwd);
logExtensionEnable(config, new ExtensionEnableEvent(name, scope));
}

View File

@@ -1,424 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as path from 'node:path';
import fs from 'node:fs';
import os from 'node:os';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { ExtensionEnablementManager, Override } from './extensionEnablement.js';
import type { Extension } from '../extension.js';
// Helper to create a temporary directory for testing
function createTestDir() {
const dirPath = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-test-'));
return {
path: dirPath,
cleanup: () => fs.rmSync(dirPath, { recursive: true, force: true }),
};
}
let testDir: { path: string; cleanup: () => void };
let configDir: string;
let manager: ExtensionEnablementManager;
describe('ExtensionEnablementManager', () => {
beforeEach(() => {
testDir = createTestDir();
configDir = path.join(testDir.path, '.gemini');
manager = new ExtensionEnablementManager(configDir);
});
afterEach(() => {
testDir.cleanup();
// Reset the singleton instance for test isolation
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(ExtensionEnablementManager as any).instance = undefined;
});
describe('isEnabled', () => {
it('should return true if extension is not configured', () => {
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
});
it('should return true if no overrides match', () => {
manager.disable('ext-test', false, '/another/path');
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
});
it('should enable a path based on an override rule', () => {
manager.disable('ext-test', true, '/');
manager.enable('ext-test', true, '/home/user/projects/');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
true,
);
});
it('should disable a path based on a disable override rule', () => {
manager.enable('ext-test', true, '/');
manager.disable('ext-test', true, '/home/user/projects/');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
});
it('should respect the last matching rule (enable wins)', () => {
manager.disable('ext-test', true, '/home/user/projects/');
manager.enable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
true,
);
});
it('should respect the last matching rule (disable wins)', () => {
manager.enable('ext-test', true, '/home/user/projects/');
manager.disable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
});
    it('should disable a specific child dir under an enabled parent', () => {
manager.enable('ext-test', true, '/home/user/projects');
manager.disable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
expect(
manager.isEnabled('ext-test', '/home/user/projects/something-else'),
).toBe(true);
});
});
describe('includeSubdirs', () => {
it('should add a glob when enabling with includeSubdirs', () => {
manager.enable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
});
it('should not add a glob when enabling without includeSubdirs', () => {
manager.enable('ext-test', false, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should add a glob when disabling with includeSubdirs', () => {
manager.disable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('!/path/to/dir/*');
});
it('should remove conflicting glob rule when enabling without subdirs', () => {
      manager.enable('ext-test', true, '/path/to/dir'); // Adds /path/to/dir/*
manager.enable('ext-test', false, '/path/to/dir'); // Should remove the glob
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should remove conflicting non-glob rule when enabling with subdirs', () => {
      manager.enable('ext-test', false, '/path/to/dir'); // Adds /path/to/dir/
manager.enable('ext-test', true, '/path/to/dir'); // Should remove the non-glob
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/');
});
it('should remove conflicting rules when disabling', () => {
manager.enable('ext-test', true, '/path/to/dir'); // enabled with glob
manager.disable('ext-test', false, '/path/to/dir'); // disabled without
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('!/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should correctly evaluate isEnabled with subdirs', () => {
manager.disable('ext-test', true, '/');
manager.enable('ext-test', true, '/path/to/dir');
expect(manager.isEnabled('ext-test', '/path/to/dir/')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/dir/sub/')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/another/')).toBe(false);
});
it('should correctly evaluate isEnabled without subdirs', () => {
manager.disable('ext-test', true, '/*');
manager.enable('ext-test', false, '/path/to/dir');
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/dir/sub')).toBe(false);
});
});
describe('pruning child rules', () => {
it('should remove child rules when enabling a parent with subdirs', () => {
// Pre-existing rules for children
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.disable('ext-test', true, '/path/to/dir/subdir2');
manager.enable('ext-test', false, '/path/to/another/dir');
// Enable the parent directory
manager.enable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
// The new parent rule should be present
expect(overrides).toContain(`/path/to/dir/*`);
// Child rules should be removed
expect(overrides).not.toContain('/path/to/dir/subdir1/');
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
// Unrelated rules should remain
expect(overrides).toContain('/path/to/another/dir/');
});
it('should remove child rules when disabling a parent with subdirs', () => {
// Pre-existing rules for children
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.disable('ext-test', true, '/path/to/dir/subdir2');
manager.enable('ext-test', false, '/path/to/another/dir');
// Disable the parent directory
manager.disable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
// The new parent rule should be present
expect(overrides).toContain(`!/path/to/dir/*`);
// Child rules should be removed
expect(overrides).not.toContain('/path/to/dir/subdir1/');
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
// Unrelated rules should remain
expect(overrides).toContain('/path/to/another/dir/');
});
it('should not remove child rules if includeSubdirs is false', () => {
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.enable('ext-test', false, '/path/to/dir'); // Not including subdirs
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
expect(overrides).toContain('/path/to/dir/subdir1/');
expect(overrides).toContain('/path/to/dir/');
});
});
it('should enable a path based on an enable override', () => {
manager.disable('ext-test', true, '/Users/chrstn');
manager.enable('ext-test', true, '/Users/chrstn/gemini-cli');
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
true,
);
});
  it('should not apply a non-subdir rule to subdirectories', () => {
manager.disable('ext-test', false, '/Users/chrstn');
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
true,
);
});
describe('extension overrides (-e <name>)', () => {
beforeEach(() => {
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
});
it('can enable extensions, case-insensitive', () => {
manager.disable('ext-test', true, '/');
expect(manager.isEnabled('ext-test', '/')).toBe(true);
expect(manager.isEnabled('Ext-Test', '/')).toBe(true);
// Double check that it would have been disabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
).toBe(false);
});
it('disable all other extensions', () => {
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
manager.enable('ext-test-2', true, '/');
expect(manager.isEnabled('ext-test-2', '/')).toBe(false);
// Double check that it would have been enabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test-2', '/'),
).toBe(true);
});
it('none disables all extensions', () => {
manager = new ExtensionEnablementManager(configDir, ['none']);
manager.enable('ext-test', true, '/');
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(false);
// Double check that it would have been enabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
).toBe(true);
});
});
describe('validateExtensionOverrides', () => {
let consoleErrorSpy: ReturnType<typeof vi.spyOn>;
beforeEach(() => {
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
});
afterEach(() => {
consoleErrorSpy.mockRestore();
});
it('should not log an error if enabledExtensionNamesOverride is empty', () => {
const manager = new ExtensionEnablementManager(configDir, []);
manager.validateExtensionOverrides([]);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
it('should not log an error if all enabledExtensionNamesOverride are valid', () => {
const manager = new ExtensionEnablementManager(configDir, [
'ext-one',
'ext-two',
]);
const extensions = [
{ config: { name: 'ext-one' } },
{ config: { name: 'ext-two' } },
] as Extension[];
manager.validateExtensionOverrides(extensions);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
it('should log an error for each invalid extension name in enabledExtensionNamesOverride', () => {
const manager = new ExtensionEnablementManager(configDir, [
'ext-one',
'ext-invalid',
'ext-another-invalid',
]);
const extensions = [
{ config: { name: 'ext-one' } },
{ config: { name: 'ext-two' } },
] as Extension[];
manager.validateExtensionOverrides(extensions);
expect(consoleErrorSpy).toHaveBeenCalledTimes(2);
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Extension not found: ext-invalid',
);
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Extension not found: ext-another-invalid',
);
});
it('should not log an error if "none" is in enabledExtensionNamesOverride', () => {
const manager = new ExtensionEnablementManager(configDir, ['none']);
manager.validateExtensionOverrides([]);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
});
});
describe('Override', () => {
it('should create an override from input', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should create a disable override from input', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(true);
expect(override.includeSubdirs).toBe(false);
});
it('should create an override from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir');
expect(override.baseRule).toBe('/path/to/dir');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(false);
});
it('should create a disable override from a file rule', () => {
const override = Override.fromFileRule('!/path/to/dir/');
expect(override.isDisable).toBe(true);
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.includeSubdirs).toBe(false);
});
it('should create an override with subdirs from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir/*');
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should correctly identify conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', false);
expect(override1.conflictsWith(override2)).toBe(true);
});
it('should correctly identify non-conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/another/dir', true);
expect(override1.conflictsWith(override2)).toBe(false);
});
it('should correctly identify equal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(true);
});
it('should correctly identify unequal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('!/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(false);
});
it('should generate the correct regex', () => {
const override = Override.fromInput('/path/to/dir', true);
const regex = override.asRegex();
expect(regex.test('/path/to/dir/')).toBe(true);
expect(regex.test('/path/to/dir/subdir')).toBe(true);
expect(regex.test('/path/to/another/dir')).toBe(false);
});
it('should correctly identify child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify child overrides with glob', () => {
const parent = Override.fromInput('/path/to/dir/*', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify non-child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const other = Override.fromInput('/path/to/another/dir', false);
expect(other.isChildOf(parent)).toBe(false);
});
it('should generate the correct output string', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.output()).toBe(`/path/to/dir/*`);
});
it('should generate the correct output string for a disable override', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
it('should disable a path based on a disable override rule', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
});

View File

@@ -1,239 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import fs from 'node:fs';
import path from 'node:path';
import { type Extension } from '../extension.js';
export interface ExtensionEnablementConfig {
overrides: string[];
}
export interface AllExtensionsEnablementConfig {
[extensionName: string]: ExtensionEnablementConfig;
}
export class Override {
constructor(
public baseRule: string,
public isDisable: boolean,
public includeSubdirs: boolean,
) {}
static fromInput(inputRule: string, includeSubdirs: boolean): Override {
const isDisable = inputRule.startsWith('!');
let baseRule = isDisable ? inputRule.substring(1) : inputRule;
baseRule = ensureLeadingAndTrailingSlash(baseRule);
return new Override(baseRule, isDisable, includeSubdirs);
}
static fromFileRule(fileRule: string): Override {
const isDisable = fileRule.startsWith('!');
let baseRule = isDisable ? fileRule.substring(1) : fileRule;
const includeSubdirs = baseRule.endsWith('*');
baseRule = includeSubdirs
? baseRule.substring(0, baseRule.length - 1)
: baseRule;
return new Override(baseRule, isDisable, includeSubdirs);
}
conflictsWith(other: Override): boolean {
if (this.baseRule === other.baseRule) {
return (
this.includeSubdirs !== other.includeSubdirs ||
this.isDisable !== other.isDisable
);
}
return false;
}
isEqualTo(other: Override): boolean {
return (
this.baseRule === other.baseRule &&
this.includeSubdirs === other.includeSubdirs &&
this.isDisable === other.isDisable
);
}
asRegex(): RegExp {
return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
}
isChildOf(parent: Override) {
if (!parent.includeSubdirs) {
return false;
}
return parent.asRegex().test(this.baseRule);
}
output(): string {
return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
}
matchesPath(path: string) {
return this.asRegex().test(path);
}
}
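/*
 * Round-trip example (a minimal sketch; paths are placeholders):
 *
 *   Override.fromInput('!/path/to/dir', true).output();
 *   // => '!/path/to/dir/*'
 *   Override.fromFileRule('/path/to/dir/*').matchesPath('/path/to/dir/sub/');
 *   // => true
 */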
const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
// Normalize separators to forward slashes for consistent matching across platforms.
let result = dirPath.replace(/\\/g, '/');
if (result.charAt(0) !== '/') {
result = '/' + result;
}
if (result.charAt(result.length - 1) !== '/') {
result = result + '/';
}
return result;
};
/**
* Converts a glob pattern to a RegExp object.
* This is a simplified implementation that supports `*`.
*
* @param glob The glob pattern to convert.
* @returns A RegExp object.
*/
function globToRegex(glob: string): RegExp {
const regexString = glob
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
.replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group
return new RegExp(`^${regexString}$`);
}
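// e.g. globToRegex('/path/to/dir/*') produces ^/path/to/dir(/.*)?$, which
// matches '/path/to/dir/' and '/path/to/dir/sub/' but not '/path/to/directory/'.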
export class ExtensionEnablementManager {
private configFilePath: string;
private configDir: string;
// If non-empty, this overrides all other extension configuration and enables
// only the ones in this list.
private enabledExtensionNamesOverride: string[];
constructor(configDir: string, enabledExtensionNames?: string[]) {
this.configDir = configDir;
this.configFilePath = path.join(configDir, 'extension-enablement.json');
this.enabledExtensionNamesOverride =
enabledExtensionNames?.map((name) => name.toLowerCase()) ?? [];
}
validateExtensionOverrides(extensions: Extension[]) {
for (const name of this.enabledExtensionNamesOverride) {
if (name === 'none') continue;
if (
!extensions.some(
(ext) => ext.config.name.toLowerCase() === name.toLowerCase(),
)
) {
console.error(`Extension not found: ${name}`);
}
}
}
/**
* Determines if an extension is enabled based on its name and the current
* path. The last matching rule in the overrides list wins.
*
* @param extensionName The name of the extension.
* @param currentPath The absolute path of the current working directory.
* @returns True if the extension is enabled, false otherwise.
*/
isEnabled(extensionName: string, currentPath: string): boolean {
// If we have a single override called 'none', this disables all extensions.
// Typically, this comes from the user passing `-e none`.
if (
this.enabledExtensionNamesOverride.length === 1 &&
this.enabledExtensionNamesOverride[0] === 'none'
) {
return false;
}
// If we have explicit overrides, only enable those extensions.
if (this.enabledExtensionNamesOverride.length > 0) {
// When checking against overrides ONLY, we use a case insensitive match.
// The override names are already lowercased in the constructor.
return this.enabledExtensionNamesOverride.includes(
extensionName.toLocaleLowerCase(),
);
}
// Otherwise, we use the configuration settings
const config = this.readConfig();
const extensionConfig = config[extensionName];
// Extensions are enabled by default.
let enabled = true;
const allOverrides = extensionConfig?.overrides ?? [];
for (const rule of allOverrides) {
const override = Override.fromFileRule(rule);
if (override.matchesPath(ensureLeadingAndTrailingSlash(currentPath))) {
enabled = !override.isDisable;
}
}
return enabled;
}
readConfig(): AllExtensionsEnablementConfig {
try {
const content = fs.readFileSync(this.configFilePath, 'utf-8');
return JSON.parse(content);
} catch (error) {
if (
error instanceof Error &&
'code' in error &&
error.code === 'ENOENT'
) {
return {};
}
console.error('Error reading extension enablement config:', error);
return {};
}
}
writeConfig(config: AllExtensionsEnablementConfig): void {
fs.mkdirSync(this.configDir, { recursive: true });
fs.writeFileSync(this.configFilePath, JSON.stringify(config, null, 2));
}
enable(
extensionName: string,
includeSubdirs: boolean,
scopePath: string,
): void {
const config = this.readConfig();
if (!config[extensionName]) {
config[extensionName] = { overrides: [] };
}
const override = Override.fromInput(scopePath, includeSubdirs);
const overrides = config[extensionName].overrides.filter((rule) => {
const fileOverride = Override.fromFileRule(rule);
if (
fileOverride.conflictsWith(override) ||
fileOverride.isEqualTo(override)
) {
return false; // Remove conflicts and equivalent values.
}
return !fileOverride.isChildOf(override);
});
overrides.push(override.output());
config[extensionName].overrides = overrides;
this.writeConfig(config);
}
disable(
extensionName: string,
includeSubdirs: boolean,
scopePath: string,
): void {
this.enable(extensionName, includeSubdirs, `!${scopePath}`);
}
remove(extensionName: string): void {
const config = this.readConfig();
if (config[extensionName]) {
delete config[extensionName];
this.writeConfig(config);
}
}
}
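/*
 * Illustrative usage (a minimal sketch; the config directory and paths below
 * are placeholders):
 *
 *   const manager = new ExtensionEnablementManager('/home/user/.qwen');
 *   manager.disable('my-ext', true, '/');                   // writes '!/*'
 *   manager.enable('my-ext', true, '/home/user/projects');  // writes '/home/user/projects/*'
 *   manager.isEnabled('my-ext', '/home/user/projects/app'); // true  (last matching rule wins)
 *   manager.isEnabled('my-ext', '/tmp');                    // false (only the '!/*' rule matches)
 */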

View File

@@ -1,468 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
EXTENSIONS_CONFIG_FILENAME,
ExtensionStorage,
INSTALL_METADATA_FILENAME,
annotateActiveExtensions,
loadExtension,
} from '../extension.js';
import { checkForAllExtensionUpdates, updateExtension } from './update.js';
import { QWEN_DIR } from '@qwen-code/qwen-code-core';
import { isWorkspaceTrusted } from '../trustedFolders.js';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { createExtension } from '../../test-utils/createExtension.js';
import { ExtensionEnablementManager } from './extensionEnablement.js';
const mockGit = {
clone: vi.fn(),
getRemotes: vi.fn(),
fetch: vi.fn(),
checkout: vi.fn(),
listRemote: vi.fn(),
revparse: vi.fn(),
// Not a part of the actual API, but we need to use this to do the correct
// file system interactions.
path: vi.fn(),
};
vi.mock('simple-git', () => ({
simpleGit: vi.fn((path: string) => {
mockGit.path.mockReturnValue(path);
return mockGit;
}),
}));
vi.mock('../extensions/github.js', async (importOriginal) => {
const actual =
await importOriginal<typeof import('../extensions/github.js')>();
return {
...actual,
downloadFromGitHubRelease: vi
.fn()
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
};
});
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: vi.fn(),
};
});
vi.mock('../trustedFolders.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../trustedFolders.js')>();
return {
...actual,
isWorkspaceTrusted: vi.fn(),
};
});
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
const actual =
await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
return {
...actual,
logExtensionInstallEvent: mockLogExtensionInstallEvent,
logExtensionUninstall: mockLogExtensionUninstall,
ExtensionInstallEvent: vi.fn(),
ExtensionUninstallEvent: vi.fn(),
};
});
describe('update tests', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'qwen-code-test-home-'),
);
tempWorkspaceDir = fs.mkdtempSync(
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
);
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
// Clean up before each test
fs.rmSync(userExtensionsDir, { recursive: true, force: true });
fs.mkdirSync(userExtensionsDir, { recursive: true });
vi.mocked(isWorkspaceTrusted).mockReturnValue({
isTrusted: true,
source: 'file',
});
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
Object.values(mockGit).forEach((fn) => fn.mockReset());
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
});
describe('updateExtension', () => {
it('should update a git-installed extension', async () => {
const gitUrl = 'https://github.com/google/gemini-extensions.git';
const extensionName = 'qwen-extensions';
const targetExtDir = path.join(userExtensionsDir, extensionName);
const metadataPath = path.join(targetExtDir, INSTALL_METADATA_FILENAME);
fs.mkdirSync(targetExtDir, { recursive: true });
fs.writeFileSync(
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.0.0' }),
);
fs.writeFileSync(
metadataPath,
JSON.stringify({ source: gitUrl, type: 'git' }),
);
mockGit.clone.mockImplementation(async (_, destination) => {
fs.mkdirSync(path.join(mockGit.path(), destination), {
recursive: true,
});
fs.writeFileSync(
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.1.0' }),
);
});
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: targetExtDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const updateInfo = await updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
() => {},
);
expect(updateInfo).toEqual({
name: 'qwen-extensions',
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
});
const updatedConfig = JSON.parse(
fs.readFileSync(
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
'utf-8',
),
);
expect(updatedConfig.version).toBe('1.1.0');
});
it('should call setExtensionUpdateState with UPDATING and then UPDATED_NEEDS_RESTART on success', async () => {
const extensionName = 'test-extension';
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: extensionName,
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
mockGit.clone.mockImplementation(async (_, destination) => {
fs.mkdirSync(path.join(mockGit.path(), destination), {
recursive: true,
});
fs.writeFileSync(
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.1.0' }),
);
});
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const dispatch = vi.fn();
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
await updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
dispatch,
);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATING,
},
});
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
});
it('should call setExtensionUpdateState with ERROR on failure', async () => {
const extensionName = 'test-extension';
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: extensionName,
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
mockGit.clone.mockRejectedValue(new Error('Git clone failed'));
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const dispatch = vi.fn();
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
await expect(
updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
dispatch,
),
).rejects.toThrow();
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATING,
},
});
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.ERROR,
},
});
});
});
describe('checkForAllExtensionUpdates', () => {
it('should return UpdateAvailable for a git extension with updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('remoteHash HEAD');
mockGit.revparse.mockResolvedValue('localHash');
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});
it('should return UpToDate for a git extension with no updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('sameHash HEAD');
mockGit.revparse.mockResolvedValue('sameHash');
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});
it('should return UpToDate for a local extension with no updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.0.0',
});
const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});
it('should return UpdateAvailable for a local extension with updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.1.0',
});
const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});
it('should return Error when git check fails', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'error-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockRejectedValue(new Error('Git error'));
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'error-extension',
state: ExtensionUpdateState.ERROR,
},
});
});
});
});

View File

@@ -1,182 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import {
type ExtensionUpdateAction,
ExtensionUpdateState,
type ExtensionUpdateStatus,
} from '../../ui/state/extensions.js';
import {
copyExtension,
installExtension,
uninstallExtension,
loadExtension,
loadInstallMetadata,
ExtensionStorage,
loadExtensionConfig,
} from '../extension.js';
import { checkForExtensionUpdate } from './github.js';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import { getErrorMessage } from '../../utils/errors.js';
export interface ExtensionUpdateInfo {
name: string;
originalVersion: string;
updatedVersion: string;
}
export async function updateExtension(
extension: GeminiCLIExtension,
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
currentState: ExtensionUpdateState,
dispatchExtensionStateUpdate: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo | undefined> {
if (currentState === ExtensionUpdateState.UPDATING) {
return undefined;
}
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UPDATING },
});
const installMetadata = loadInstallMetadata(extension.path);
if (!installMetadata?.type) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error(
`Extension ${extension.name} cannot be updated, type is unknown.`,
);
}
if (installMetadata?.type === 'link') {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UP_TO_DATE },
});
    throw new Error(`Extension is linked, so it does not need to be updated.`);
}
const originalVersion = extension.version;
const tempDir = await ExtensionStorage.createTmpDir();
try {
await copyExtension(extension.path, tempDir);
const previousExtensionConfig = await loadExtensionConfig({
extensionDir: extension.path,
workspaceDir: cwd,
});
await uninstallExtension(extension.name, cwd);
await installExtension(
installMetadata,
requestConsent,
cwd,
previousExtensionConfig,
);
const updatedExtensionStorage = new ExtensionStorage(extension.name);
const updatedExtension = loadExtension({
extensionDir: updatedExtensionStorage.getExtensionDir(),
workspaceDir: cwd,
});
if (!updatedExtension) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error('Updated extension not found after installation.');
}
const updatedVersion = updatedExtension.config.version;
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
return {
name: extension.name,
originalVersion,
updatedVersion,
};
} catch (e) {
console.error(
`Error updating extension, rolling back. ${getErrorMessage(e)}`,
);
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
await copyExtension(tempDir, extension.path);
throw e;
} finally {
await fs.promises.rm(tempDir, { recursive: true, force: true });
}
}
export async function updateAllUpdatableExtensions(
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
extensions: GeminiCLIExtension[],
extensionsState: Map<string, ExtensionUpdateStatus>,
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo[]> {
return (
await Promise.all(
extensions
.filter(
(extension) =>
extensionsState.get(extension.name)?.status ===
ExtensionUpdateState.UPDATE_AVAILABLE,
)
.map((extension) =>
updateExtension(
extension,
cwd,
requestConsent,
extensionsState.get(extension.name)!.status,
dispatch,
),
),
)
).filter((updateInfo) => !!updateInfo);
}
export interface ExtensionUpdateCheckResult {
state: ExtensionUpdateState;
error?: string;
}
export async function checkForAllExtensionUpdates(
extensions: GeminiCLIExtension[],
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<void> {
dispatch({ type: 'BATCH_CHECK_START' });
const promises: Array<Promise<void>> = [];
for (const extension of extensions) {
if (!extension.installMetadata) {
dispatch({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.NOT_UPDATABLE,
},
});
continue;
}
promises.push(
checkForExtensionUpdate(extension, (updatedState) => {
dispatch({
type: 'SET_STATE',
payload: { name: extension.name, state: updatedState },
});
}),
);
}
await Promise.all(promises);
dispatch({ type: 'BATCH_CHECK_END' });
}
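/*
 * Illustrative flow (a minimal sketch): `dispatch` is assumed to be the
 * reducer dispatch for the extensions UI state, and `requestConsent` any
 * consent callback such as `requestConsentNonInteractive` from the install
 * module.
 *
 *   await checkForAllExtensionUpdates(extensions, dispatch);
 *   const updated = await updateAllUpdatableExtensions(
 *     process.cwd(),
 *     requestConsent,
 *     extensions,
 *     extensionsState,
 *     dispatch,
 *   );
 *   updated.forEach((u) =>
 *     console.log(`${u.name}: ${u.originalVersion} -> ${u.updatedVersion}`),
 *   );
 */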

View File

@@ -51,7 +51,6 @@ import {
import * as fs from 'node:fs'; // fs will be mocked separately
import stripJsonComments from 'strip-json-comments'; // Will be mocked separately
import { isWorkspaceTrusted } from './trustedFolders.js';
import { disableExtension } from './extension.js';
// These imports will get the versions from the vi.mock('./settings.js', ...) factory.
import {
@@ -64,8 +63,6 @@ import {
needsMigration,
type Settings,
loadEnvironment,
migrateDeprecatedSettings,
SettingScope,
SETTINGS_VERSION,
SETTINGS_VERSION_KEY,
} from './settings.js';
@@ -2649,122 +2646,4 @@ describe('Settings Loading and Merging', () => {
});
});
});
describe('migrateDeprecatedSettings', () => {
let mockFsExistsSync: Mocked<typeof fs.existsSync>;
let mockFsReadFileSync: Mocked<typeof fs.readFileSync>;
let mockDisableExtension: Mocked<typeof disableExtension>;
beforeEach(() => {
vi.resetAllMocks();
mockFsExistsSync = vi.mocked(fs.existsSync);
mockFsReadFileSync = vi.mocked(fs.readFileSync);
mockDisableExtension = vi.mocked(disableExtension);
(mockFsExistsSync as Mock).mockReturnValue(true);
vi.mocked(isWorkspaceTrusted).mockReturnValue(true);
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should migrate disabled extensions from user and workspace settings', () => {
const userSettingsContent = {
extensions: {
disabled: ['user-ext-1', 'shared-ext'],
},
};
const workspaceSettingsContent = {
extensions: {
disabled: ['workspace-ext-1', 'shared-ext'],
},
};
(mockFsReadFileSync as Mock).mockImplementation(
(p: fs.PathOrFileDescriptor) => {
if (p === USER_SETTINGS_PATH)
return JSON.stringify(userSettingsContent);
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
return JSON.stringify(workspaceSettingsContent);
return '{}';
},
);
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
// Check user settings migration
expect(mockDisableExtension).toHaveBeenCalledWith(
'user-ext-1',
SettingScope.User,
MOCK_WORKSPACE_DIR,
);
expect(mockDisableExtension).toHaveBeenCalledWith(
'shared-ext',
SettingScope.User,
MOCK_WORKSPACE_DIR,
);
// Check workspace settings migration
expect(mockDisableExtension).toHaveBeenCalledWith(
'workspace-ext-1',
SettingScope.Workspace,
MOCK_WORKSPACE_DIR,
);
expect(mockDisableExtension).toHaveBeenCalledWith(
'shared-ext',
SettingScope.Workspace,
MOCK_WORKSPACE_DIR,
);
// Check that setValue was called to remove the deprecated setting
expect(setValueSpy).toHaveBeenCalledWith(
SettingScope.User,
'extensions',
{
disabled: undefined,
},
);
expect(setValueSpy).toHaveBeenCalledWith(
SettingScope.Workspace,
'extensions',
{
disabled: undefined,
},
);
});
it('should not do anything if there are no deprecated settings', () => {
const userSettingsContent = {
extensions: {
enabled: ['user-ext-1'],
},
};
const workspaceSettingsContent = {
someOtherSetting: 'value',
};
(mockFsReadFileSync as Mock).mockImplementation(
(p: fs.PathOrFileDescriptor) => {
if (p === USER_SETTINGS_PATH)
return JSON.stringify(userSettingsContent);
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
return JSON.stringify(workspaceSettingsContent);
return '{}';
},
);
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
expect(mockDisableExtension).not.toHaveBeenCalled();
expect(setValueSpy).not.toHaveBeenCalled();
});
});
});

View File

@@ -30,7 +30,6 @@ import {
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
import { customDeepMerge, type MergeableObject } from '../utils/deepMerge.js';
import { updateSettingsFilePreservingFormat } from '../utils/commentJson.js';
import { disableExtension } from './extension.js';
function getMergeStrategyForPath(path: string[]): MergeStrategy | undefined {
let current: SettingDefinition | undefined = undefined;
@@ -81,7 +80,6 @@ const MIGRATION_MAP: Record<string, string> = {
excludeTools: 'tools.exclude',
excludeMCPServers: 'mcp.excluded',
excludedProjectEnvVars: 'advanced.excludedEnvVars',
extensionManagement: 'experimental.extensionManagement',
extensions: 'extensions',
fileFiltering: 'context.fileFiltering',
folderTrustFeature: 'security.folderTrust.featureEnabled',
@@ -812,31 +810,6 @@ export function loadSettings(
);
}
export function migrateDeprecatedSettings(
loadedSettings: LoadedSettings,
workspaceDir: string = process.cwd(),
): void {
const processScope = (scope: SettingScope) => {
const settings = loadedSettings.forScope(scope).settings;
if (settings.extensions?.disabled) {
console.log(
`Migrating deprecated extensions.disabled settings from ${scope} settings...`,
);
for (const extension of settings.extensions.disabled ?? []) {
disableExtension(extension, scope, workspaceDir);
}
const newExtensionsValue = { ...settings.extensions };
newExtensionsValue.disabled = undefined;
loadedSettings.setValue(scope, 'extensions', newExtensionsValue);
}
};
processScope(SettingScope.User);
processScope(SettingScope.Workspace);
}
export function saveSettings(settingsFile: SettingsFile): void {
try {
// Ensure the directory exists

View File

@@ -1192,15 +1192,6 @@ const SETTINGS_SCHEMA = {
description: 'Setting to enable experimental features',
showInDialog: false,
properties: {
extensionManagement: {
type: 'boolean',
label: 'Extension Management',
category: 'Experimental',
requiresRestart: true,
default: true,
description: 'Enable extension management features.',
showInDialog: false,
},
visionModelPreview: {
type: 'boolean',
label: 'Vision Model Preview',
@@ -1223,39 +1214,6 @@ const SETTINGS_SCHEMA = {
},
},
},
extensions: {
type: 'object',
label: 'Extensions',
category: 'Extensions',
requiresRestart: true,
default: {},
description: 'Settings for extensions.',
showInDialog: false,
properties: {
disabled: {
type: 'array',
label: 'Disabled Extensions',
category: 'Extensions',
requiresRestart: true,
default: [] as string[],
description: 'List of disabled extensions.',
showInDialog: false,
mergeStrategy: MergeStrategy.UNION,
},
workspacesWithMigrationNudge: {
type: 'array',
label: 'Workspaces with Migration Nudge',
category: 'Extensions',
requiresRestart: false,
default: [] as string[],
description:
'List of workspaces for which the migration nudge has been shown.',
showInDialog: false,
mergeStrategy: MergeStrategy.UNION,
},
},
},
} as const satisfies SettingsSchema;
export type SettingsSchemaType = typeof SETTINGS_SCHEMA;

View File

@@ -262,7 +262,6 @@ describe('gemini.tsx main function', () => {
);
const { loadSettings } = await import('./config/settings.js');
const cleanupModule = await import('./utils/cleanup.js');
const extensionModule = await import('./config/extension.js');
const validatorModule = await import('./validateNonInterActiveAuth.js');
const streamJsonModule = await import('./nonInteractive/session.js');
const initializerModule = await import('./core/initializer.js');
@@ -275,11 +274,6 @@ describe('gemini.tsx main function', () => {
vi.mocked(cleanupModule.registerCleanup).mockImplementation(() => {});
const runExitCleanupMock = vi.mocked(cleanupModule.runExitCleanup);
runExitCleanupMock.mockResolvedValue(undefined);
vi.spyOn(extensionModule, 'loadExtensions').mockReturnValue([]);
vi.spyOn(
extensionModule.ExtensionStorage,
'getUserExtensionsDir',
).mockReturnValue('/tmp/extensions');
vi.spyOn(initializerModule, 'initializeApp').mockResolvedValue({
authError: null,
themeError: null,

View File

@@ -15,9 +15,8 @@ import React from 'react';
import { validateAuthMethod } from './config/auth.js';
import * as cliConfig from './config/config.js';
import { loadCliConfig, parseArguments } from './config/config.js';
import { ExtensionStorage, loadExtensions } from './config/extension.js';
import type { DnsResolutionOrder, LoadedSettings } from './config/settings.js';
import { loadSettings, migrateDeprecatedSettings } from './config/settings.js';
import { loadSettings } from './config/settings.js';
import {
initializeApp,
type InitializationResult,
@@ -103,7 +102,6 @@ function getNodeMemoryArgs(isDebugMode: boolean): string[] {
return [];
}
import { ExtensionEnablementManager } from './config/extensions/extensionEnablement.js';
import { loadSandboxConfig } from './config/sandboxConfig.js';
import { runAcpAgent } from './acp-integration/acpAgent.js';
@@ -200,10 +198,9 @@ export async function startInteractiveUI(
export async function main() {
setupUnhandledRejectionHandler();
const settings = loadSettings();
migrateDeprecatedSettings(settings);
await cleanupCheckpoints();
let argv = await parseArguments(settings.merged);
let argv = await parseArguments();
// Check for invalid input combinations early to prevent crashes
if (argv.promptInteractive && !process.stdin.isTTY) {
@@ -245,9 +242,9 @@ export async function main() {
if (sandboxConfig) {
const partialConfig = await loadCliConfig(
settings.merged,
[],
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
argv,
undefined,
[],
);
if (
@@ -331,25 +328,21 @@ export async function main() {
// to run Gemini CLI. It is now safe to perform expensive initialization that
// may have side effects.
{
const extensionEnablementManager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
argv.extensions,
);
const extensions = loadExtensions(extensionEnablementManager);
const config = await loadCliConfig(
settings.merged,
extensions,
extensionEnablementManager,
argv,
process.cwd(),
argv.extensions,
);
if (config.getListExtensions()) {
console.log('Installed extensions:');
for (const extension of extensions) {
console.log(`- ${extension.config.name}`);
}
process.exit(0);
}
    // FIXME: list extensions after the config is initialized
// if (config.getListExtensions()) {
// console.log('Installed extensions:');
// for (const extension of extensions) {
// console.log(`- ${extension.config.name}`);
// }
// process.exit(0);
// }
// Setup unified ConsolePatcher based on interactive mode
const isInteractive = config.isInteractive();
@@ -395,7 +388,7 @@ export async function main() {
}
if (config.getExperimentalZedIntegration()) {
return runAcpAgent(config, settings, extensions, argv);
return runAcpAgent(config, settings, argv);
}
let input = config.getQuestion();

View File

@@ -0,0 +1,327 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { FileCommandLoader } from './FileCommandLoader.js';
import type { Config } from '@qwen-code/qwen-code-core';
import { Storage } from '@qwen-code/qwen-code-core';
describe('FileCommandLoader - Extension Commands Support', () => {
let tempDir: string;
let mockConfig: Partial<Config>;
beforeEach(async () => {
tempDir = await fs.promises.mkdtemp(
path.join(os.tmpdir(), 'file-command-loader-ext-test-'),
);
mockConfig = {
getFolderTrustFeature: () => false,
getFolderTrust: () => true,
getProjectRoot: () => tempDir,
storage: new Storage(tempDir),
getExtensions: () => [],
};
});
afterEach(async () => {
await fs.promises.rm(tempDir, { recursive: true, force: true });
});
it('should load commands from extension with config.commands path', async () => {
// Setup extension structure
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'test-ext');
const customCommandsDir = path.join(extensionDir, 'custom-cmds');
await fs.promises.mkdir(customCommandsDir, { recursive: true });
// Create extension config with custom commands path
const extensionConfig = {
name: 'test-ext',
version: '1.0.0',
commands: 'custom-cmds',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create a test command in custom directory
const commandContent =
'---\ndescription: Test command from extension\n---\nDo something';
await fs.promises.writeFile(
path.join(customCommandsDir, 'test.md'),
commandContent,
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'test-ext',
config: extensionConfig,
name: 'test-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
contextFiles: [],
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('test-ext:test');
expect(commands[0].description).toBe(
'[test-ext] Test command from extension',
);
});
it('should load commands from extension with multiple commands paths', async () => {
// Setup extension structure
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'multi-ext');
const cmdsDir1 = path.join(extensionDir, 'commands1');
const cmdsDir2 = path.join(extensionDir, 'commands2');
await fs.promises.mkdir(cmdsDir1, { recursive: true });
await fs.promises.mkdir(cmdsDir2, { recursive: true });
// Create extension config with multiple commands paths
const extensionConfig = {
name: 'multi-ext',
version: '1.0.0',
commands: ['commands1', 'commands2'],
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create test commands in both directories
await fs.promises.writeFile(
path.join(cmdsDir1, 'cmd1.md'),
'---\n---\nCommand 1',
);
await fs.promises.writeFile(
path.join(cmdsDir2, 'cmd2.md'),
'---\n---\nCommand 2',
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'multi-ext',
config: extensionConfig,
contextFiles: [],
name: 'multi-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(2);
const commandNames = commands.map((c) => c.name).sort();
expect(commandNames).toEqual(['multi-ext:cmd1', 'multi-ext:cmd2']);
});
it('should fallback to default "commands" directory when config.commands not specified', async () => {
// Setup extension structure with default commands directory
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'default-ext',
);
const defaultCommandsDir = path.join(extensionDir, 'commands');
await fs.promises.mkdir(defaultCommandsDir, { recursive: true });
// Create extension config without commands field
const extensionConfig = {
name: 'default-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create a test command in default directory
await fs.promises.writeFile(
path.join(defaultCommandsDir, 'default.md'),
'---\n---\nDefault command',
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'default-ext',
config: extensionConfig,
contextFiles: [],
name: 'default-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('default-ext:default');
});
it('should handle extension without commands directory gracefully', async () => {
// Setup extension structure without commands directory
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'no-cmds-ext',
);
await fs.promises.mkdir(extensionDir, { recursive: true });
// Create extension config
const extensionConfig = {
name: 'no-cmds-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'no-cmds-ext',
config: extensionConfig,
contextFiles: [],
name: 'no-cmds-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
// Should not throw and return empty array
expect(commands).toHaveLength(0);
});
it('should prefix extension commands with extension name', async () => {
// Setup extension
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'prefix-ext',
);
const commandsDir = path.join(extensionDir, 'commands');
await fs.promises.mkdir(commandsDir, { recursive: true });
const extensionConfig = {
name: 'prefix-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
await fs.promises.writeFile(
path.join(commandsDir, 'mycommand.md'),
'---\n---\nMy command',
);
mockConfig.getExtensions = () => [
{
id: 'prefix-ext',
config: extensionConfig,
contextFiles: [],
name: 'prefix-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('prefix-ext:mycommand');
});
it('should load commands from multiple extensions in alphabetical order', async () => {
// Setup two extensions
const ext1Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-b');
const ext2Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-a');
await fs.promises.mkdir(path.join(ext1Dir, 'commands'), {
recursive: true,
});
await fs.promises.mkdir(path.join(ext2Dir, 'commands'), {
recursive: true,
});
// Extension B
await fs.promises.writeFile(
path.join(ext1Dir, 'qwen-extension.json'),
JSON.stringify({ name: 'ext-b', version: '1.0.0' }),
);
await fs.promises.writeFile(
path.join(ext1Dir, 'commands', 'cmd.md'),
'---\n---\nCommand B',
);
// Extension A
await fs.promises.writeFile(
path.join(ext2Dir, 'qwen-extension.json'),
JSON.stringify({ name: 'ext-a', version: '1.0.0' }),
);
await fs.promises.writeFile(
path.join(ext2Dir, 'commands', 'cmd.md'),
'---\n---\nCommand A',
);
mockConfig.getExtensions = () => [
{
id: 'ext-b',
config: { name: 'ext-b', version: '1.0.0' },
contextFiles: [],
name: 'ext-b',
version: '1.0.0',
isActive: true,
path: ext1Dir,
},
{
id: 'ext-a',
config: { name: 'ext-a', version: '1.0.0' },
contextFiles: [],
name: 'ext-a',
version: '1.0.0',
isActive: true,
path: ext2Dir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(2);
// Extensions are sorted alphabetically, so ext-a comes before ext-b
expect(commands[0].name).toBe('ext-a:cmd');
expect(commands[1].name).toBe('ext-b:cmd');
});
});

View File

@@ -0,0 +1,117 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import { FileCommandLoader } from './FileCommandLoader.js';
describe('FileCommandLoader - Markdown support', () => {
let tempDir: string;
beforeAll(async () => {
// Create a temporary directory for test commands
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-md-test-'));
});
afterAll(async () => {
// Clean up
await fs.rm(tempDir, { recursive: true, force: true });
});
it('should load markdown commands with frontmatter', async () => {
// Create a test markdown command file
const mdContent = `---
description: Test markdown command
---
This is a test prompt from markdown.`;
const commandPath = path.join(tempDir, 'test-command.md');
await fs.writeFile(commandPath, mdContent, 'utf-8');
// Create loader with temp dir as command source
const loader = new FileCommandLoader(null);
// Mock the getCommandDirectories to return our temp dir
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('test-command');
expect(commands[0].description).toBe('Test markdown command');
} finally {
// Restore original method
loader['getCommandDirectories'] = originalMethod;
}
});
it('should load markdown commands without frontmatter', async () => {
// Create a test markdown command file without frontmatter
const mdContent = 'This is a simple prompt without frontmatter.';
const commandPath = path.join(tempDir, 'simple-command.md');
await fs.writeFile(commandPath, mdContent, 'utf-8');
const loader = new FileCommandLoader(null);
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
const simpleCommand = commands.find(
(cmd) => cmd.name === 'simple-command',
);
expect(simpleCommand).toBeDefined();
expect(simpleCommand?.description).toContain('Custom command from');
} finally {
loader['getCommandDirectories'] = originalMethod;
}
});
it('should load both toml and markdown commands', async () => {
// Create both TOML and Markdown files
const tomlContent = `prompt = "TOML prompt"
description = "TOML command"`;
const mdContent = `---
description: Markdown command
---
Markdown prompt`;
await fs.writeFile(
path.join(tempDir, 'toml-cmd.toml'),
tomlContent,
'utf-8',
);
await fs.writeFile(path.join(tempDir, 'md-cmd.md'), mdContent, 'utf-8');
const loader = new FileCommandLoader(null);
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
const tomlCommand = commands.find((cmd) => cmd.name === 'toml-cmd');
const mdCommand = commands.find((cmd) => cmd.name === 'md-cmd');
expect(tomlCommand).toBeDefined();
expect(tomlCommand?.description).toBe('TOML command');
expect(mdCommand).toBeDefined();
expect(mdCommand?.description).toBe('Markdown command');
} finally {
loader['getCommandDirectories'] = originalMethod;
}
});
});

View File

@@ -568,9 +568,9 @@ describe('FileCommandLoader', () => {
expect(commands).toHaveLength(3);
const commandNames = commands.map((cmd) => cmd.name);
expect(commandNames).toEqual(['user', 'project', 'ext']);
expect(commandNames).toEqual(['user', 'project', 'test-ext:ext']);
const extCommand = commands.find((cmd) => cmd.name === 'ext');
const extCommand = commands.find((cmd) => cmd.name === 'test-ext:ext');
expect(extCommand?.extensionName).toBe('test-ext');
expect(extCommand?.description).toMatch(/^\[test-ext\]/);
});
@@ -656,14 +656,14 @@ describe('FileCommandLoader', () => {
expect(result1.content).toEqual([{ text: 'Project deploy command' }]);
}
expect(commands[2].name).toBe('deploy');
expect(commands[2].name).toBe('test-ext:deploy');
expect(commands[2].extensionName).toBe('test-ext');
expect(commands[2].description).toMatch(/^\[test-ext\]/);
const result2 = await commands[2].action?.(
createMockCommandContext({
invocation: {
raw: '/deploy',
name: 'deploy',
raw: '/test-ext:deploy',
name: 'test-ext:deploy',
args: '',
},
}),
@@ -729,7 +729,7 @@ describe('FileCommandLoader', () => {
const commands = await loader.loadCommands(signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('active');
expect(commands[0].name).toBe('active-ext:active');
expect(commands[0].extensionName).toBe('active-ext');
expect(commands[0].description).toMatch(/^\[active-ext\]/);
});
@@ -803,17 +803,17 @@ describe('FileCommandLoader', () => {
expect(commands).toHaveLength(3);
const commandNames = commands.map((cmd) => cmd.name).sort();
expect(commandNames).toEqual(['b:c', 'b:d:e', 'simple']);
expect(commandNames).toEqual(['a:b:c', 'a:b:d:e', 'a:simple']);
const nestedCmd = commands.find((cmd) => cmd.name === 'b:c');
const nestedCmd = commands.find((cmd) => cmd.name === 'a:b:c');
expect(nestedCmd?.extensionName).toBe('a');
expect(nestedCmd?.description).toMatch(/^\[a\]/);
expect(nestedCmd).toBeDefined();
const result = await nestedCmd!.action?.(
createMockCommandContext({
invocation: {
raw: '/b:c',
name: 'b:c',
raw: '/a:b:c',
name: 'a:b:c',
args: '',
},
}),

View File

@@ -5,34 +5,23 @@
*/
import { promises as fs } from 'node:fs';
import * as fsSync from 'node:fs';
import path from 'node:path';
import toml from '@iarna/toml';
import { glob } from 'glob';
import { z } from 'zod';
import type { Config } from '@qwen-code/qwen-code-core';
import { Storage } from '@qwen-code/qwen-code-core';
import { EXTENSIONS_CONFIG_FILENAME, Storage } from '@qwen-code/qwen-code-core';
import type { ICommandLoader } from './types.js';
import type {
CommandContext,
SlashCommand,
SlashCommandActionReturn,
} from '../ui/commands/types.js';
import { CommandKind } from '../ui/commands/types.js';
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
import type {
IPromptProcessor,
PromptPipelineContent,
} from './prompt-processors/types.js';
import {
SHORTHAND_ARGS_PLACEHOLDER,
SHELL_INJECTION_TRIGGER,
AT_FILE_INJECTION_TRIGGER,
} from './prompt-processors/types.js';
parseMarkdownCommand,
MarkdownCommandDefSchema,
} from './markdown-command-parser.js';
import {
ConfirmationRequiredError,
ShellProcessor,
} from './prompt-processors/shellProcessor.js';
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
createSlashCommandFromDefinition,
type CommandDefinition,
} from './command-factory.js';
import type { SlashCommand } from '../ui/commands/types.js';
interface CommandDirectory {
path: string;
@@ -96,7 +85,12 @@ export class FileCommandLoader implements ICommandLoader {
const commandDirs = this.getCommandDirectories();
for (const dirInfo of commandDirs) {
try {
const files = await glob('**/*.toml', {
// Scan both .toml and .md files
const tomlFiles = await glob('**/*.toml', {
...globOptions,
cwd: dirInfo.path,
});
const mdFiles = await glob('**/*.md', {
...globOptions,
cwd: dirInfo.path,
});
@@ -105,18 +99,28 @@ export class FileCommandLoader implements ICommandLoader {
return [];
}
const commandPromises = files.map((file) =>
this.parseAndAdaptFile(
// Process TOML files
const tomlCommandPromises = tomlFiles.map((file) =>
this.parseAndAdaptTomlFile(
path.join(dirInfo.path, file),
dirInfo.path,
dirInfo.extensionName,
),
);
const commands = (await Promise.all(commandPromises)).filter(
(cmd): cmd is SlashCommand => cmd !== null,
// Process Markdown files
const mdCommandPromises = mdFiles.map((file) =>
this.parseAndAdaptMarkdownFile(
path.join(dirInfo.path, file),
dirInfo.path,
dirInfo.extensionName,
),
);
const commands = (
await Promise.all([...tomlCommandPromises, ...mdCommandPromises])
).filter((cmd): cmd is SlashCommand => cmd !== null);
// Add all commands without deduplication
allCommands.push(...commands);
} catch (error) {
@@ -159,17 +163,73 @@ export class FileCommandLoader implements ICommandLoader {
.filter((ext) => ext.isActive)
.sort((a, b) => a.name.localeCompare(b.name)); // Sort alphabetically for deterministic loading
const extensionCommandDirs = activeExtensions.map((ext) => ({
path: path.join(ext.path, 'commands'),
extensionName: ext.name,
}));
// Collect command directories from each extension
for (const ext of activeExtensions) {
// Get commands paths from extension config
const commandsPaths = this.getExtensionCommandsPaths(ext);
dirs.push(...extensionCommandDirs);
for (const cmdPath of commandsPaths) {
dirs.push({
path: cmdPath,
extensionName: ext.name,
});
}
}
}
return dirs;
}
/**
* Get commands paths from an extension.
* Returns paths from config.commands if specified, otherwise defaults to 'commands' directory.
*/
private getExtensionCommandsPaths(ext: {
path: string;
name: string;
}): string[] {
// Try to get extension config
try {
const configPath = path.join(ext.path, EXTENSIONS_CONFIG_FILENAME);
if (fsSync.existsSync(configPath)) {
const configContent = fsSync.readFileSync(configPath, 'utf-8');
const config = JSON.parse(configContent);
if (config.commands) {
const commandsArray = Array.isArray(config.commands)
? config.commands
: [config.commands];
return commandsArray
.map((cmdPath: string) =>
path.isAbsolute(cmdPath) ? cmdPath : path.join(ext.path, cmdPath),
)
.filter((cmdPath: string) => {
try {
return fsSync.existsSync(cmdPath);
} catch {
return false;
}
});
}
}
} catch (error) {
console.warn(`Failed to read extension config for ${ext.name}:`, error);
}
// Default fallback: use 'commands' directory
const defaultPath = path.join(ext.path, 'commands');
try {
if (fsSync.existsSync(defaultPath)) {
return [defaultPath];
}
} catch {
// Ignore
}
return [];
}
/**
* Parses a single .toml file and transforms it into a SlashCommand object.
* @param filePath The absolute path to the .toml file.
@@ -177,7 +237,7 @@ export class FileCommandLoader implements ICommandLoader {
* @param extensionName Optional extension name to prefix commands with.
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
*/
private async parseAndAdaptFile(
private async parseAndAdaptTomlFile(
filePath: string,
baseDir: string,
extensionName?: string,
@@ -216,104 +276,79 @@ export class FileCommandLoader implements ICommandLoader {
const validDef = validationResult.data;
const relativePathWithExt = path.relative(baseDir, filePath);
const relativePath = relativePathWithExt.substring(
0,
relativePathWithExt.length - 5, // length of '.toml'
);
const baseCommandName = relativePath
.split(path.sep)
// Sanitize each path segment to prevent ambiguity. Since ':' is our
// namespace separator, we replace any literal colons in filenames
// with underscores to avoid naming conflicts.
.map((segment) => segment.replaceAll(':', '_'))
.join(':');
// Add extension name tag for extension commands
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
let description = validDef.description || defaultDescription;
if (extensionName) {
description = `[${extensionName}] ${description}`;
}
const processors: IPromptProcessor[] = [];
const usesArgs = validDef.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
const usesShellInjection = validDef.prompt.includes(
SHELL_INJECTION_TRIGGER,
);
const usesAtFileInjection = validDef.prompt.includes(
AT_FILE_INJECTION_TRIGGER,
);
// 1. @-File Injection (Security First).
// This runs first to ensure we're not executing shell commands that
// could dynamically generate malicious @-paths.
if (usesAtFileInjection) {
processors.push(new AtFileProcessor(baseCommandName));
}
// 2. Argument and Shell Injection.
// This runs after file content has been safely injected.
if (usesShellInjection || usesArgs) {
processors.push(new ShellProcessor(baseCommandName));
}
// 3. Default Argument Handling.
// Appends the raw invocation if no explicit {{args}} are used.
if (!usesArgs) {
processors.push(new DefaultArgumentProcessor());
}
return {
name: baseCommandName,
description,
kind: CommandKind.FILE,
// Use factory to create command
return createSlashCommandFromDefinition(
filePath,
baseDir,
validDef,
extensionName,
action: async (
context: CommandContext,
_args: string,
): Promise<SlashCommandActionReturn> => {
if (!context.invocation) {
console.error(
`[FileCommandLoader] Critical error: Command '${baseCommandName}' was executed without invocation context.`,
);
return {
type: 'submit_prompt',
content: [{ text: validDef.prompt }], // Fallback to unprocessed prompt
};
}
'.toml',
);
}
try {
let processedContent: PromptPipelineContent = [
{ text: validDef.prompt },
];
for (const processor of processors) {
processedContent = await processor.process(
processedContent,
context,
);
}
/**
* Parses a single .md file and transforms it into a SlashCommand object.
* @param filePath The absolute path to the .md file.
* @param baseDir The root command directory for name calculation.
* @param extensionName Optional extension name to prefix commands with.
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
*/
private async parseAndAdaptMarkdownFile(
filePath: string,
baseDir: string,
extensionName?: string,
): Promise<SlashCommand | null> {
let fileContent: string;
try {
fileContent = await fs.readFile(filePath, 'utf-8');
} catch (error: unknown) {
console.error(
`[FileCommandLoader] Failed to read file ${filePath}:`,
error instanceof Error ? error.message : String(error),
);
return null;
}
return {
type: 'submit_prompt',
content: processedContent,
};
} catch (e) {
// Check if it's our specific error type
if (e instanceof ConfirmationRequiredError) {
// Halt and request confirmation from the UI layer.
return {
type: 'confirm_shell_commands',
commandsToConfirm: e.commandsToConfirm,
originalInvocation: {
raw: context.invocation.raw,
},
};
}
// Re-throw other errors to be handled by the global error handler.
throw e;
}
},
let parsed: ReturnType<typeof parseMarkdownCommand>;
try {
parsed = parseMarkdownCommand(fileContent);
} catch (error: unknown) {
console.error(
`[FileCommandLoader] Failed to parse Markdown file ${filePath}:`,
error instanceof Error ? error.message : String(error),
);
return null;
}
const validationResult = MarkdownCommandDefSchema.safeParse(parsed);
if (!validationResult.success) {
console.error(
`[FileCommandLoader] Skipping invalid command file: ${filePath}. Validation errors:`,
validationResult.error.flatten(),
);
return null;
}
const validDef = validationResult.data;
// Convert to CommandDefinition format
const definition: CommandDefinition = {
prompt: validDef.prompt,
description:
validDef.frontmatter?.description &&
typeof validDef.frontmatter.description === 'string'
? validDef.frontmatter.description
: undefined,
};
// Use factory to create command
return createSlashCommandFromDefinition(
filePath,
baseDir,
definition,
extensionName,
'.md',
);
}
}
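As a quick, hedged illustration of the new loading path (the extension name, directories, and command file below are hypothetical, and we assume config.getExtensions() reports the extension as active; the naming behavior matches the tests earlier in this diff), an extension that declares a custom commands directory in its qwen-extension.json is discovered and its commands are namespaced with the extension name:

// ~/.qwen/extensions/my-ext/qwen-extension.json  (hypothetical)
//   { "name": "my-ext", "version": "1.0.0", "commands": "custom-cmds" }
// ~/.qwen/extensions/my-ext/custom-cmds/deploy.md
//   ---
//   description: Deploy the project
//   ---
//   Deploy the project to staging.
const loader = new FileCommandLoader(config);
const commands = await loader.loadCommands(new AbortController().signal);
// commands[0].name        === 'my-ext:deploy'
// commands[0].description === '[my-ext] Deploy the project'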

View File

@@ -0,0 +1,159 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* This file contains helper functions for FileCommandLoader to create SlashCommand
* objects from parsed command definitions (TOML or Markdown).
*/
import path from 'node:path';
import type {
CommandContext,
SlashCommand,
SlashCommandActionReturn,
} from '../ui/commands/types.js';
import { CommandKind } from '../ui/commands/types.js';
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
import type {
IPromptProcessor,
PromptPipelineContent,
} from './prompt-processors/types.js';
import {
SHORTHAND_ARGS_PLACEHOLDER,
SHELL_INJECTION_TRIGGER,
AT_FILE_INJECTION_TRIGGER,
} from './prompt-processors/types.js';
import {
ConfirmationRequiredError,
ShellProcessor,
} from './prompt-processors/shellProcessor.js';
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
export interface CommandDefinition {
prompt: string;
description?: string;
}
/**
* Creates a SlashCommand from a parsed command definition.
* This function is used by both TOML and Markdown command loaders.
*
* @param filePath The absolute path to the command file
* @param baseDir The root command directory for name calculation
* @param definition The parsed command definition (prompt and optional description)
* @param extensionName Optional extension name to prefix commands with
* @param fileExtension The file extension (e.g., '.toml' or '.md')
* @returns A SlashCommand object
*/
export function createSlashCommandFromDefinition(
filePath: string,
baseDir: string,
definition: CommandDefinition,
extensionName: string | undefined,
fileExtension: string,
): SlashCommand {
const relativePathWithExt = path.relative(baseDir, filePath);
const relativePath = relativePathWithExt.substring(
0,
relativePathWithExt.length - fileExtension.length,
);
const baseCommandName = relativePath
.split(path.sep)
// Sanitize each path segment to prevent ambiguity. Since ':' is our
// namespace separator, we replace any literal colons in filenames
// with underscores to avoid naming conflicts.
.map((segment) => segment.replaceAll(':', '_'))
.join(':');
// Prefix command name with extension name if provided
const commandName = extensionName
? `${extensionName}:${baseCommandName}`
: baseCommandName;
// Add extension name tag for extension commands
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
let description = definition.description || defaultDescription;
if (extensionName) {
description = `[${extensionName}] ${description}`;
}
const processors: IPromptProcessor[] = [];
const usesArgs = definition.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
const usesShellInjection = definition.prompt.includes(
SHELL_INJECTION_TRIGGER,
);
const usesAtFileInjection = definition.prompt.includes(
AT_FILE_INJECTION_TRIGGER,
);
// 1. @-File Injection (Security First).
// This runs first to ensure we're not executing shell commands that
// could dynamically generate malicious @-paths.
if (usesAtFileInjection) {
processors.push(new AtFileProcessor(baseCommandName));
}
// 2. Argument and Shell Injection.
// This runs after file content has been safely injected.
if (usesShellInjection || usesArgs) {
processors.push(new ShellProcessor(baseCommandName));
}
// 3. Default Argument Handling.
// Appends the raw invocation if no explicit {{args}} are used.
if (!usesArgs) {
processors.push(new DefaultArgumentProcessor());
}
return {
name: commandName,
description,
kind: CommandKind.FILE,
extensionName,
action: async (
context: CommandContext,
_args: string,
): Promise<SlashCommandActionReturn> => {
if (!context.invocation) {
console.error(
`[FileCommandLoader] Critical error: Command '${commandName}' was executed without invocation context.`,
);
return {
type: 'submit_prompt',
content: [{ text: definition.prompt }], // Fallback to unprocessed prompt
};
}
try {
let processedContent: PromptPipelineContent = [
{ text: definition.prompt },
];
for (const processor of processors) {
processedContent = await processor.process(processedContent, context);
}
return {
type: 'submit_prompt',
content: processedContent,
};
} catch (e) {
// Check if it's our specific error type
if (e instanceof ConfirmationRequiredError) {
// Halt and request confirmation from the UI layer.
return {
type: 'confirm_shell_commands',
commandsToConfirm: e.commandsToConfirm,
originalInvocation: {
raw: context.invocation.raw,
},
};
}
// Re-throw other errors to be handled by the global error handler.
throw e;
}
},
};
}
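A minimal sketch of how the factory is meant to be called (the file path and prompt text are made up; the signature and naming rules come from the function above):

const cmd = createSlashCommandFromDefinition(
  '/home/user/.qwen/commands/git/commit.md', // hypothetical command file
  '/home/user/.qwen/commands',               // base directory for name calculation
  { prompt: 'Summarize the staged changes.', description: 'Summarize staged changes' },
  undefined, // no extension prefix
  '.md',
);
// cmd.name === 'git:commit' (relative path segments joined with ':')
// cmd.kind === CommandKind.FILE
// The prompt contains no SHORTHAND_ARGS_PLACEHOLDER, so a DefaultArgumentProcessor
// is appended and the raw invocation is attached when the command runs.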

View File

@@ -0,0 +1,253 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import {
detectTomlCommands,
migrateTomlCommands,
generateMigrationPrompt,
} from './command-migration-tool.js';
describe('command-migration-tool', () => {
let tempDir: string;
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-migration-test-'));
});
afterEach(async () => {
await fs.rm(tempDir, { recursive: true, force: true });
});
describe('detectTomlCommands', () => {
it('should detect TOML files in directory', async () => {
// Create some TOML files
await fs.writeFile(
path.join(tempDir, 'cmd1.toml'),
'prompt = "test"',
'utf-8',
);
await fs.writeFile(
path.join(tempDir, 'cmd2.toml'),
'prompt = "test"',
'utf-8',
);
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toHaveLength(2);
expect(tomlFiles).toContain('cmd1.toml');
expect(tomlFiles).toContain('cmd2.toml');
});
it('should detect TOML files in subdirectories', async () => {
const subdir = path.join(tempDir, 'subdir');
await fs.mkdir(subdir);
await fs.writeFile(
path.join(subdir, 'nested.toml'),
'prompt = "test"',
'utf-8',
);
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toContain('subdir/nested.toml');
});
it('should return empty array for non-existent directory', async () => {
const nonExistent = path.join(tempDir, 'does-not-exist');
const tomlFiles = await detectTomlCommands(nonExistent);
expect(tomlFiles).toEqual([]);
});
it('should not detect non-TOML files', async () => {
await fs.writeFile(path.join(tempDir, 'file.txt'), 'text', 'utf-8');
await fs.writeFile(path.join(tempDir, 'file.md'), 'markdown', 'utf-8');
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toHaveLength(0);
});
});
describe('migrateTomlCommands', () => {
it('should migrate TOML file to Markdown', async () => {
const tomlContent = `prompt = "Test prompt"
description = "Test description"`;
await fs.writeFile(path.join(tempDir, 'test.toml'), tomlContent, 'utf-8');
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: true,
deleteOriginal: false,
});
expect(result.success).toBe(true);
expect(result.convertedFiles).toContain('test.toml');
expect(result.failedFiles).toHaveLength(0);
// Check Markdown file was created
const mdPath = path.join(tempDir, 'test.md');
const mdContent = await fs.readFile(mdPath, 'utf-8');
expect(mdContent).toContain('description: Test description');
expect(mdContent).toContain('Test prompt');
// Check backup was created (original renamed to .toml.backup)
const backupPath = path.join(tempDir, 'test.toml.backup');
const backupExists = await fs
.access(backupPath)
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(true);
// Original .toml file should not exist (renamed to .backup)
const tomlExists = await fs
.access(path.join(tempDir, 'test.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(false);
});
it('should delete original TOML when deleteOriginal is true', async () => {
await fs.writeFile(
path.join(tempDir, 'delete-me.toml'),
'prompt = "Test"',
'utf-8',
);
await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
deleteOriginal: true,
});
// Original should be deleted
const tomlExists = await fs
.access(path.join(tempDir, 'delete-me.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(false);
// Markdown should exist
const mdExists = await fs
.access(path.join(tempDir, 'delete-me.md'))
.then(() => true)
.catch(() => false);
expect(mdExists).toBe(true);
// Backup should not exist (createBackup was false)
const backupExists = await fs
.access(path.join(tempDir, 'delete-me.toml.backup'))
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(false);
});
it('should fail if Markdown file already exists', async () => {
await fs.writeFile(
path.join(tempDir, 'existing.toml'),
'prompt = "Test"',
'utf-8',
);
await fs.writeFile(
path.join(tempDir, 'existing.md'),
'Already exists',
'utf-8',
);
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
});
expect(result.success).toBe(false);
expect(result.failedFiles).toHaveLength(1);
expect(result.failedFiles[0].file).toBe('existing.toml');
expect(result.failedFiles[0].error).toContain('already exists');
});
it('should handle migration without backup', async () => {
await fs.writeFile(
path.join(tempDir, 'no-backup.toml'),
'prompt = "Test"',
'utf-8',
);
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
deleteOriginal: false,
});
expect(result.success).toBe(true);
// Original TOML file should still exist (no backup, no delete)
const tomlExists = await fs
.access(path.join(tempDir, 'no-backup.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(true);
// Backup should not exist
const backupExists = await fs
.access(path.join(tempDir, 'no-backup.toml.backup'))
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(false);
});
it('should return success with empty results for no TOML files', async () => {
const result = await migrateTomlCommands({
commandDir: tempDir,
});
expect(result.success).toBe(true);
expect(result.convertedFiles).toHaveLength(0);
expect(result.failedFiles).toHaveLength(0);
});
});
describe('generateMigrationPrompt', () => {
it('should generate prompt for few files', () => {
const files = ['cmd1.toml', 'cmd2.toml'];
const prompt = generateMigrationPrompt(files);
expect(prompt).toContain('Found 2 command files');
expect(prompt).toContain('cmd1.toml');
expect(prompt).toContain('cmd2.toml');
expect(prompt).toContain('qwen-code migrate-commands');
});
it('should truncate file list for many files', () => {
const files = Array.from({ length: 10 }, (_, i) => `cmd${i}.toml`);
const prompt = generateMigrationPrompt(files);
expect(prompt).toContain('Found 10 command files');
expect(prompt).toContain('... and 7 more');
});
it('should return empty string for no files', () => {
const prompt = generateMigrationPrompt([]);
expect(prompt).toBe('');
});
it('should use singular form for single file', () => {
const prompt = generateMigrationPrompt(['single.toml']);
expect(prompt).toContain('Found 1 command file');
// Don't check for plural since "files" appears in other parts of the message
});
});
});

View File

@@ -0,0 +1,169 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Tool for migrating TOML commands to Markdown format.
*/
import { promises as fs } from 'node:fs';
import path from 'node:path';
import { glob } from 'glob';
import { convertTomlToMarkdown } from '@qwen-code/qwen-code-core';
export interface MigrationResult {
success: boolean;
convertedFiles: string[];
failedFiles: Array<{ file: string; error: string }>;
}
export interface MigrationOptions {
/** Directory containing command files */
commandDir: string;
/** Whether to create backups (default: true) */
createBackup?: boolean;
/** Whether to delete original TOML files after migration (default: false) */
deleteOriginal?: boolean;
}
/**
* Scans a directory for TOML command files.
* @param commandDir Directory to scan
* @returns Array of TOML file paths (relative to commandDir)
*/
export async function detectTomlCommands(
commandDir: string,
): Promise<string[]> {
try {
await fs.access(commandDir);
} catch {
// Directory doesn't exist
return [];
}
const tomlFiles = await glob('**/*.toml', {
cwd: commandDir,
nodir: true,
dot: false,
});
return tomlFiles;
}
/**
* Migrates TOML command files to Markdown format.
* @param options Migration options
* @returns Migration result with details
*/
export async function migrateTomlCommands(
options: MigrationOptions,
): Promise<MigrationResult> {
const { commandDir, createBackup = true, deleteOriginal = false } = options;
const result: MigrationResult = {
success: true,
convertedFiles: [],
failedFiles: [],
};
// Detect TOML files
const tomlFiles = await detectTomlCommands(commandDir);
if (tomlFiles.length === 0) {
return result;
}
// Process each TOML file
for (const relativeFile of tomlFiles) {
const tomlPath = path.join(commandDir, relativeFile);
try {
// Read TOML file
const tomlContent = await fs.readFile(tomlPath, 'utf-8');
// Convert to Markdown
const markdownContent = convertTomlToMarkdown(tomlContent);
// Generate Markdown file path (same location, .md extension)
const markdownPath = tomlPath.replace(/\.toml$/, '.md');
// Check if Markdown file already exists
try {
await fs.access(markdownPath);
throw new Error(
`Markdown file already exists: ${path.basename(markdownPath)}`,
);
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
throw error;
}
// File doesn't exist, continue
}
// Write Markdown file
await fs.writeFile(markdownPath, markdownContent, 'utf-8');
// Backup original if requested (rename to .toml.backup)
if (createBackup) {
const backupPath = `${tomlPath}.backup`;
await fs.rename(tomlPath, backupPath);
} else if (deleteOriginal) {
// Delete original if requested and no backup
await fs.unlink(tomlPath);
}
result.convertedFiles.push(relativeFile);
} catch (error) {
result.success = false;
result.failedFiles.push({
file: relativeFile,
error: error instanceof Error ? error.message : String(error),
});
}
}
return result;
}
/**
* Generates a migration report message.
* @param tomlFiles List of TOML files found
* @returns Human-readable migration prompt message
*/
export function generateMigrationPrompt(tomlFiles: string[]): string {
if (tomlFiles.length === 0) {
return '';
}
const count = tomlFiles.length;
const fileList =
tomlFiles.length <= 5
? tomlFiles.map((f) => ` - ${f}`).join('\n')
: ` - ${tomlFiles.slice(0, 3).join('\n - ')}\n - ... and ${tomlFiles.length - 3} more`;
return `
⚠️ TOML Command Format Deprecation Notice
Found ${count} command file${count > 1 ? 's' : ''} in TOML format:
${fileList}
The TOML format for commands is being deprecated in favor of Markdown format.
Markdown format is more readable and easier to edit.
You can migrate these files automatically using:
qwen-code migrate-commands
Or manually convert each file:
- TOML: prompt = "..." / description = "..."
- Markdown: YAML frontmatter + content
The migration tool will:
✓ Convert TOML files to Markdown
✓ Create backups of original files
✓ Preserve all command functionality
TOML format will continue to work for now, but migration is recommended.
`.trim();
}
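For orientation, a small usage sketch of the migration helpers (the surrounding wiring is hypothetical; Storage.getUserCommandsDir() is the same helper AppContainer uses later in this diff):

import { Storage } from '@qwen-code/qwen-code-core';
import {
  detectTomlCommands,
  migrateTomlCommands,
  generateMigrationPrompt,
} from './command-migration-tool.js';

const commandDir = Storage.getUserCommandsDir();
const tomlFiles = await detectTomlCommands(commandDir);
if (tomlFiles.length > 0) {
  console.log(generateMigrationPrompt(tomlFiles));
  const result = await migrateTomlCommands({ commandDir, createBackup: true });
  // Each converted foo.toml gains a sibling foo.md; the original is renamed to
  // foo.toml.backup. Per-file failures are collected in result.failedFiles.
  console.log(`migrated ${result.convertedFiles.length}, failed ${result.failedFiles.length}`);
}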

View File

@@ -0,0 +1,144 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import {
parseMarkdownCommand,
MarkdownCommandDefSchema,
} from './markdown-command-parser.js';
describe('parseMarkdownCommand', () => {
it('should parse markdown with YAML frontmatter', () => {
const content = `---
description: Test command
---
This is the prompt content.`;
const result = parseMarkdownCommand(content);
expect(result).toEqual({
frontmatter: {
description: 'Test command',
},
prompt: 'This is the prompt content.',
});
});
it('should parse markdown without frontmatter', () => {
const content = 'This is just a prompt without frontmatter.';
const result = parseMarkdownCommand(content);
expect(result).toEqual({
prompt: 'This is just a prompt without frontmatter.',
});
});
it('should handle multi-line prompts', () => {
const content = `---
description: Multi-line test
---
First line of prompt.
Second line of prompt.
Third line of prompt.`;
const result = parseMarkdownCommand(content);
expect(result.prompt).toBe(
'First line of prompt.\nSecond line of prompt.\nThird line of prompt.',
);
});
it('should trim whitespace from prompt', () => {
const content = `---
description: Whitespace test
---
Prompt with leading and trailing spaces
`;
const result = parseMarkdownCommand(content);
expect(result.prompt).toBe('Prompt with leading and trailing spaces');
});
it('should handle empty frontmatter', () => {
const content = `---
---
Prompt content after empty frontmatter.`;
const result = parseMarkdownCommand(content);
// Empty YAML frontmatter returns undefined, not {}
expect(result.frontmatter).toBeUndefined();
expect(result.prompt).toBe('Prompt content after empty frontmatter.');
});
it('should handle invalid YAML frontmatter gracefully', () => {
// The YAML parser we use is quite tolerant, so most "invalid" YAML
// actually parses successfully. This test verifies that behavior.
const content = `---
description: test
---
Prompt content.`;
const result = parseMarkdownCommand(content);
expect(result.frontmatter).toBeDefined();
expect(result.prompt).toBe('Prompt content.');
});
});
describe('MarkdownCommandDefSchema', () => {
it('should validate valid markdown command def', () => {
const validDef = {
frontmatter: {
description: 'Test description',
},
prompt: 'Test prompt',
};
const result = MarkdownCommandDefSchema.safeParse(validDef);
expect(result.success).toBe(true);
});
it('should validate markdown command def without frontmatter', () => {
const validDef = {
prompt: 'Test prompt',
};
const result = MarkdownCommandDefSchema.safeParse(validDef);
expect(result.success).toBe(true);
});
it('should reject command def without prompt', () => {
const invalidDef = {
frontmatter: {
description: 'Test description',
},
};
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
expect(result.success).toBe(false);
});
it('should reject command def with non-string prompt', () => {
const invalidDef = {
prompt: 123,
};
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
expect(result.success).toBe(false);
});
});

View File

@@ -0,0 +1,64 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { z } from 'zod';
import { parse as parseYaml } from '@qwen-code/qwen-code-core';
/**
* Defines the Zod schema for a Markdown command definition file.
* The frontmatter contains optional metadata, and the body is the prompt.
*/
export const MarkdownCommandDefSchema = z.object({
frontmatter: z
.object({
description: z.string().optional(),
})
.optional(),
prompt: z.string({
required_error: 'The prompt content is required.',
invalid_type_error: 'The prompt content must be a string.',
}),
});
export type MarkdownCommandDef = z.infer<typeof MarkdownCommandDefSchema>;
/**
* Parses a Markdown command file with optional YAML frontmatter.
* @param content The file content
* @returns Parsed command definition with frontmatter and prompt
*/
export function parseMarkdownCommand(content: string): MarkdownCommandDef {
// Match YAML frontmatter pattern: ---\n...\n---\n
// Allow empty frontmatter: ---\n---\n
// Use (?:[\s\S]*?) to make the frontmatter content optional
const frontmatterRegex = /^---\n([\s\S]*?)---\n([\s\S]*)$/;
const match = content.match(frontmatterRegex);
if (!match) {
// No frontmatter, entire content is the prompt
return {
prompt: content.trim(),
};
}
const [, frontmatterYaml, body] = match;
// Parse YAML frontmatter if not empty
let frontmatter: Record<string, unknown> | undefined;
if (frontmatterYaml.trim()) {
try {
frontmatter = parseYaml(frontmatterYaml) as Record<string, unknown>;
} catch (error) {
throw new Error(
`Failed to parse YAML frontmatter: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
return {
frontmatter,
prompt: body.trim(),
};
}
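A short sketch of the parser's contract, using a made-up command body (the expected values follow from the frontmatter regex and the trimming above):

const def = parseMarkdownCommand(
  ['---', 'description: Review staged changes', '---', 'Review my staged diff.'].join('\n'),
);
// def.frontmatter?.description === 'Review staged changes'
// def.prompt === 'Review my staged diff.'
const validated = MarkdownCommandDefSchema.safeParse(def);
// validated.success === true; a missing or non-string prompt would fail validation.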

View File

@@ -0,0 +1,5 @@
---
description: Example markdown command
---
This is an example prompt from a markdown file.

View File

@@ -1,49 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import {
EXTENSIONS_CONFIG_FILENAME,
INSTALL_METADATA_FILENAME,
} from '../config/extension.js';
import {
type MCPServerConfig,
type ExtensionInstallMetadata,
} from '@qwen-code/qwen-code-core';
export function createExtension({
extensionsDir = 'extensions-dir',
name = 'my-extension',
version = '1.0.0',
addContextFile = false,
contextFileName = undefined as string | undefined,
mcpServers = {} as Record<string, MCPServerConfig>,
installMetadata = undefined as ExtensionInstallMetadata | undefined,
} = {}): string {
const extDir = path.join(extensionsDir, name);
fs.mkdirSync(extDir, { recursive: true });
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name, version, contextFileName, mcpServers }),
);
if (addContextFile) {
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
}
if (contextFileName) {
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
}
if (installMetadata) {
fs.writeFileSync(
path.join(extDir, INSTALL_METADATA_FILENAME),
JSON.stringify(installMetadata),
);
}
return extDir;
}

View File

@@ -76,7 +76,6 @@ vi.mock('./hooks/useFolderTrust.js');
vi.mock('./hooks/useIdeTrustListener.js');
vi.mock('./hooks/useMessageQueue.js');
vi.mock('./hooks/useAutoAcceptIndicator.js');
vi.mock('./hooks/useWorkspaceMigration.js');
vi.mock('./hooks/useGitBranchName.js');
vi.mock('./contexts/VimModeContext.js');
vi.mock('./contexts/SessionContext.js');
@@ -103,7 +102,6 @@ import { useFolderTrust } from './hooks/useFolderTrust.js';
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
import { useMessageQueue } from './hooks/useMessageQueue.js';
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
import { useGitBranchName } from './hooks/useGitBranchName.js';
import { useVimMode } from './contexts/VimModeContext.js';
import { useSessionStats } from './contexts/SessionContext.js';
@@ -134,7 +132,6 @@ describe('AppContainer State Management', () => {
const mockedUseIdeTrustListener = useIdeTrustListener as Mock;
const mockedUseMessageQueue = useMessageQueue as Mock;
const mockedUseAutoAcceptIndicator = useAutoAcceptIndicator as Mock;
const mockedUseWorkspaceMigration = useWorkspaceMigration as Mock;
const mockedUseGitBranchName = useGitBranchName as Mock;
const mockedUseVimMode = useVimMode as Mock;
const mockedUseSessionStats = useSessionStats as Mock;
@@ -239,12 +236,6 @@ describe('AppContainer State Management', () => {
getQueuedMessagesText: vi.fn().mockReturnValue(''),
});
mockedUseAutoAcceptIndicator.mockReturnValue(false);
mockedUseWorkspaceMigration.mockReturnValue({
showWorkspaceMigrationDialog: false,
workspaceExtensions: [],
onWorkspaceMigrationDialogOpen: vi.fn(),
onWorkspaceMigrationDialogClose: vi.fn(),
});
mockedUseGitBranchName.mockReturnValue('main');
mockedUseVimMode.mockReturnValue({
isVimEnabled: false,

View File

@@ -38,6 +38,7 @@ import {
getErrorMessage,
getAllGeminiMdFilenames,
ShellExecutionService,
Storage,
} from '@qwen-code/qwen-code-core';
import { buildResumedHistoryItems } from './utils/resumeHistoryUtils.js';
import { validateAuthMethod } from '../config/auth.js';
@@ -76,6 +77,9 @@ import { useLoadingIndicator } from './hooks/useLoadingIndicator.js';
import { useFolderTrust } from './hooks/useFolderTrust.js';
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
import { type IdeIntegrationNudgeResult } from './IdeIntegrationNudge.js';
import { type CommandMigrationNudgeResult } from './CommandFormatMigrationNudge.js';
import { useCommandMigration } from './hooks/useCommandMigration.js';
import { migrateTomlCommands } from '../services/command-migration-tool.js';
import { appEvents, AppEvent } from '../utils/events.js';
import { type UpdateObject } from './utils/updateCheck.js';
import { setUpdateHandler } from '../utils/handleAutoUpdate.js';
@@ -83,10 +87,12 @@ import { ConsolePatcher } from './utils/ConsolePatcher.js';
import { registerCleanup, runExitCleanup } from '../utils/cleanup.js';
import { useMessageQueue } from './hooks/useMessageQueue.js';
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
import { useSessionStats } from './contexts/SessionContext.js';
import { useGitBranchName } from './hooks/useGitBranchName.js';
import { useExtensionUpdates } from './hooks/useExtensionUpdates.js';
import {
useExtensionUpdates,
useConfirmUpdateRequests,
} from './hooks/useExtensionUpdates.js';
import { ShellFocusContext } from './contexts/ShellFocusContext.js';
import { t } from '../i18n/index.js';
import { useWelcomeBack } from './hooks/useWelcomeBack.js';
@@ -97,6 +103,7 @@ import { processVisionSwitchOutcome } from './hooks/useVisionAutoSwitch.js';
import { useSubagentCreateDialog } from './hooks/useSubagentCreateDialog.js';
import { useAgentsManagerDialog } from './hooks/useAgentsManagerDialog.js';
import { useAttentionNotifications } from './hooks/useAttentionNotifications.js';
import { requestConsentInteractive } from '../commands/extensions/consent.js';
const CTRL_EXIT_PROMPT_DURATION_MS = 1000;
@@ -157,15 +164,21 @@ export const AppContainer = (props: AppContainerProps) => {
config.isTrustedFolder(),
);
const extensions = config.getExtensions();
const extensionManager = config.getExtensionManager();
extensionManager.setRequestConsent((description) =>
requestConsentInteractive(description, addConfirmUpdateExtensionRequest),
);
const { addConfirmUpdateExtensionRequest, confirmUpdateExtensionRequests } =
useConfirmUpdateRequests();
const {
extensionsUpdateState,
extensionsUpdateStateInternal,
dispatchExtensionStateUpdate,
confirmUpdateExtensionRequests,
addConfirmUpdateExtensionRequest,
} = useExtensionUpdates(
extensions,
extensionManager,
historyManager.addItem,
config.getWorkingDir(),
);
@@ -436,13 +449,6 @@ export const AppContainer = (props: AppContainerProps) => {
remount: refreshStatic,
});
const {
showWorkspaceMigrationDialog,
workspaceExtensions,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
} = useWorkspaceMigration(settings);
const { toggleVimEnabled } = useVimMode();
const {
@@ -578,11 +584,11 @@ export const AppContainer = (props: AppContainerProps) => {
: [],
config.getDebugMode(),
config.getFileService(),
settings.merged,
config.getExtensionContextFilePaths(),
config.isTrustedFolder(),
settings.merged.context?.importFormat || 'tree', // Use setting or default to 'tree'
config.getFileFilteringOptions(),
config.getDiscoveryMaxDirs(),
);
config.setUserMemory(memoryContent);
@@ -845,6 +851,13 @@ export const AppContainer = (props: AppContainerProps) => {
!idePromptAnswered,
);
// Command migration nudge
const {
showMigrationNudge: shouldShowCommandMigrationNudge,
tomlFiles: commandMigrationTomlFiles,
setShowMigrationNudge: setShowCommandMigrationNudge,
} = useCommandMigration(settings, config.storage);
const [showErrorDetails, setShowErrorDetails] = useState<boolean>(false);
const [showToolDescriptions, setShowToolDescriptions] =
useState<boolean>(false);
@@ -935,6 +948,92 @@ export const AppContainer = (props: AppContainerProps) => {
[handleSlashCommand, settings],
);
const handleCommandMigrationComplete = useCallback(
async (result: CommandMigrationNudgeResult) => {
setShowCommandMigrationNudge(false);
if (result.userSelection === 'yes') {
// Perform migration for both workspace and user levels
try {
const results = [];
// Migrate workspace commands
const workspaceCommandsDir = config.storage.getProjectCommandsDir();
const workspaceResult = await migrateTomlCommands({
commandDir: workspaceCommandsDir,
createBackup: true,
deleteOriginal: false,
});
if (
workspaceResult.convertedFiles.length > 0 ||
workspaceResult.failedFiles.length > 0
) {
results.push({ level: 'workspace', result: workspaceResult });
}
// Migrate user commands
const userCommandsDir = Storage.getUserCommandsDir();
const userResult = await migrateTomlCommands({
commandDir: userCommandsDir,
createBackup: true,
deleteOriginal: false,
});
if (
userResult.convertedFiles.length > 0 ||
userResult.failedFiles.length > 0
) {
results.push({ level: 'user', result: userResult });
}
// Report results
for (const { level, result: migrationResult } of results) {
if (
migrationResult.success &&
migrationResult.convertedFiles.length > 0
) {
historyManager.addItem(
{
type: MessageType.INFO,
text: `[${level}] Successfully migrated ${migrationResult.convertedFiles.length} command file${migrationResult.convertedFiles.length > 1 ? 's' : ''} to Markdown format. Original files backed up as .toml.backup`,
},
Date.now(),
);
}
if (migrationResult.failedFiles.length > 0) {
historyManager.addItem(
{
type: MessageType.ERROR,
text: `[${level}] Failed to migrate ${migrationResult.failedFiles.length} file${migrationResult.failedFiles.length > 1 ? 's' : ''}:\n${migrationResult.failedFiles.map((f) => `${f.file}: ${f.error}`).join('\n')}`,
},
Date.now(),
);
}
}
if (results.length === 0) {
historyManager.addItem(
{
type: MessageType.INFO,
text: 'No TOML files found to migrate.',
},
Date.now(),
);
}
} catch (error) {
historyManager.addItem(
{
type: MessageType.ERROR,
text: `❌ Migration failed: ${getErrorMessage(error)}`,
},
Date.now(),
);
}
}
},
[historyManager, setShowCommandMigrationNudge, config.storage],
);
const { elapsedTime, currentLoadingPhrase } = useLoadingIndicator(
streamingState,
settings.merged.ui?.customWittyPhrases,
@@ -1177,8 +1276,8 @@ export const AppContainer = (props: AppContainerProps) => {
const dialogsVisible =
showWelcomeBackDialog ||
showWorkspaceMigrationDialog ||
shouldShowIdePrompt ||
shouldShowCommandMigrationNudge ||
isFolderTrustDialogOpen ||
!!shellConfirmationRequest ||
!!confirmationRequest ||
@@ -1244,6 +1343,8 @@ export const AppContainer = (props: AppContainerProps) => {
suggestionsWidth,
isInputActive,
shouldShowIdePrompt,
shouldShowCommandMigrationNudge,
commandMigrationTomlFiles,
isFolderTrustDialogOpen: isFolderTrustDialogOpen ?? false,
isTrustedFolder,
constrainHeight,
@@ -1260,8 +1361,6 @@ export const AppContainer = (props: AppContainerProps) => {
historyRemountKey,
messageQueue,
showAutoAcceptIndicator,
showWorkspaceMigrationDialog,
workspaceExtensions,
currentModel,
contextFileNames,
errorCount,
@@ -1333,6 +1432,8 @@ export const AppContainer = (props: AppContainerProps) => {
suggestionsWidth,
isInputActive,
shouldShowIdePrompt,
shouldShowCommandMigrationNudge,
commandMigrationTomlFiles,
isFolderTrustDialogOpen,
isTrustedFolder,
constrainHeight,
@@ -1349,8 +1450,6 @@ export const AppContainer = (props: AppContainerProps) => {
historyRemountKey,
messageQueue,
showAutoAcceptIndicator,
showWorkspaceMigrationDialog,
workspaceExtensions,
contextFileNames,
errorCount,
availableTerminalHeight,
@@ -1404,14 +1503,13 @@ export const AppContainer = (props: AppContainerProps) => {
setShellModeActive,
vimHandleInput,
handleIdePromptComplete,
handleCommandMigrationComplete,
handleFolderTrustSelect,
setConstrainHeight,
onEscapePromptChange: handleEscapePromptChange,
refreshStatic,
handleFinalSubmit,
handleClearScreen,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
// Vision switch dialog
handleVisionSwitchSelect,
// Welcome back dialog
@@ -1441,14 +1539,13 @@ export const AppContainer = (props: AppContainerProps) => {
setShellModeActive,
vimHandleInput,
handleIdePromptComplete,
handleCommandMigrationComplete,
handleFolderTrustSelect,
setConstrainHeight,
handleEscapePromptChange,
refreshStatic,
handleFinalSubmit,
handleClearScreen,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
handleVisionSwitchSelect,
handleWelcomeBackSelection,
handleWelcomeBackClose,

View File

@@ -0,0 +1,90 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { Box, Text } from 'ink';
import type { RadioSelectItem } from './components/shared/RadioButtonSelect.js';
import { RadioButtonSelect } from './components/shared/RadioButtonSelect.js';
import { useKeypress } from './hooks/useKeypress.js';
import { theme } from './semantic-colors.js';
export type CommandMigrationNudgeResult = {
userSelection: 'yes' | 'no';
};
interface CommandFormatMigrationNudgeProps {
tomlFiles: string[];
onComplete: (result: CommandMigrationNudgeResult) => void;
}
export function CommandFormatMigrationNudge({
tomlFiles,
onComplete,
}: CommandFormatMigrationNudgeProps) {
useKeypress(
(key) => {
if (key.name === 'escape') {
onComplete({
userSelection: 'no',
});
}
},
{ isActive: true },
);
const OPTIONS: Array<RadioSelectItem<CommandMigrationNudgeResult>> = [
{
label: 'Yes',
value: {
userSelection: 'yes',
},
key: 'Yes',
},
{
label: 'No (esc)',
value: {
userSelection: 'no',
},
key: 'No (esc)',
},
];
const count = tomlFiles.length;
const fileList =
count <= 3
? tomlFiles.map((f) => `${f}`).join('\n')
: `${tomlFiles.slice(0, 2).join('\n • ')}\n • ... and ${count - 2} more`;
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.status.warning}
padding={1}
width="100%"
marginLeft={1}
>
<Box marginBottom={1} flexDirection="column">
<Text>
<Text color={theme.status.warning}>{'⚠️ '}</Text>
<Text bold>Command Format Migration</Text>
</Text>
<Text color={theme.text.secondary}>
{`Found ${count} TOML command file${count > 1 ? 's' : ''}:`}
</Text>
<Text color={theme.text.secondary}>{fileList}</Text>
<Text>{''}</Text>
<Text color={theme.text.secondary}>
The TOML format is deprecated. Would you like to migrate them to
Markdown format?
</Text>
<Text color={theme.text.secondary}>
(Backups will be created and original files will be preserved)
</Text>
</Box>
<RadioButtonSelect items={OPTIONS} onSelect={onComplete} />
</Box>
);
}
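For reference, a minimal sketch of exercising this nudge with the ink testing renderer used elsewhere in the CLI tests; the file labels and the logging handler are illustrative only:

import { render } from 'ink-testing-library';
import {
  CommandFormatMigrationNudge,
  type CommandMigrationNudgeResult,
} from './CommandFormatMigrationNudge.js';

// Illustrative handler: in the app this is wired to handleCommandMigrationComplete.
const onComplete = (result: CommandMigrationNudgeResult) => {
  console.log(`user selected: ${result.userSelection}`);
};

const { lastFrame } = render(
  <CommandFormatMigrationNudge
    tomlFiles={['workspace: deploy.toml', 'user: review.toml']}
    onComplete={onComplete}
  />,
);
// The frame should contain the warning header and both file entries.
console.log(lastFrame());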

View File

@@ -4,13 +4,6 @@
* SPDX-License-Identifier: Apache-2.0
*/
import { requestConsentInteractive } from '../../config/extension.js';
import {
updateAllUpdatableExtensions,
type ExtensionUpdateInfo,
updateExtension,
checkForAllExtensionUpdates,
} from '../../config/extensions/update.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionUpdateState } from '../state/extensions.js';
import { MessageType } from '../types.js';
@@ -20,8 +13,34 @@ import {
CommandKind,
} from './types.js';
import { t } from '../../i18n/index.js';
import type { ExtensionUpdateInfo } from '@qwen-code/qwen-code-core';
function showMessageIfNoExtensions(
context: CommandContext,
extensions: unknown[],
): boolean {
if (extensions.length === 0) {
context.ui.addItem(
{
type: MessageType.INFO,
text: 'No extensions installed. Run `/extensions explore` to check out the gallery.',
},
Date.now(),
);
return true;
}
return false;
}
async function listAction(context: CommandContext) {
const extensions = context.services.config
? context.services.config.getExtensions()
: [];
if (showMessageIfNoExtensions(context, extensions)) {
return;
}
context.ui.addItem(
{
type: MessageType.EXTENSIONS_LIST,
@@ -34,7 +53,6 @@ async function updateAction(context: CommandContext, args: string) {
const updateArgs = args.split(' ').filter((value) => value.length > 0);
const all = updateArgs.length === 1 && updateArgs[0] === '--all';
const names = all ? undefined : updateArgs;
let updateInfos: ExtensionUpdateInfo[] = [];
if (!all && names?.length === 0) {
context.ui.addItem(
@@ -47,29 +65,40 @@ async function updateAction(context: CommandContext, args: string) {
return;
}
let updateInfos: ExtensionUpdateInfo[] = [];
const extensionManager = context.services.config!.getExtensionManager();
const extensions = context.services.config
? context.services.config.getExtensions()
: [];
if (showMessageIfNoExtensions(context, extensions)) {
return Promise.resolve();
}
try {
await checkForAllExtensionUpdates(
context.services.config!.getExtensions(),
context.ui.dispatchExtensionStateUpdate,
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
await extensionManager.checkForAllExtensionUpdates((extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
context.ui.setPendingItem({
type: MessageType.EXTENSIONS_LIST,
});
if (all) {
updateInfos = await updateAllUpdatableExtensions(
context.services.config!.getWorkingDir(),
// We don't have the ability to prompt for consent yet in this flow.
(description) =>
requestConsentInteractive(
description,
context.ui.addConfirmUpdateExtensionRequest,
),
context.services.config!.getExtensions(),
updateInfos = await extensionManager.updateAllUpdatableExtensions(
context.ui.extensionsUpdateState,
context.ui.dispatchExtensionStateUpdate,
(extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
} else if (names?.length) {
const workingDir = context.services.config!.getWorkingDir();
const extensions = context.services.config!.getExtensions();
for (const name of names) {
const extension = extensions.find(
@@ -85,17 +114,15 @@ async function updateAction(context: CommandContext, args: string) {
);
continue;
}
const updateInfo = await updateExtension(
const updateInfo = await extensionManager.updateExtension(
extension,
workingDir,
(description) =>
requestConsentInteractive(
description,
context.ui.addConfirmUpdateExtensionRequest,
),
context.ui.extensionsUpdateState.get(extension.name)?.status ??
ExtensionUpdateState.UNKNOWN,
context.ui.dispatchExtensionStateUpdate,
(extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
if (updateInfo) updateInfos.push(updateInfo);
}

View File

@@ -6,6 +6,7 @@
import { Box, Text } from 'ink';
import { IdeIntegrationNudge } from '../IdeIntegrationNudge.js';
import { CommandFormatMigrationNudge } from '../CommandFormatMigrationNudge.js';
import { LoopDetectionConfirmation } from './LoopDetectionConfirmation.js';
import { FolderTrustDialog } from './FolderTrustDialog.js';
import { ShellConfirmationDialog } from './ShellConfirmationDialog.js';
@@ -16,7 +17,6 @@ import { QwenOAuthProgress } from './QwenOAuthProgress.js';
import { AuthDialog } from '../auth/AuthDialog.js';
import { OpenAIKeyPrompt } from './OpenAIKeyPrompt.js';
import { EditorSettingsDialog } from './EditorSettingsDialog.js';
import { WorkspaceMigrationDialog } from './WorkspaceMigrationDialog.js';
import { PermissionsModifyTrustDialog } from './PermissionsModifyTrustDialog.js';
import { ModelDialog } from './ModelDialog.js';
import { ApprovalModeDialog } from './ApprovalModeDialog.js';
@@ -77,15 +77,6 @@ export const DialogManager = ({
if (uiState.showIdeRestartPrompt) {
return <IdeTrustChangeDialog reason={uiState.ideTrustRestartReason} />;
}
if (uiState.showWorkspaceMigrationDialog) {
return (
<WorkspaceMigrationDialog
workspaceExtensions={uiState.workspaceExtensions}
onOpen={uiActions.onWorkspaceMigrationDialogOpen}
onClose={uiActions.onWorkspaceMigrationDialogClose}
/>
);
}
if (uiState.shouldShowIdePrompt) {
return (
<IdeIntegrationNudge
@@ -94,6 +85,14 @@ export const DialogManager = ({
/>
);
}
if (uiState.shouldShowCommandMigrationNudge) {
return (
<CommandFormatMigrationNudge
tomlFiles={uiState.commandMigrationTomlFiles}
onComplete={uiActions.handleCommandMigrationComplete}
/>
);
}
if (uiState.isFolderTrustDialogOpen) {
return (
<FolderTrustDialog

View File

@@ -1,119 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { Box, Text } from 'ink';
import {
type Extension,
performWorkspaceExtensionMigration,
} from '../../config/extension.js';
import { RadioButtonSelect } from './shared/RadioButtonSelect.js';
import { theme } from '../semantic-colors.js';
import { useState } from 'react';
import { useKeypress } from '../hooks/useKeypress.js';
export function WorkspaceMigrationDialog(props: {
workspaceExtensions: Extension[];
onOpen: () => void;
onClose: () => void;
}) {
const { workspaceExtensions, onOpen, onClose } = props;
const [migrationComplete, setMigrationComplete] = useState(false);
const [failedExtensions, setFailedExtensions] = useState<string[]>([]);
onOpen();
const onMigrate = async () => {
const failed = await performWorkspaceExtensionMigration(
workspaceExtensions,
// We aren't updating extensions, just moving them around, don't need to ask for consent.
async (_) => true,
);
setFailedExtensions(failed);
setMigrationComplete(true);
};
useKeypress(
(key) => {
if (migrationComplete && key.sequence === 'q') {
process.exit(0);
}
},
{ isActive: true },
);
if (migrationComplete) {
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.border.default}
padding={1}
>
{failedExtensions.length > 0 ? (
<>
<Text color={theme.text.primary}>
The following extensions failed to migrate. Please try installing
them manually. To see other changes, Qwen Code must be restarted.
Press &apos;q&apos; to quit.
</Text>
<Box flexDirection="column" marginTop={1} marginLeft={2}>
{failedExtensions.map((failed) => (
<Text key={failed}>- {failed}</Text>
))}
</Box>
</>
) : (
<Text color={theme.text.primary}>
Migration complete. To see changes, Qwen Code must be restarted.
Press &apos;q&apos; to quit.
</Text>
)}
</Box>
);
}
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.border.default}
padding={1}
>
<Text bold color={theme.text.primary}>
Workspace-level extensions are deprecated{'\n'}
</Text>
<Text color={theme.text.primary}>
Would you like to install them at the user level?
</Text>
<Text color={theme.text.primary}>
The extension definition will remain in your workspace directory.
</Text>
<Text color={theme.text.primary}>
If you opt to skip, you can install them manually using the extensions
install command.
</Text>
<Box flexDirection="column" marginTop={1} marginLeft={2}>
{workspaceExtensions.map((extension) => (
<Text key={extension.config.name}>- {extension.config.name}</Text>
))}
</Box>
<Box marginTop={1}>
<RadioButtonSelect
items={[
{ label: 'Install all', value: 'migrate', key: 'migrate' },
{ label: 'Skip', value: 'skip', key: 'skip' },
]}
onSelect={(value: string) => {
if (value === 'migrate') {
onMigrate();
} else {
onClose();
}
}}
/>
</Box>
</Box>
);
}

View File

@@ -218,7 +218,7 @@ export const AgentSelectionStep = ({
const renderAgentItem = (
agent: {
name: string;
level: 'project' | 'user' | 'builtin' | 'session';
level: 'project' | 'user' | 'builtin' | 'session' | 'extension';
isBuiltin?: boolean;
},
index: number,

View File

@@ -18,7 +18,6 @@ const mockUseUIState = vi.mocked(useUIState);
const mockExtensions = [
{ name: 'ext-one', version: '1.0.0', isActive: true },
{ name: 'ext-two', version: '2.1.0', isActive: true },
{ name: 'ext-disabled', version: '3.0.0', isActive: false },
];
describe('<ExtensionsList />', () => {
@@ -29,7 +28,6 @@ describe('<ExtensionsList />', () => {
const mockUIState = (
extensions: unknown[],
extensionsUpdateState: Map<string, ExtensionUpdateState>,
disabledExtensions: string[] = [],
) => {
mockUseUIState.mockReturnValue({
commandContext: createMockCommandContext({
@@ -37,13 +35,6 @@ describe('<ExtensionsList />', () => {
config: {
getExtensions: () => extensions,
},
settings: {
merged: {
extensions: {
disabled: disabledExtensions,
},
},
},
},
}),
extensionsUpdateState,
@@ -58,12 +49,11 @@ describe('<ExtensionsList />', () => {
});
it('should render a list of extensions with their version and status', () => {
mockUIState(mockExtensions, new Map(), ['ext-disabled']);
mockUIState(mockExtensions, new Map());
const { lastFrame } = render(<ExtensionsList />);
const output = lastFrame();
expect(output).toContain('ext-one (v1.0.0) - active');
expect(output).toContain('ext-two (v2.1.0) - active');
expect(output).toContain('ext-disabled (v3.0.0) - disabled');
});
it('should display "unknown state" if an extension has no update state', () => {

View File

@@ -9,12 +9,10 @@ import { useUIState } from '../../contexts/UIStateContext.js';
import { ExtensionUpdateState } from '../../state/extensions.js';
export const ExtensionsList = () => {
const { commandContext, extensionsUpdateState } = useUIState();
const allExtensions = commandContext.services.config!.getExtensions();
const settings = commandContext.services.settings;
const disabledExtensions = settings.merged.extensions?.disabled ?? [];
const { extensionsUpdateState, commandContext } = useUIState();
const extensions = commandContext.services.config?.getExtensions() || [];
if (allExtensions.length === 0) {
if (extensions.length === 0) {
return <Text>No extensions installed.</Text>;
}
@@ -22,10 +20,11 @@ export const ExtensionsList = () => {
<Box flexDirection="column" marginTop={1} marginBottom={1}>
<Text>Installed extensions:</Text>
<Box flexDirection="column" paddingLeft={2}>
{allExtensions.map((ext) => {
{extensions.map((ext) => {
const state = extensionsUpdateState.get(ext.name);
const isActive = !disabledExtensions.includes(ext.name);
const isActive = ext.isActive;
const activeString = isActive ? 'active' : 'disabled';
const activeColor = isActive ? 'green' : 'grey';
let stateColor = 'gray';
const stateText = state || 'unknown state';
@@ -44,6 +43,7 @@ export const ExtensionsList = () => {
break;
case ExtensionUpdateState.UP_TO_DATE:
case ExtensionUpdateState.NOT_UPDATABLE:
case ExtensionUpdateState.UPDATED:
stateColor = 'green';
break;
default:
@@ -52,12 +52,22 @@ export const ExtensionsList = () => {
}
return (
<Box key={ext.name}>
<Box key={ext.name} flexDirection="column" marginBottom={1}>
<Text>
<Text color="cyan">{`${ext.name} (v${ext.version})`}</Text>
{` - ${activeString}`}
<Text color={activeColor}>{` - ${activeString}`}</Text>
{<Text color={stateColor}>{` (${stateText})`}</Text>}
</Text>
{ext.resolvedSettings && ext.resolvedSettings.length > 0 && (
<Box flexDirection="column" paddingLeft={2}>
<Text>settings:</Text>
{ext.resolvedSettings.map((setting) => (
<Text key={setting.name}>
- {setting.name}: {setting.value}
</Text>
))}
</Box>
)}
</Box>
);
})}
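A small test sketch in the style of the ExtensionsList tests above, assuming an extension object that carries the resolvedSettings field read by the new layout (values illustrative):

const extWithSettings = {
  name: 'ext-one',
  version: '1.0.0',
  isActive: true,
  resolvedSettings: [{ name: 'apiBase', value: 'https://example.com' }],
};
mockUIState([extWithSettings], new Map());
const { lastFrame } = render(<ExtensionsList />);
const output = lastFrame();
expect(output).toContain('ext-one (v1.0.0) - active');
expect(output).toContain('settings:');
expect(output).toContain('- apiBase: https://example.com');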

View File

@@ -7,6 +7,7 @@
import { createContext, useContext } from 'react';
import { type Key } from '../hooks/useKeypress.js';
import { type IdeIntegrationNudgeResult } from '../IdeIntegrationNudge.js';
import { type CommandMigrationNudgeResult } from '../CommandFormatMigrationNudge.js';
import { type FolderTrustChoice } from '../components/FolderTrustDialog.js';
import {
type AuthType,
@@ -47,14 +48,13 @@ export interface UIActions {
setShellModeActive: (value: boolean) => void;
vimHandleInput: (key: Key) => boolean;
handleIdePromptComplete: (result: IdeIntegrationNudgeResult) => void;
handleCommandMigrationComplete: (result: CommandMigrationNudgeResult) => void;
handleFolderTrustSelect: (choice: FolderTrustChoice) => void;
setConstrainHeight: (value: boolean) => void;
onEscapePromptChange: (show: boolean) => void;
refreshStatic: () => void;
handleFinalSubmit: (value: string) => void;
handleClearScreen: () => void;
onWorkspaceMigrationDialogOpen: () => void;
onWorkspaceMigrationDialogClose: () => void;
// Vision switch dialog
handleVisionSwitchSelect: (outcome: VisionSwitchOutcome) => void;
// Welcome back dialog

View File

@@ -72,6 +72,8 @@ export interface UIState {
suggestionsWidth: number;
isInputActive: boolean;
shouldShowIdePrompt: boolean;
shouldShowCommandMigrationNudge: boolean;
commandMigrationTomlFiles: string[];
isFolderTrustDialogOpen: boolean;
isTrustedFolder: boolean | undefined;
constrainHeight: boolean;
@@ -87,9 +89,6 @@ export interface UIState {
historyRemountKey: number;
messageQueue: string[];
showAutoAcceptIndicator: ApprovalMode;
showWorkspaceMigrationDialog: boolean;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
workspaceExtensions: any[]; // Extension[]
// Quota-related state
currentModel: string;
contextFileNames: string[];

View File

@@ -0,0 +1,51 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { useEffect, useState } from 'react';
import { Storage } from '@qwen-code/qwen-code-core';
import { detectTomlCommands } from '../../services/command-migration-tool.js';
import type { LoadedSettings } from '../../config/settings.js';
/**
* Hook to detect TOML command files and manage migration nudge visibility.
* Checks the workspace-level and user-level command directories.
*/
export function useCommandMigration(
settings: LoadedSettings,
storage: Storage,
) {
const [showMigrationNudge, setShowMigrationNudge] = useState(false);
const [tomlFiles, setTomlFiles] = useState<string[]>([]);
useEffect(() => {
const checkTomlCommands = async () => {
const allFiles: string[] = [];
// Check workspace commands directory (.qwen/commands)
const workspaceCommandsDir = storage.getProjectCommandsDir();
const workspaceFiles = await detectTomlCommands(workspaceCommandsDir);
allFiles.push(...workspaceFiles.map((f) => `workspace: ${f}`));
// Check user commands directory (~/.qwen/commands)
const userCommandsDir = Storage.getUserCommandsDir();
const userFiles = await detectTomlCommands(userCommandsDir);
allFiles.push(...userFiles.map((f) => `user: ${f}`));
if (allFiles.length > 0) {
setTomlFiles(allFiles);
setShowMigrationNudge(true);
}
};
checkTomlCommands();
}, [storage]);
return {
showMigrationNudge,
tomlFiles,
setShowMigrationNudge,
};
}
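A sketch of how the container can consume this hook together with the nudge component; the storage instance and the migration call marked below are assumptions about the surrounding wiring:

const { showMigrationNudge, tomlFiles, setShowMigrationNudge } =
  useCommandMigration(settings, storage);

const handleCommandMigrationComplete = (
  result: CommandMigrationNudgeResult,
) => {
  setShowMigrationNudge(false);
  if (result.userSelection === 'yes') {
    // Assumed: trigger the actual TOML -> Markdown migration here.
  }
};

// DialogManager renders the nudge while shouldShowCommandMigrationNudge is set:
// <CommandFormatMigrationNudge
//   tomlFiles={tomlFiles}
//   onComplete={handleCommandMigrationComplete}
// />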

View File

@@ -4,26 +4,21 @@
* SPDX-License-Identifier: Apache-2.0
*/
import { vi } from 'vitest';
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
ExtensionStorage,
annotateActiveExtensions,
loadExtension,
} from '../../config/extension.js';
import { createExtension } from '../../test-utils/createExtension.js';
import { useExtensionUpdates } from './useExtensionUpdates.js';
import { QWEN_DIR, type GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import {
QWEN_DIR,
type ExtensionManager,
type Extension,
type ExtensionUpdateInfo,
ExtensionUpdateState,
} from '@qwen-code/qwen-code-core';
import { renderHook, waitFor } from '@testing-library/react';
import { MessageType } from '../types.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import {
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { ExtensionUpdateState } from '../state/extensions.js';
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
@@ -33,63 +28,85 @@ vi.mock('os', async (importOriginal) => {
};
});
vi.mock('../../config/extensions/update.js', () => ({
checkForAllExtensionUpdates: vi.fn(),
updateExtension: vi.fn(),
}));
function createMockExtension(overrides: Partial<Extension> = {}): Extension {
return {
id: 'test-extension-id',
name: 'test-extension',
version: '1.0.0',
path: '/some/path',
isActive: true,
config: {
name: 'test-extension',
version: '1.0.0',
},
contextFiles: [],
installMetadata: {
type: 'git',
source: 'https://some/repo',
autoUpdate: false,
},
...overrides,
};
}
function createMockExtensionManager(
extensions: Extension[],
checkCallback?: (
callback: (extensionName: string, state: ExtensionUpdateState) => void,
) => Promise<void>,
updateResult?: ExtensionUpdateInfo | undefined,
): ExtensionManager {
return {
getLoadedExtensions: vi.fn(() => extensions),
checkForAllExtensionUpdates: vi.fn(
async (
callback: (extensionName: string, state: ExtensionUpdateState) => void,
) => {
if (checkCallback) {
await checkCallback(callback);
}
},
),
updateExtension: vi.fn(async () => updateResult),
} as unknown as ExtensionManager;
}
describe('useExtensionUpdates', () => {
let tempHomeDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
);
tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'qwen-cli-test-home-'));
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
fs.mkdirSync(userExtensionsDir, { recursive: true });
vi.mocked(checkForAllExtensionUpdates).mockReset();
vi.mocked(updateExtension).mockReset();
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.clearAllMocks();
});
it('should check for updates and log a message if an update is available', async () => {
const extensions = [
{
name: 'test-extension',
const extension = createMockExtension({
name: 'test-extension',
installMetadata: {
type: 'git',
version: '1.0.0',
path: '/some/path',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo',
autoUpdate: false,
},
source: 'https://some/repo',
autoUpdate: false,
},
];
});
const addItem = vi.fn();
const cwd = '/test/cwd';
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension],
async (callback) => {
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
},
);
renderHook(() =>
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
);
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
await waitFor(() => {
expect(addItem).toHaveBeenCalledWith(
@@ -103,43 +120,32 @@ describe('useExtensionUpdates', () => {
});
it('should check for updates and automatically update if autoUpdate is true', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
const extension = createMockExtension({
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
source: 'https://some.git/repo',
autoUpdate: true,
},
});
const extension = annotateActiveExtensions(
[loadExtension({ extensionDir, workspaceDir: tempHomeDir })!],
tempHomeDir,
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const addItem = vi.fn();
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension],
async (callback) => {
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
},
{
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: 'test-extension',
},
);
vi.mocked(updateExtension).mockResolvedValue({
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: '',
});
renderHook(() => useExtensionUpdates([extension], addItem, tempHomeDir));
renderHook(() =>
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
);
await waitFor(
() => {
@@ -156,77 +162,64 @@ describe('useExtensionUpdates', () => {
});
it('should batch update notifications for multiple extensions', async () => {
const extensionDir1 = createExtension({
extensionsDir: userExtensionsDir,
const extension1 = createMockExtension({
id: 'test-extension-1-id',
name: 'test-extension-1',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo1',
type: 'git',
source: 'https://some.git/repo1',
autoUpdate: true,
},
});
const extensionDir2 = createExtension({
extensionsDir: userExtensionsDir,
const extension2 = createMockExtension({
id: 'test-extension-2-id',
name: 'test-extension-2',
version: '2.0.0',
installMetadata: {
source: 'https://some.git/repo2',
type: 'git',
source: 'https://some.git/repo2',
autoUpdate: true,
},
});
const extensions = annotateActiveExtensions(
[
loadExtension({
extensionDir: extensionDir1,
workspaceDir: tempHomeDir,
})!,
loadExtension({
extensionDir: extensionDir2,
workspaceDir: tempHomeDir,
})!,
],
tempHomeDir,
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
);
const addItem = vi.fn();
let updateCallCount = 0;
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
const extensionManager = {
getLoadedExtensions: vi.fn(() => [extension1, extension2]),
checkForAllExtensionUpdates: vi.fn(
async (
callback: (
extensionName: string,
state: ExtensionUpdateState,
) => void,
) => {
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
},
),
updateExtension: vi.fn(async () => {
updateCallCount++;
if (updateCallCount === 1) {
return {
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: 'test-extension-1',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-2',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
},
};
}
return {
originalVersion: '2.0.0',
updatedVersion: '2.1.0',
name: 'test-extension-2',
};
}),
} as unknown as ExtensionManager;
renderHook(() =>
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
);
vi.mocked(updateExtension)
.mockResolvedValueOnce({
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: '',
})
.mockResolvedValueOnce({
originalVersion: '2.0.0',
updatedVersion: '2.1.0',
name: '',
});
renderHook(() => useExtensionUpdates(extensions, addItem, tempHomeDir));
await waitFor(
() => {
expect(addItem).toHaveBeenCalledTimes(2);
@@ -250,60 +243,40 @@ describe('useExtensionUpdates', () => {
});
it('should batch update notifications for multiple extensions with autoUpdate: false', async () => {
const extensions = [
{
name: 'test-extension-1',
const extension1 = createMockExtension({
id: 'test-extension-1-id',
name: 'test-extension-1',
version: '1.0.0',
installMetadata: {
type: 'git',
version: '1.0.0',
path: '/some/path1',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo1',
autoUpdate: false,
},
source: 'https://some/repo1',
autoUpdate: false,
},
{
name: 'test-extension-2',
});
const extension2 = createMockExtension({
id: 'test-extension-2-id',
name: 'test-extension-2',
version: '2.0.0',
installMetadata: {
type: 'git',
version: '2.0.0',
path: '/some/path2',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo2',
autoUpdate: false,
},
source: 'https://some/repo2',
autoUpdate: false,
},
];
});
const addItem = vi.fn();
const cwd = '/test/cwd';
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({ type: 'BATCH_CHECK_START' });
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-1',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension1, extension2],
async (callback) => {
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
await new Promise((r) => setTimeout(r, 50));
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-2',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
dispatch({ type: 'BATCH_CHECK_END' });
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
},
);
renderHook(() =>
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
);
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
await waitFor(() => {
expect(addItem).toHaveBeenCalledTimes(1);

View File

@@ -4,7 +4,7 @@
* SPDX-License-Identifier: Apache-2.0
*/
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import type { ExtensionManager } from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import {
ExtensionUpdateState,
@@ -14,11 +14,6 @@ import {
import { useCallback, useEffect, useMemo, useReducer } from 'react';
import type { UseHistoryManagerReturn } from './useHistoryManager.js';
import { MessageType, type ConfirmationRequest } from '../types.js';
import {
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { requestConsentInteractive } from '../../config/extension.js';
import { checkExhaustive } from '../../utils/checks.js';
type ConfirmationRequestWrapper = {
@@ -45,15 +40,7 @@ function confirmationRequestsReducer(
}
}
export const useExtensionUpdates = (
extensions: GeminiCLIExtension[],
addItem: UseHistoryManagerReturn['addItem'],
cwd: string,
) => {
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
extensionUpdatesReducer,
initialExtensionUpdatesState,
);
export const useConfirmUpdateRequests = () => {
const [
confirmUpdateExtensionRequests,
dispatchConfirmUpdateExtensionRequests,
@@ -78,15 +65,52 @@ export const useExtensionUpdates = (
},
[dispatchConfirmUpdateExtensionRequests],
);
return {
addConfirmUpdateExtensionRequest,
confirmUpdateExtensionRequests,
dispatchConfirmUpdateExtensionRequests,
};
};
export const useExtensionUpdates = (
extensionManager: ExtensionManager,
addItem: UseHistoryManagerReturn['addItem'],
cwd: string,
) => {
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
extensionUpdatesReducer,
initialExtensionUpdatesState,
);
const extensions = extensionManager.getLoadedExtensions();
useEffect(() => {
(async () => {
await checkForAllExtensionUpdates(
extensions,
dispatchExtensionStateUpdate,
const extensionsToCheck = extensions.filter((extension) => {
const currentStatus = extensionsUpdateState.extensionStatuses.get(
extension.name,
);
if (!currentStatus) return true;
const currentState = currentStatus.status;
return !currentState || currentState === ExtensionUpdateState.UNKNOWN;
});
if (extensionsToCheck.length === 0) return;
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
await extensionManager.checkForAllExtensionUpdates(
(extensionName: string, state: ExtensionUpdateState) => {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
});
},
);
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
})();
}, [extensions, extensions.length, dispatchExtensionStateUpdate]);
}, [
extensions,
extensionManager,
extensionsUpdateState.extensionStatuses,
dispatchExtensionStateUpdate,
]);
useEffect(() => {
if (extensionsUpdateState.batchChecksInProgress > 0) {
@@ -113,17 +137,17 @@ export const useExtensionUpdates = (
});
if (extension.installMetadata?.autoUpdate) {
updateExtension(
extension,
cwd,
(description) =>
requestConsentInteractive(
description,
addConfirmUpdateExtensionRequest,
),
currentState.status,
dispatchExtensionStateUpdate,
)
extensionManager
.updateExtension(
extension,
currentState.status,
(extensionName, state) => {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
});
},
)
.then((result) => {
if (!result) return;
addItem(
@@ -157,13 +181,7 @@ export const useExtensionUpdates = (
Date.now(),
);
}
}, [
extensions,
extensionsUpdateState,
addConfirmUpdateExtensionRequest,
addItem,
cwd,
]);
}, [extensions, extensionManager, extensionsUpdateState, addItem, cwd]);
const extensionsUpdateStateComputed = useMemo(() => {
const result = new Map<string, ExtensionUpdateState>();
@@ -180,7 +198,5 @@ export const useExtensionUpdates = (
extensionsUpdateState: extensionsUpdateStateComputed,
extensionsUpdateStateInternal: extensionsUpdateState.extensionStatuses,
dispatchExtensionStateUpdate,
confirmUpdateExtensionRequests,
addConfirmUpdateExtensionRequest,
};
};
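After the split, the two hooks can be consumed independently; a sketch assuming the ExtensionManager comes from config.getExtensionManager() and addItem from the history manager:

const extensionManager = config.getExtensionManager();

const {
  extensionsUpdateState, // Map<string, ExtensionUpdateState> for display
  extensionsUpdateStateInternal,
  dispatchExtensionStateUpdate,
} = useExtensionUpdates(extensionManager, addItem, process.cwd());

const { confirmUpdateExtensionRequests, addConfirmUpdateExtensionRequest } =
  useConfirmUpdateRequests();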

View File

@@ -1,70 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { useState, useEffect } from 'react';
import {
type Extension,
getWorkspaceExtensions,
} from '../../config/extension.js';
import { type LoadedSettings, SettingScope } from '../../config/settings.js';
import process from 'node:process';
export function useWorkspaceMigration(settings: LoadedSettings) {
const [showWorkspaceMigrationDialog, setShowWorkspaceMigrationDialog] =
useState(false);
const [workspaceExtensions, setWorkspaceExtensions] = useState<Extension[]>(
[],
);
useEffect(() => {
// Default to true if not set.
if (!(settings.merged.experimental?.extensionManagement ?? true)) {
return;
}
const cwd = process.cwd();
const extensions = getWorkspaceExtensions(cwd);
if (
extensions.length > 0 &&
!settings.merged.extensions?.workspacesWithMigrationNudge?.includes(cwd)
) {
setWorkspaceExtensions(extensions);
setShowWorkspaceMigrationDialog(true);
console.log(settings.merged.extensions);
}
}, [
settings.merged.extensions,
settings.merged.experimental?.extensionManagement,
]);
const onWorkspaceMigrationDialogOpen = () => {
const userSettings = settings.forScope(SettingScope.User);
const extensionSettings = userSettings.settings.extensions || {
disabled: [],
};
const workspacesWithMigrationNudge =
extensionSettings.workspacesWithMigrationNudge || [];
const cwd = process.cwd();
if (!workspacesWithMigrationNudge.includes(cwd)) {
workspacesWithMigrationNudge.push(cwd);
}
extensionSettings.workspacesWithMigrationNudge =
workspacesWithMigrationNudge;
settings.setValue(SettingScope.User, 'extensions', extensionSettings);
};
const onWorkspaceMigrationDialogClose = () => {
setShowWorkspaceMigrationDialog(false);
};
return {
showWorkspaceMigrationDialog,
workspaceExtensions,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
};
}

View File

@@ -10,6 +10,7 @@ export enum ExtensionUpdateState {
CHECKING_FOR_UPDATES = 'checking for updates',
UPDATED_NEEDS_RESTART = 'updated, needs restart',
UPDATING = 'updating',
UPDATED = 'updated',
UPDATE_AVAILABLE = 'update available',
UP_TO_DATE = 'up to date',
ERROR = 'error',

View File

@@ -17,10 +17,16 @@
* resolveEnvVarsInString("URL: ${BASE_URL}/api") // Returns "URL: https://api.example.com/api"
* resolveEnvVarsInString("Missing: $UNDEFINED_VAR") // Returns "Missing: $UNDEFINED_VAR"
*/
export function resolveEnvVarsInString(value: string): string {
export function resolveEnvVarsInString(
value: string,
customEnv?: Record<string, string>,
): string {
const envVarRegex = /\$(?:(\w+)|{([^}]+)})/g; // Find $VAR_NAME or ${VAR_NAME}
return value.replace(envVarRegex, (match, varName1, varName2) => {
const varName = varName1 || varName2;
if (customEnv && typeof customEnv[varName] === 'string') {
return customEnv[varName];
}
if (process && process.env && typeof process.env[varName] === 'string') {
return process.env[varName]!;
}
@@ -47,8 +53,11 @@ export function resolveEnvVarsInString(value: string): string {
* };
* const resolved = resolveEnvVarsInObject(config);
*/
export function resolveEnvVarsInObject<T>(obj: T): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet());
export function resolveEnvVarsInObject<T>(
obj: T,
customEnv?: Record<string, string>,
): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet(), customEnv);
}
/**
@@ -61,6 +70,7 @@ export function resolveEnvVarsInObject<T>(obj: T): T {
function resolveEnvVarsInObjectInternal<T>(
obj: T,
visited: WeakSet<object>,
customEnv?: Record<string, string>,
): T {
if (
obj === null ||
@@ -72,7 +82,7 @@ function resolveEnvVarsInObjectInternal<T>(
}
if (typeof obj === 'string') {
return resolveEnvVarsInString(obj) as unknown as T;
return resolveEnvVarsInString(obj, customEnv) as unknown as T;
}
if (Array.isArray(obj)) {
@@ -84,7 +94,7 @@ function resolveEnvVarsInObjectInternal<T>(
visited.add(obj);
const result = obj.map((item) =>
resolveEnvVarsInObjectInternal(item, visited),
resolveEnvVarsInObjectInternal(item, visited, customEnv),
) as unknown as T;
visited.delete(obj);
return result;
@@ -101,7 +111,11 @@ function resolveEnvVarsInObjectInternal<T>(
const newObj = { ...obj } as T;
for (const key in newObj) {
if (Object.prototype.hasOwnProperty.call(newObj, key)) {
newObj[key] = resolveEnvVarsInObjectInternal(newObj[key], visited);
newObj[key] = resolveEnvVarsInObjectInternal(
newObj[key],
visited,
customEnv,
);
}
}
visited.delete(obj as object);
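With the optional customEnv map, values supplied by the caller take precedence over process.env; for example:

process.env['TOKEN'] = 'from-process';

resolveEnvVarsInString('Bearer $TOKEN');
// -> 'Bearer from-process'

resolveEnvVarsInString('Bearer ${TOKEN}', { TOKEN: 'from-extension' });
// -> 'Bearer from-extension'

resolveEnvVarsInObject(
  { url: '${BASE_URL}/api', headers: { auth: '$TOKEN' } },
  { BASE_URL: 'https://api.example.com', TOKEN: 'from-extension' },
);
// -> { url: 'https://api.example.com/api', headers: { auth: 'from-extension' } }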

View File

@@ -11,7 +11,9 @@
"src/**/*.ts",
"src/**/*.tsx",
"src/**/*.json",
"./package.json"
"./package.json",
"../core/src/utils/toml-to-markdown-converter.test.ts",
"../core/src/utils/toml-to-markdown-converter.ts"
],
"exclude": [
"node_modules",

View File

@@ -80,6 +80,10 @@ import {
type TelemetryTarget,
uiTelemetryService,
} from '../telemetry/index.js';
import {
ExtensionManager,
type Extension,
} from '../extension/extensionManager.js';
// Utils
import { shouldAttemptBrowserLaunch } from '../utils/browser.js';
@@ -102,6 +106,7 @@ import {
type ResumedSessionData,
} from '../services/sessionService.js';
import { randomUUID } from 'node:crypto';
import { loadServerHierarchicalMemory } from '../utils/memoryDiscovery.js';
// Re-export types
export type { AnyToolInvocation, FileFilteringOptions, MCPOAuthConfig };
@@ -192,20 +197,17 @@ export interface GitCoAuthorSettings {
email?: string;
}
export interface GeminiCLIExtension {
name: string;
version: string;
isActive: boolean;
path: string;
installMetadata?: ExtensionInstallMetadata;
}
export interface ExtensionInstallMetadata {
source: string;
type: 'git' | 'local' | 'link' | 'github-release';
type: 'git' | 'local' | 'link' | 'github-release' | 'marketplace';
releaseTag?: string; // Only present for github-release installs.
ref?: string;
autoUpdate?: boolean;
allowPreRelease?: boolean;
marketplace?: {
marketplaceSource: string;
pluginName: string;
};
}
export const DEFAULT_TRUNCATE_TOOL_OUTPUT_THRESHOLD = 25_000;
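Under the extended metadata type above, a marketplace-sourced install record might look like this (values illustrative):

const installMetadata: ExtensionInstallMetadata = {
  type: 'marketplace',
  source: 'https://github.com/org/claude-marketplace',
  marketplace: {
    marketplaceSource: 'https://github.com/org/claude-marketplace',
    pluginName: 'test-plugin',
  },
  autoUpdate: false,
};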
@@ -303,14 +305,15 @@ export interface ConfigParameters {
includeDirectories?: string[];
bugCommand?: BugCommandSettings;
model?: string;
extensionContextFilePaths?: string[];
outputLanguageFilePath?: string;
maxSessionTurns?: number;
sessionTokenLimit?: number;
experimentalSkills?: boolean;
experimentalZedIntegration?: boolean;
listExtensions?: boolean;
extensions?: GeminiCLIExtension[];
blockedMcpServers?: Array<{ name: string; extensionName: string }>;
overrideExtensions?: string[];
allowedMcpServers?: string[];
excludedMcpServers?: string[];
noBrowser?: boolean;
summarizeToolOutput?: Record<string, SummarizeToolOutputSettings>;
folderTrustFeature?: boolean;
@@ -320,6 +323,8 @@ export interface ConfigParameters {
generationConfig?: Partial<ContentGeneratorConfig>;
cliVersion?: string;
loadMemoryFromIncludeDirectories?: boolean;
importFormat?: 'tree' | 'flat';
discoveryMaxDirs?: number;
chatRecording?: boolean;
// Web search providers
webSearch?: {
@@ -338,7 +343,6 @@ export interface ConfigParameters {
shouldUseNodePtyShell?: boolean;
skipNextSpeakerCheck?: boolean;
shellExecutionConfig?: ShellExecutionConfig;
extensionManagement?: boolean;
skipLoopDetection?: boolean;
vlmSwitchMode?: string;
truncateToolOutputThreshold?: number;
@@ -391,6 +395,7 @@ export class Config {
private toolRegistry!: ToolRegistry;
private promptRegistry!: PromptRegistry;
private subagentManager!: SubagentManager;
private extensionManager!: ExtensionManager;
private skillManager!: SkillManager;
private fileSystemService: FileSystemService;
private contentGeneratorConfig!: ContentGeneratorConfig;
@@ -413,6 +418,8 @@ export class Config {
private readonly toolCallCommand: string | undefined;
private readonly mcpServerCommand: string | undefined;
private mcpServers: Record<string, MCPServerConfig> | undefined;
private readonly allowedMcpServers?: string[];
private readonly excludedMcpServers?: string[];
private sessionSubagents: SubagentConfig[];
private userMemory: string;
private sdkMode: boolean;
@@ -435,11 +442,12 @@ export class Config {
private gitService: GitService | undefined = undefined;
private sessionService: SessionService | undefined = undefined;
private chatRecordingService: ChatRecordingService | undefined = undefined;
private extensionContextFilePaths: string[] = [];
private readonly checkpointing: boolean;
private readonly proxy: string | undefined;
private readonly cwd: string;
private readonly bugCommand: BugCommandSettings | undefined;
private readonly extensionContextFilePaths: string[];
private readonly outputLanguageFilePath?: string;
private readonly noBrowser: boolean;
private readonly folderTrustFeature: boolean;
private readonly folderTrust: boolean;
@@ -449,11 +457,8 @@ export class Config {
private readonly maxSessionTurns: number;
private readonly sessionTokenLimit: number;
private readonly listExtensions: boolean;
private readonly _extensions: GeminiCLIExtension[];
private readonly _blockedMcpServers: Array<{
name: string;
extensionName: string;
}>;
private readonly overrideExtensions?: string[];
fallbackModelHandler?: FallbackModelHandler;
private quotaErrorOccurred: boolean = false;
private readonly summarizeToolOutput:
@@ -464,6 +469,8 @@ export class Config {
private readonly experimentalSkills: boolean = false;
private readonly chatRecordingEnabled: boolean;
private readonly loadMemoryFromIncludeDirectories: boolean = false;
private readonly importFormat: 'tree' | 'flat';
private readonly discoveryMaxDirs: number;
private readonly webSearch?: {
provider: Array<{
type: 'tavily' | 'google' | 'dashscope';
@@ -480,7 +487,6 @@ export class Config {
private readonly shouldUseNodePtyShell: boolean;
private readonly skipNextSpeakerCheck: boolean;
private shellExecutionConfig: ShellExecutionConfig;
private readonly extensionManagement: boolean = true;
private readonly skipLoopDetection: boolean;
private readonly skipStartupContext: boolean;
private readonly vlmSwitchMode: string | undefined;
@@ -521,6 +527,8 @@ export class Config {
this.toolCallCommand = params.toolCallCommand;
this.mcpServerCommand = params.mcpServerCommand;
this.mcpServers = params.mcpServers;
this.allowedMcpServers = params.allowedMcpServers;
this.excludedMcpServers = params.excludedMcpServers;
this.sessionSubagents = params.sessionSubagents ?? [];
this.sdkMode = params.sdkMode ?? false;
this.userMemory = params.userMemory ?? '';
@@ -543,6 +551,7 @@ export class Config {
email: 'qwen-coder@alibabacloud.com',
};
this.usageStatisticsEnabled = params.usageStatisticsEnabled ?? true;
this.outputLanguageFilePath = params.outputLanguageFilePath;
this.fileFiltering = {
respectGitIgnore: params.fileFiltering?.respectGitIgnore ?? true,
@@ -556,15 +565,13 @@ export class Config {
this.cwd = params.cwd ?? process.cwd();
this.fileDiscoveryService = params.fileDiscoveryService ?? null;
this.bugCommand = params.bugCommand;
this.extensionContextFilePaths = params.extensionContextFilePaths ?? [];
this.maxSessionTurns = params.maxSessionTurns ?? -1;
this.sessionTokenLimit = params.sessionTokenLimit ?? -1;
this.experimentalZedIntegration =
params.experimentalZedIntegration ?? false;
this.experimentalSkills = params.experimentalSkills ?? false;
this.listExtensions = params.listExtensions ?? false;
this._extensions = params.extensions ?? [];
this._blockedMcpServers = params.blockedMcpServers ?? [];
this.overrideExtensions = params.overrideExtensions;
this.noBrowser = params.noBrowser ?? false;
this.summarizeToolOutput = params.summarizeToolOutput;
this.folderTrustFeature = params.folderTrustFeature ?? false;
@@ -583,6 +590,8 @@ export class Config {
this.loadMemoryFromIncludeDirectories =
params.loadMemoryFromIncludeDirectories ?? false;
this.importFormat = params.importFormat ?? 'tree';
this.discoveryMaxDirs = params.discoveryMaxDirs ?? 200;
this.chatCompression = params.chatCompression;
this.interactive = params.interactive ?? false;
this.trustedFolder = params.trustedFolder;
@@ -608,7 +617,6 @@ export class Config {
params.truncateToolOutputLines ?? DEFAULT_TRUNCATE_TOOL_OUTPUT_LINES;
this.enableToolOutputTruncation = params.enableToolOutputTruncation ?? true;
this.useSmartEdit = params.useSmartEdit ?? false;
this.extensionManagement = params.extensionManagement ?? true;
this.channel = params.channel;
this.storage = new Storage(this.targetDir);
this.vlmSwitchMode = params.vlmSwitchMode;
@@ -656,6 +664,23 @@ export class Config {
this.subagentManager.loadSessionSubagents(this.sessionSubagents);
}
this.extensionManager = new ExtensionManager({
workspaceDir: this.targetDir,
enabledExtensionOverrides: this.overrideExtensions,
config: this,
isWorkspaceTrusted: this.isTrustedFolder(),
});
await this.extensionManager.refreshCache();
const activeExtensions = await this.extensionManager
.getLoadedExtensions()
.filter((e) => e.isActive);
this.extensionContextFilePaths = activeExtensions.flatMap(
(e) => e.contextFiles,
);
await this.refreshHierarchicalMemory();
this.toolRegistry = await this.createToolRegistry(
options?.sendSdkMcpMessage,
);
@@ -665,6 +690,24 @@ export class Config {
logStartSession(this, new StartSessionEvent(this));
}
async refreshHierarchicalMemory(): Promise<void> {
const { memoryContent, fileCount } = await loadServerHierarchicalMemory(
this.getWorkingDir(),
this.shouldLoadMemoryFromIncludeDirectories()
? this.getWorkspaceContext().getDirectories()
: [],
this.getDebugMode(),
this.getFileService(),
this.getExtensionContextFilePaths(),
this.getFolderTrust(),
this.getImportFormat(),
this.getFileFilteringOptions(),
this.getDiscoveryMaxDirs(),
);
this.setUserMemory(memoryContent);
this.setGeminiMdFileCount(fileCount);
}
getContentGenerator(): ContentGenerator {
return this.contentGenerator;
}
@@ -763,6 +806,14 @@ export class Config {
return this.loadMemoryFromIncludeDirectories;
}
getImportFormat(): 'tree' | 'flat' {
return this.importFormat;
}
getDiscoveryMaxDirs(): number {
return this.discoveryMaxDirs;
}
getContentGeneratorConfig(): ContentGeneratorConfig {
return this.contentGeneratorConfig;
}
@@ -870,7 +921,14 @@ export class Config {
}
getExcludeTools(): string[] | undefined {
return this.excludeTools;
const allExcludeTools = new Set(this.excludeTools || []);
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
for (const tool of extension.config.excludeTools || []) {
allExcludeTools.add(tool);
}
}
return [...allExcludeTools];
}
getToolDiscoveryCommand(): string | undefined {
@@ -886,7 +944,37 @@ export class Config {
}
getMcpServers(): Record<string, MCPServerConfig> | undefined {
return this.mcpServers;
let mcpServers = { ...(this.mcpServers || {}) };
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) return;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
if (this.allowedMcpServers) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) =>
this.allowedMcpServers?.includes(key),
),
);
}
if (this.excludedMcpServers) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(
([key]) => !this.excludedMcpServers?.includes(key),
),
);
}
return mcpServers;
}
addMcpServers(servers: Record<string, MCPServerConfig>): void {
@@ -1065,7 +1153,10 @@ export class Config {
}
getExtensionContextFilePaths(): string[] {
return this.extensionContextFilePaths;
return [
...this.extensionContextFilePaths,
...(this.outputLanguageFilePath ? [this.outputLanguageFilePath] : []),
];
}
getExperimentalZedIntegration(): boolean {
@@ -1080,16 +1171,54 @@ export class Config {
return this.listExtensions;
}
getExtensionManagement(): boolean {
return this.extensionManagement;
getExtensionManager(): ExtensionManager {
return this.extensionManager;
}
getExtensions(): GeminiCLIExtension[] {
return this._extensions;
getExtensions(): Extension[] {
const extensions = this.extensionManager.getLoadedExtensions();
if (this.overrideExtensions) {
return extensions.filter((e) =>
this.overrideExtensions?.includes(e.name),
);
} else {
return extensions;
}
}
getActiveExtensions(): Extension[] {
return this.getExtensions().filter((e) => e.isActive);
}
getBlockedMcpServers(): Array<{ name: string; extensionName: string }> {
return this._blockedMcpServers;
const mcpServers = { ...(this.mcpServers || {}) };
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) return;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
const blockedMcpServers: Array<{ name: string; extensionName: string }> =
[];
if (this.allowedMcpServers) {
Object.entries(mcpServers).forEach(([key, server]) => {
const isAllowed = this.allowedMcpServers?.includes(key);
if (!isAllowed) {
blockedMcpServers.push({
name: key,
extensionName: server.extensionName || '',
});
}
});
}
return blockedMcpServers;
}
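Taken together, the two accessors above merge extension-provided servers in without overriding explicit entries, then apply allowedMcpServers as an allow-list and excludedMcpServers as a deny-list; a sketch of the resulting behaviour under an assumed setup:

// Assume user config defines { local: {...} }, an active extension named
// 'db-tools' contributes { 'ext-db': {...} }, and settings specify
// allowedMcpServers: ['local'].
const servers = config.getMcpServers();
// -> { local: {...} }  ('ext-db' is dropped by the allow-list)
const blocked = config.getBlockedMcpServers();
// -> [{ name: 'ext-db', extensionName: 'db-tools' }]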
getNoBrowser(): boolean {

View File

@@ -0,0 +1,121 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import {
convertClaudeToQwenConfig,
mergeClaudeConfigs,
isClaudePluginConfig,
type ClaudePluginConfig,
type ClaudeMarketplacePluginConfig,
} from './claude-converter.js';
describe('convertClaudeToQwenConfig', () => {
it('should convert basic Claude config', () => {
const claudeConfig: ClaudePluginConfig = {
name: 'claude-plugin',
version: '1.0.0',
};
const result = convertClaudeToQwenConfig(claudeConfig);
expect(result.name).toBe('claude-plugin');
expect(result.version).toBe('1.0.0');
});
it('should convert config with basic fields only', () => {
const claudeConfig: ClaudePluginConfig = {
name: 'full-plugin',
version: '1.0.0',
commands: 'commands',
agents: ['agents/agent1.md'],
skills: ['skills/skill1'],
};
const result = convertClaudeToQwenConfig(claudeConfig);
// Commands, skills, agents are collected as directories, not in config
expect(result.name).toBe('full-plugin');
expect(result.version).toBe('1.0.0');
expect(result.mcpServers).toBeUndefined();
});
it('should throw error for missing name', () => {
const invalidConfig = {
version: '1.0.0',
} as ClaudePluginConfig;
expect(() => convertClaudeToQwenConfig(invalidConfig)).toThrow();
});
});
describe('mergeClaudeConfigs', () => {
it('should merge marketplace and plugin configs', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'marketplace-name',
version: '2.0.0',
source: 'github:org/repo',
description: 'From marketplace',
};
const pluginConfig: ClaudePluginConfig = {
name: 'plugin-name',
version: '1.0.0',
commands: 'commands',
};
const merged = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
// Marketplace takes precedence
expect(merged.name).toBe('marketplace-name');
expect(merged.version).toBe('2.0.0');
expect(merged.description).toBe('From marketplace');
// Plugin fields preserved
expect(merged.commands).toBe('commands');
});
it('should work with strict=false and no plugin config', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'standalone',
version: '1.0.0',
source: 'local',
strict: false,
commands: 'commands',
};
const merged = mergeClaudeConfigs(marketplacePlugin);
expect(merged.name).toBe('standalone');
expect(merged.commands).toBe('commands');
});
it('should throw error for strict mode without plugin config', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'strict-plugin',
version: '1.0.0',
source: 'github:org/repo',
strict: true,
};
expect(() => mergeClaudeConfigs(marketplacePlugin)).toThrow();
});
});
describe('isClaudePluginConfig', () => {
it('should identify Claude plugin directory', () => {
const extensionDir = '/tmp/test-extension';
const marketplace = {
marketplaceSource: 'https://test.com',
pluginName: 'test-plugin',
};
// This will check if marketplace.json exists and contains the plugin
// Note: In real usage, this requires actual file system setup
expect(typeof isClaudePluginConfig(extensionDir, marketplace)).toBe(
'boolean',
);
});
});

View File

@@ -0,0 +1,500 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Converter for Claude Code plugins to Qwen Code format.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { glob } from 'glob';
import type { ExtensionConfig } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import type { MCPServerConfig } from '../config/config.js';
export interface ClaudePluginConfig {
name: string;
version: string;
description?: string;
author?: { name?: string; email?: string; url?: string };
homepage?: string;
repository?: string;
license?: string;
keywords?: string[];
commands?: string | string[];
agents?: string | string[];
skills?: string | string[];
hooks?: string;
mcpServers?: string | Record<string, MCPServerConfig>;
outputStyles?: string | string[];
lspServers?: string;
}
export type ClaudePluginSource =
| { source: 'github'; repo: string }
| { source: 'url'; url: string };
export interface ClaudeMarketplacePluginConfig extends ClaudePluginConfig {
source: string | ClaudePluginSource;
category?: string;
strict?: boolean;
tags?: string[];
}
export interface ClaudeMarketplaceConfig {
name: string;
owner: { name: string; email: string };
plugins: ClaudeMarketplacePluginConfig[];
metadata?: { description?: string; version?: string; pluginRoot?: string };
}
/**
* Converts a Claude plugin config to Qwen Code format.
* @param claudeConfig Claude plugin configuration
* @returns Qwen ExtensionConfig
*/
export function convertClaudeToQwenConfig(
claudeConfig: ClaudePluginConfig,
): ExtensionConfig {
// Validate required fields
if (!claudeConfig.name || !claudeConfig.version) {
throw new Error('Claude plugin config must have name and version fields');
}
// Parse MCP servers
let mcpServers: Record<string, MCPServerConfig> | undefined;
if (claudeConfig.mcpServers) {
if (typeof claudeConfig.mcpServers === 'string') {
// TODO: Load from file path
console.warn(
`[Claude Converter] MCP servers path not yet supported: ${claudeConfig.mcpServers}`,
);
} else {
mcpServers = claudeConfig.mcpServers;
}
}
// Warn about unsupported fields
if (claudeConfig.hooks) {
console.warn(
`[Claude Converter] Hooks are not yet supported in ${claudeConfig.name}`,
);
}
if (claudeConfig.outputStyles) {
console.warn(
`[Claude Converter] Output styles are not yet supported in ${claudeConfig.name}`,
);
}
if (claudeConfig.lspServers) {
console.warn(
`[Claude Converter] LSP servers are not yet supported in ${claudeConfig.name}`,
);
}
// Direct field mapping - commands, skills, agents will be collected as folders
return {
name: claudeConfig.name,
version: claudeConfig.version,
mcpServers,
};
}
/**
* Converts a complete Claude plugin package to Qwen Code format.
* Creates a new temporary directory with:
* 1. Converted qwen-extension.json
* 2. Commands, skills, and agents collected to respective folders
* 3. MCP servers resolved from JSON files if needed
* 4. All other files preserved
*
* @param extensionDir Path to the Claude plugin directory
* @param marketplace Marketplace information for loading marketplace.json
* @returns Object containing converted config and the temporary directory path
*/
export async function convertClaudePluginPackage(
extensionDir: string,
marketplace: { marketplaceSource: string; pluginName: string },
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
// Step 1: Load marketplace.json
const marketplaceJsonPath = path.join(
extensionDir,
'.claude-plugin',
'marketplace.json',
);
if (!fs.existsSync(marketplaceJsonPath)) {
throw new Error(
`Marketplace configuration not found at ${marketplaceJsonPath}`,
);
}
const marketplaceContent = fs.readFileSync(marketplaceJsonPath, 'utf-8');
const marketplaceConfig: ClaudeMarketplaceConfig =
JSON.parse(marketplaceContent);
// Find the target plugin in marketplace
const marketplacePlugin = marketplaceConfig.plugins.find(
(p) => p.name === marketplace.pluginName,
);
if (!marketplacePlugin) {
throw new Error(
`Plugin ${marketplace.pluginName} not found in marketplace.json`,
);
}
// Step 2: Resolve plugin source directory based on source field
const source = marketplacePlugin.source;
let pluginSourceDir: string;
if (typeof source === 'string') {
// Check if it's a URL (online path)
if (source.startsWith('http://') || source.startsWith('https://')) {
throw new Error(
`Online plugin sources are not supported in convertClaudePluginPackage. ` +
`Plugin ${marketplace.pluginName} has source: ${source}. ` +
`This should be downloaded and resolved before calling this function.`,
);
}
// Relative path within marketplace directory
const marketplaceDir = marketplaceConfig.metadata?.pluginRoot
? path.join(extensionDir, marketplaceConfig.metadata.pluginRoot)
: extensionDir;
pluginSourceDir = path.join(marketplaceDir, source);
} else if (source.source === 'github' || source.source === 'url') {
throw new Error(
`Online plugin sources (github/url) are not supported in convertClaudePluginPackage. ` +
`Plugin ${marketplace.pluginName} has source type: ${source.source}. ` +
`This should be downloaded and resolved before calling this function.`,
);
} else {
throw new Error(
`Unsupported plugin source type for ${marketplace.pluginName}: ${JSON.stringify(source)}`,
);
}
if (!fs.existsSync(pluginSourceDir)) {
throw new Error(`Plugin source directory not found: ${pluginSourceDir}`);
}
// Step 3: Load and merge plugin.json if exists (based on strict mode)
const strict = marketplacePlugin.strict ?? true;
let mergedConfig: ClaudePluginConfig;
if (strict) {
const pluginJsonPath = path.join(
pluginSourceDir,
'.claude-plugin',
'plugin.json',
);
if (!fs.existsSync(pluginJsonPath)) {
throw new Error(`Strict mode requires plugin.json at ${pluginJsonPath}`);
}
const pluginContent = fs.readFileSync(pluginJsonPath, 'utf-8');
const pluginConfig: ClaudePluginConfig = JSON.parse(pluginContent);
mergedConfig = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
} else {
mergedConfig = marketplacePlugin as ClaudePluginConfig;
}
// Step 4: Resolve MCP servers from JSON files if needed
if (mergedConfig.mcpServers && typeof mergedConfig.mcpServers === 'string') {
const mcpServersPath = path.isAbsolute(mergedConfig.mcpServers)
? mergedConfig.mcpServers
: path.join(pluginSourceDir, mergedConfig.mcpServers);
if (fs.existsSync(mcpServersPath)) {
try {
const mcpContent = fs.readFileSync(mcpServersPath, 'utf-8');
mergedConfig.mcpServers = JSON.parse(mcpContent) as Record<
string,
MCPServerConfig
>;
} catch (error) {
console.warn(
`Failed to parse MCP servers file ${mcpServersPath}: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
}
// Step 5: Create temporary directory for converted extension
const tmpDir = await ExtensionStorage.createTmpDir();
try {
// Step 6: Copy plugin files to temporary directory
await copyDirectory(pluginSourceDir, tmpDir);
// Step 7: Collect commands to commands folder
if (mergedConfig.commands) {
const commandsDestDir = path.join(tmpDir, 'commands');
await collectResources(
mergedConfig.commands,
pluginSourceDir,
commandsDestDir,
);
}
// Step 8: Collect skills to skills folder
if (mergedConfig.skills) {
const skillsDestDir = path.join(tmpDir, 'skills');
await collectResources(
mergedConfig.skills,
pluginSourceDir,
skillsDestDir,
);
}
// Step 9: Collect agents to agents folder
if (mergedConfig.agents) {
const agentsDestDir = path.join(tmpDir, 'agents');
await collectResources(
mergedConfig.agents,
pluginSourceDir,
agentsDestDir,
);
}
// Step 10: Convert to Qwen format config
const qwenConfig = convertClaudeToQwenConfig(mergedConfig);
// Step 11: Write qwen-extension.json
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
fs.writeFileSync(
qwenConfigPath,
JSON.stringify(qwenConfig, null, 2),
'utf-8',
);
return {
config: qwenConfig,
convertedDir: tmpDir,
};
} catch (error) {
// Clean up temporary directory on error
try {
fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
throw error;
}
}
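// Illustrative usage sketch (not part of the original source; the paths and the
// plugin name are hypothetical placeholders): converting a locally checked-out
// Claude marketplace into a Qwen extension.
//
// const { config, convertedDir } = await convertClaudePluginPackage(
// '/tmp/claude-marketplace-checkout',
// {
// marketplaceSource: '/tmp/claude-marketplace-checkout',
// pluginName: 'my-plugin',
// },
// );
// // convertedDir holds the copied plugin files plus qwen-extension.json; the
// // caller is responsible for installing from and then removing this tmp dir.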
/**
* Recursively copies a directory and its contents.
* @param source Source directory path
* @param destination Destination directory path
*/
async function copyDirectory(
source: string,
destination: string,
): Promise<void> {
// Create destination directory if it doesn't exist
if (!fs.existsSync(destination)) {
fs.mkdirSync(destination, { recursive: true });
}
const entries = fs.readdirSync(source, { withFileTypes: true });
for (const entry of entries) {
const sourcePath = path.join(source, entry.name);
const destPath = path.join(destination, entry.name);
if (entry.isDirectory()) {
await copyDirectory(sourcePath, destPath);
} else {
fs.copyFileSync(sourcePath, destPath);
}
}
}
/**
* Collects resources (commands, skills, agents) to a destination folder.
* If a resource is already in the destination folder, it will be skipped.
* @param resourcePaths String or array of resource paths
* @param pluginRoot Root directory of the plugin
* @param destDir Destination directory for collected resources
*/
async function collectResources(
resourcePaths: string | string[],
pluginRoot: string,
destDir: string,
): Promise<void> {
const paths = Array.isArray(resourcePaths) ? resourcePaths : [resourcePaths];
// Create destination directory
if (!fs.existsSync(destDir)) {
fs.mkdirSync(destDir, { recursive: true });
}
// Get the destination folder name (e.g., 'commands', 'skills', 'agents')
const destFolderName = path.basename(destDir);
for (const resourcePath of paths) {
const resolvedPath = path.isAbsolute(resourcePath)
? resourcePath
: path.join(pluginRoot, resourcePath);
if (!fs.existsSync(resolvedPath)) {
console.warn(`Resource path not found: ${resolvedPath}`);
continue;
}
const stat = fs.statSync(resolvedPath);
if (stat.isDirectory()) {
// If it's a directory, check if it's already the destination folder
const dirName = path.basename(resolvedPath);
const parentDir = path.dirname(resolvedPath);
// If the directory is already named as the destination folder (e.g., 'commands')
// and it's at the plugin root level, skip it
if (dirName === destFolderName && parentDir === pluginRoot) {
console.log(
`Skipping ${resolvedPath} as it's already in the correct location`,
);
continue;
}
// Copy all files from the directory
const files = await glob('**/*', {
cwd: resolvedPath,
nodir: true,
dot: false,
});
for (const file of files) {
const srcFile = path.join(resolvedPath, file);
const destFile = path.join(destDir, file);
// Ensure parent directory exists
const destFileDir = path.dirname(destFile);
if (!fs.existsSync(destFileDir)) {
fs.mkdirSync(destFileDir, { recursive: true });
}
fs.copyFileSync(srcFile, destFile);
}
} else {
// If it's a file, check if it's already in the destination folder
const relativePath = path.relative(pluginRoot, resolvedPath);
// Check if the file path starts with the destination folder name
// e.g., 'commands/test1.md' or 'commands/me/test.md' should be skipped
const segments = relativePath.split(path.sep);
if (segments.length > 0 && segments[0] === destFolderName) {
console.log(
`Skipping ${resolvedPath} as it's already in ${destFolderName}/`,
);
continue;
}
// Copy the file to destination
const fileName = path.basename(resolvedPath);
const destFile = path.join(destDir, fileName);
fs.copyFileSync(resolvedPath, destFile);
}
}
}
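// Path-handling example (assumed layout, for illustration only): with
// resourcePaths ['commands/deploy.md', 'extra/tools'], pluginRoot '/plugin'
// and destDir '/tmp/ext/commands', the first entry is skipped because it
// already lives under 'commands/', while every file beneath
// '/plugin/extra/tools' is copied into '/tmp/ext/commands', keeping its path
// relative to 'extra/tools'.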
/**
* Merges marketplace plugin config with the actual plugin.json config.
* Marketplace config takes precedence for conflicting fields.
* @param marketplacePlugin Marketplace plugin definition
* @param pluginConfig Actual plugin.json config (optional if strict=false)
* @returns Merged Claude plugin config
*/
export function mergeClaudeConfigs(
marketplacePlugin: ClaudeMarketplacePluginConfig,
pluginConfig?: ClaudePluginConfig,
): ClaudePluginConfig {
if (!pluginConfig && marketplacePlugin.strict !== false) {
throw new Error(
`Plugin ${marketplacePlugin.name} requires plugin.json (strict mode)`,
);
}
// Start with plugin.json config (if exists)
const merged: ClaudePluginConfig = pluginConfig
? { ...pluginConfig }
: {
name: marketplacePlugin.name,
version: '1.0.0', // Default version if not in marketplace
};
// Overlay marketplace config (takes precedence)
if (marketplacePlugin.name) merged.name = marketplacePlugin.name;
if (marketplacePlugin.version) merged.version = marketplacePlugin.version;
if (marketplacePlugin.description)
merged.description = marketplacePlugin.description;
if (marketplacePlugin.author) merged.author = marketplacePlugin.author;
if (marketplacePlugin.homepage) merged.homepage = marketplacePlugin.homepage;
if (marketplacePlugin.repository)
merged.repository = marketplacePlugin.repository;
if (marketplacePlugin.license) merged.license = marketplacePlugin.license;
if (marketplacePlugin.keywords) merged.keywords = marketplacePlugin.keywords;
if (marketplacePlugin.commands) merged.commands = marketplacePlugin.commands;
if (marketplacePlugin.agents) merged.agents = marketplacePlugin.agents;
if (marketplacePlugin.skills) merged.skills = marketplacePlugin.skills;
if (marketplacePlugin.hooks) merged.hooks = marketplacePlugin.hooks;
if (marketplacePlugin.mcpServers)
merged.mcpServers = marketplacePlugin.mcpServers;
if (marketplacePlugin.outputStyles)
merged.outputStyles = marketplacePlugin.outputStyles;
if (marketplacePlugin.lspServers)
merged.lspServers = marketplacePlugin.lspServers;
return merged;
}
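// Illustrative sketch (field values are hypothetical and object shapes
// simplified): marketplace fields win when both sides define them, and
// plugin.json fields are kept otherwise.
//
// const merged = mergeClaudeConfigs(
// { name: 'my-plugin', source: './my-plugin', version: '2.0.0' },
// { name: 'my-plugin', version: '1.0.0', description: 'from plugin.json' },
// );
// // merged.version === '2.0.0' (marketplace wins)
// // merged.description === 'from plugin.json' (only defined in plugin.json)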
/**
* Checks whether an extension directory contains a Claude plugin marketplace
* configuration that lists the requested plugin.
* @param extensionDir Directory containing the downloaded extension source
* @param marketplace Marketplace source info and the plugin name to look for
* @returns true if the directory appears to be a Claude marketplace containing the plugin
*/
export function isClaudePluginConfig(
extensionDir: string,
marketplace: { marketplaceSource: string; pluginName: string },
): boolean {
const marketplaceConfigFilePath = path.join(
extensionDir,
'.claude-plugin/marketplace.json',
);
if (!fs.existsSync(marketplaceConfigFilePath)) {
return false;
}
const marketplaceConfigContent = fs.readFileSync(
marketplaceConfigFilePath,
'utf-8',
);
const marketplaceConfig = JSON.parse(marketplaceConfigContent);
if (typeof marketplaceConfig !== 'object' || marketplaceConfig === null) {
return false;
}
const marketplaceConfigObj = marketplaceConfig as Record<string, unknown>;
// Must have name and owner
if (
typeof marketplaceConfigObj['name'] !== 'string' ||
typeof marketplaceConfigObj['owner'] !== 'object'
) {
return false;
}
if (!Array.isArray(marketplaceConfigObj['plugins'])) {
return false;
}
const marketplacePluginObj = marketplaceConfigObj['plugins'].find(
(plugin: ClaudeMarketplacePluginConfig) =>
plugin.name === marketplace.pluginName,
);
if (!marketplacePluginObj) return false;
return true;
}
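// Illustrative check (hypothetical paths and names): installers can verify a
// checkout before attempting conversion, e.g.
// isClaudePluginConfig('/tmp/claude-marketplace-checkout', {
// marketplaceSource: 'https://example.com/marketplace',
// pluginName: 'my-plugin',
// })
// returns true only if .claude-plugin/marketplace.json exists, has a string
// name and an owner object, and its plugins array contains an entry named
// 'my-plugin'.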

View File

@@ -0,0 +1,832 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
INSTALL_METADATA_FILENAME,
EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import { QWEN_DIR } from '../config/storage.js';
import {
ExtensionManager,
SettingScope,
type ExtensionManagerOptions,
validateName,
getExtensionId,
hashValue,
extensionConsentString,
maybeRequestConsentOrFail,
parseInstallSource,
type ExtensionConfig,
} from './extensionManager.js';
import type { MCPServerConfig, ExtensionInstallMetadata } from '../index.js';
const mockGit = {
clone: vi.fn(),
getRemotes: vi.fn(),
fetch: vi.fn(),
checkout: vi.fn(),
listRemote: vi.fn(),
revparse: vi.fn(),
path: vi.fn(),
};
vi.mock('simple-git', () => ({
simpleGit: vi.fn((path: string) => {
mockGit.path.mockReturnValue(path);
return mockGit;
}),
}));
vi.mock('./github.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('./github.js')>();
return {
...actual,
downloadFromGitHubRelease: vi
.fn()
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
};
});
const mockHomedir = vi.hoisted(() => vi.fn());
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: mockHomedir,
};
});
const mockLogExtensionEnable = vi.hoisted(() => vi.fn());
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
const mockLogExtensionDisable = vi.hoisted(() => vi.fn());
const mockLogExtensionUpdateEvent = vi.hoisted(() => vi.fn());
vi.mock('../telemetry/loggers.js', () => ({
logExtensionEnable: mockLogExtensionEnable,
logExtensionUpdateEvent: mockLogExtensionUpdateEvent,
}));
vi.mock('../index.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../index.js')>();
return {
...actual,
logExtensionEnable: mockLogExtensionEnable,
logExtensionInstallEvent: mockLogExtensionInstallEvent,
logExtensionUninstall: mockLogExtensionUninstall,
logExtensionDisable: mockLogExtensionDisable,
};
});
const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
function createExtension({
extensionsDir = 'extensions-dir',
name = 'my-extension',
version = '1.0.0',
addContextFile = false,
contextFileName = undefined as string | undefined,
mcpServers = {} as Record<string, MCPServerConfig>,
installMetadata = undefined as ExtensionInstallMetadata | undefined,
} = {}): string {
const extDir = path.join(extensionsDir, name);
fs.mkdirSync(extDir, { recursive: true });
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name, version, contextFileName, mcpServers }),
);
if (addContextFile) {
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
}
if (contextFileName) {
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
}
if (installMetadata) {
fs.writeFileSync(
path.join(extDir, INSTALL_METADATA_FILENAME),
JSON.stringify(installMetadata),
);
}
return extDir;
}
describe('extension tests', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'qwen-code-test-home-'),
);
tempWorkspaceDir = fs.mkdtempSync(
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
);
userExtensionsDir = path.join(tempHomeDir, EXTENSIONS_DIRECTORY_NAME);
fs.mkdirSync(userExtensionsDir, { recursive: true });
mockHomedir.mockReturnValue(tempHomeDir);
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
Object.values(mockGit).forEach((fn) => fn.mockReset());
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.restoreAllMocks();
});
function createExtensionManager(
options: Partial<ExtensionManagerOptions> = {},
): ExtensionManager {
return new ExtensionManager({
workspaceDir: tempWorkspaceDir,
isWorkspaceTrusted: true,
...options,
});
}
describe('loadExtension', () => {
it('should include extension path in loaded extension', async () => {
const extensionDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extensionDir, { recursive: true });
createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].path).toBe(extensionDir);
expect(extensions[0].config.name).toBe('test-extension');
});
it('should load context file path when QWEN.md is present', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
addContextFile: true,
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '2.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(2);
const ext1 = extensions.find((e) => e.config.name === 'ext1');
const ext2 = extensions.find((e) => e.config.name === 'ext2');
expect(ext1?.contextFiles).toEqual([
path.join(userExtensionsDir, 'ext1', 'QWEN.md'),
]);
expect(ext2?.contextFiles).toEqual([]);
});
it('should load context file path from the extension config', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
addContextFile: false,
contextFileName: 'my-context-file.md',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const ext1 = extensions.find((e) => e.config.name === 'ext1');
expect(ext1?.contextFiles).toEqual([
path.join(userExtensionsDir, 'ext1', 'my-context-file.md'),
]);
});
it('should skip extensions with invalid JSON and log a warning', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
// Good extension
createExtension({
extensionsDir: userExtensionsDir,
name: 'good-ext',
version: '1.0.0',
});
// Bad extension
const badExtDir = path.join(userExtensionsDir, 'bad-ext');
fs.mkdirSync(badExtDir);
const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
fs.writeFileSync(badConfigPath, '{ "name": "bad-ext"'); // Malformed
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].config.name).toBe('good-ext');
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
);
consoleSpy.mockRestore();
});
it('should skip extensions with missing name and log a warning', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
// Good extension
createExtension({
extensionsDir: userExtensionsDir,
name: 'good-ext',
version: '1.0.0',
});
// Bad extension
const badExtDir = path.join(userExtensionsDir, 'bad-ext-no-name');
fs.mkdirSync(badExtDir);
const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
fs.writeFileSync(badConfigPath, JSON.stringify({ version: '1.0.0' }));
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].config.name).toBe('good-ext');
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
);
consoleSpy.mockRestore();
});
it('should filter trust out of mcp servers', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
trust: true,
} as MCPServerConfig,
},
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
// trust should be filtered from extension.mcpServers (not config.mcpServers)
expect(extensions[0].mcpServers?.['test-server']?.trust).toBeUndefined();
// config.mcpServers should still have trust (original config)
expect(extensions[0].config.mcpServers?.['test-server']?.trust).toBe(
true,
);
});
});
describe('enableExtension / disableExtension', () => {
it('should disable an extension at the user scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('my-extension', SettingScope.User);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should disable an extension at the workspace scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension(
'my-extension',
SettingScope.Workspace,
tempWorkspaceDir,
);
expect(manager.isEnabled('my-extension', tempHomeDir)).toBe(true);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should handle disabling the same extension twice', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('my-extension', SettingScope.User);
manager.disableExtension('my-extension', SettingScope.User);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should throw an error if you request system scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
expect(() =>
manager.disableExtension('my-extension', SettingScope.System),
).toThrow('System and SystemDefaults scopes are not supported.');
});
it('should enable an extension at the user scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('ext1', SettingScope.User);
expect(manager.isEnabled('ext1')).toBe(false);
manager.enableExtension('ext1', SettingScope.User);
expect(manager.isEnabled('ext1')).toBe(true);
});
it('should enable an extension at the workspace scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('ext1', SettingScope.Workspace);
expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(false);
manager.enableExtension('ext1', SettingScope.Workspace);
expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(true);
});
});
describe('validateExtensionOverrides', () => {
it('should mark all extensions as active if no enabled extensions are provided', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(2);
expect(extensions.every((e) => e.isActive)).toBe(true);
});
it('should mark only the enabled extensions as active', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext3',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['ext1', 'ext3'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
expect(extensions.find((e) => e.name === 'ext2')?.isActive).toBe(false);
expect(extensions.find((e) => e.name === 'ext3')?.isActive).toBe(true);
});
it('should mark all extensions as inactive when "none" is provided', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['none'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.every((e) => !e.isActive)).toBe(true);
});
it('should handle case-insensitivity', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['EXT1'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
});
it('should log an error for unknown extensions', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['ext4'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
manager.validateExtensionOverrides(extensions);
expect(consoleSpy).toHaveBeenCalledWith('Extension not found: ext4');
consoleSpy.mockRestore();
});
});
describe('loadExtensionConfig', () => {
it('should resolve environment variables in extension configuration', async () => {
process.env['TEST_API_KEY'] = 'test-api-key-123';
process.env['TEST_DB_URL'] = 'postgresql://localhost:5432/testdb';
try {
const extDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extDir);
const extensionConfig = {
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
env: {
API_KEY: '$TEST_API_KEY',
DATABASE_URL: '${TEST_DB_URL}',
STATIC_VALUE: 'no-substitution',
},
},
},
};
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify(extensionConfig),
);
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const extension = extensions[0];
expect(extension.config.name).toBe('test-extension');
expect(extension.config.mcpServers).toBeDefined();
const serverConfig = extension.config.mcpServers?.['test-server'];
expect(serverConfig).toBeDefined();
expect(serverConfig?.env).toBeDefined();
expect(serverConfig?.env?.['API_KEY']).toBe('test-api-key-123');
expect(serverConfig?.env?.['DATABASE_URL']).toBe(
'postgresql://localhost:5432/testdb',
);
expect(serverConfig?.env?.['STATIC_VALUE']).toBe('no-substitution');
} finally {
delete process.env['TEST_API_KEY'];
delete process.env['TEST_DB_URL'];
}
});
it('should handle missing environment variables gracefully', async () => {
const extDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extDir);
const extensionConfig = {
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
env: {
MISSING_VAR: '$UNDEFINED_ENV_VAR',
MISSING_VAR_BRACES: '${ALSO_UNDEFINED}',
},
},
},
};
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify(extensionConfig),
);
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const extension = extensions[0];
const serverConfig = extension.config.mcpServers!['test-server'];
expect(serverConfig.env).toBeDefined();
expect(serverConfig.env!['MISSING_VAR']).toBe('$UNDEFINED_ENV_VAR');
expect(serverConfig.env!['MISSING_VAR_BRACES']).toBe('${ALSO_UNDEFINED}');
});
});
});
describe('extensionManager utility functions', () => {
describe('validateName', () => {
it('should accept valid extension names', () => {
expect(() => validateName('my-extension')).not.toThrow();
expect(() => validateName('Extension123')).not.toThrow();
expect(() => validateName('test-ext-1')).not.toThrow();
expect(() => validateName('UPPERCASE')).not.toThrow();
});
it('should reject names with invalid characters', () => {
expect(() => validateName('my_extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my.extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my@ext')).toThrow('Invalid extension name');
});
it('should reject empty names', () => {
expect(() => validateName('')).toThrow('Invalid extension name');
});
});
describe('hashValue', () => {
it('should generate consistent hash for same input', () => {
const hash1 = hashValue('test-input');
const hash2 = hashValue('test-input');
expect(hash1).toBe(hash2);
});
it('should generate different hashes for different inputs', () => {
const hash1 = hashValue('input-1');
const hash2 = hashValue('input-2');
expect(hash1).not.toBe(hash2);
});
it('should generate a valid SHA256 hash', () => {
const hash = hashValue('test');
expect(hash).toMatch(/^[a-f0-9]{64}$/);
});
});
describe('getExtensionId', () => {
it('should use hashed name when no install metadata', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const id = getExtensionId(config);
expect(id).toBe(hashValue('test-ext'));
});
it('should use hashed source for local install', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const metadata = { type: 'local' as const, source: '/path/to/ext' };
const id = getExtensionId(config, metadata);
expect(id).toBe(hashValue('/path/to/ext'));
});
it('should use GitHub URL for git install', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const metadata = {
type: 'git' as const,
source: 'https://github.com/owner/repo',
};
const id = getExtensionId(config, metadata);
expect(id).toBe(hashValue('https://github.com/owner/repo'));
});
});
describe('extensionConsentString', () => {
it('should generate basic consent string', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const consent = extensionConsentString(config);
expect(consent).toContain('Installing extension "test-ext"');
expect(consent).toContain('Extensions may introduce unexpected behavior');
});
it('should include MCP servers in consent string', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
mcpServers: {
'my-server': { command: 'node', args: ['server.js'] },
},
};
const consent = extensionConsentString(config);
expect(consent).toContain(
'This extension will run the following MCP servers',
);
expect(consent).toContain('my-server');
});
it('should include commands in consent string', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const consent = extensionConsentString(config, ['cmd1', 'cmd2']);
expect(consent).toContain(
'This extension will add the following commands',
);
expect(consent).toContain('cmd1, cmd2');
});
it('should include context file info', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
contextFileName: 'CONTEXT.md',
};
const consent = extensionConsentString(config);
expect(consent).toContain('CONTEXT.md');
});
it('should include excluded tools', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
excludeTools: ['tool1', 'tool2'],
};
const consent = extensionConsentString(config);
expect(consent).toContain('exclude the following core tools');
});
});
describe('maybeRequestConsentOrFail', () => {
it('should request consent for new installation', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(config, requestConsent, []);
expect(requestConsent).toHaveBeenCalledTimes(1);
});
it('should throw if user declines consent', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const requestConsent = vi.fn().mockResolvedValue(false);
await expect(
maybeRequestConsentOrFail(config, requestConsent, []),
).rejects.toThrow('Installation cancelled');
});
it('should skip consent if config unchanged during update', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '0.9.0',
};
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(
config,
requestConsent,
[],
previousConfig,
);
expect(requestConsent).not.toHaveBeenCalled();
});
it('should request consent if config changed during update', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
mcpServers: { server: { command: 'node' } },
};
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '0.9.0',
};
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(
config,
requestConsent,
[],
previousConfig,
);
expect(requestConsent).toHaveBeenCalledTimes(1);
});
});
describe('parseInstallSource', () => {
it('should parse HTTPS URL as git type', async () => {
const result = await parseInstallSource('https://github.com/owner/repo');
expect(result.type).toBe('git');
expect(result.source).toBe('https://github.com/owner/repo');
});
it('should parse HTTP URL as git type', async () => {
const result = await parseInstallSource('http://example.com/repo');
expect(result.type).toBe('git');
});
it('should parse git@ URL as git type', async () => {
const result = await parseInstallSource('git@github.com:owner/repo.git');
expect(result.type).toBe('git');
});
it('should parse sso:// URL as git type', async () => {
const result = await parseInstallSource('sso://some/path');
expect(result.type).toBe('git');
});
it('should parse marketplace URL correctly', async () => {
const result = await parseInstallSource(
'https://example.com/marketplace:plugin-name',
);
expect(result.type).toBe('marketplace');
expect(result.marketplace?.pluginName).toBe('plugin-name');
});
it('should throw for non-existent local path', async () => {
await expect(
parseInstallSource('/nonexistent/path/to/extension'),
).rejects.toThrow('Install source not found');
});
});
});

File diff suppressed because it is too large

View File

@@ -0,0 +1,730 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import * as path from 'node:path';
import * as os from 'node:os';
import {
getEnvContents,
maybePromptForSettings,
promptForSetting,
type ExtensionSetting,
updateSetting,
ExtensionSettingScope,
getScopedEnvContents,
} from './extensionSettings.js';
import type { ExtensionConfig } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import prompts from 'prompts';
import * as fsPromises from 'node:fs/promises';
import * as fs from 'node:fs';
import { KeychainTokenStorage } from '../mcp/token-storage/keychain-token-storage.js';
import { EXTENSION_SETTINGS_FILENAME } from './variables.js';
vi.mock('prompts');
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: vi.fn(),
};
});
vi.mock(
'../mcp/token-storage/keychain-token-storage.js',
async (importOriginal) => {
const actual =
await importOriginal<
typeof import('../mcp/token-storage/keychain-token-storage.js')
>();
return {
...actual,
KeychainTokenStorage: vi.fn(),
};
},
);
describe('extensionSettings', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let extensionDir: string;
let mockKeychainData: Record<string, Record<string, string>>;
beforeEach(() => {
vi.clearAllMocks();
mockKeychainData = {};
vi.mocked(KeychainTokenStorage).mockImplementation(
(serviceName: string) => {
if (!mockKeychainData[serviceName]) {
mockKeychainData[serviceName] = {};
}
const keychainData = mockKeychainData[serviceName];
return {
getSecret: vi
.fn()
.mockImplementation(
async (key: string) => keychainData[key] || null,
),
setSecret: vi
.fn()
.mockImplementation(async (key: string, value: string) => {
keychainData[key] = value;
}),
deleteSecret: vi.fn().mockImplementation(async (key: string) => {
delete keychainData[key];
}),
listSecrets: vi
.fn()
.mockImplementation(async () => Object.keys(keychainData)),
isAvailable: vi.fn().mockResolvedValue(true),
} as unknown as KeychainTokenStorage;
},
);
tempHomeDir = path.join(os.tmpdir(), `gemini-cli-test-home-${Date.now()}`);
tempWorkspaceDir = path.join(
os.tmpdir(),
`gemini-cli-test-workspace-${Date.now()}`,
);
extensionDir = path.join(tempHomeDir, '.gemini', 'extensions', 'test-ext');
// Spy and mock the method, but also create the directory so we can write to it.
vi.spyOn(ExtensionStorage.prototype, 'getExtensionDir').mockReturnValue(
extensionDir,
);
fs.mkdirSync(extensionDir, { recursive: true });
fs.mkdirSync(tempWorkspaceDir, { recursive: true });
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
vi.mocked(prompts).mockClear();
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
vi.restoreAllMocks();
});
describe('maybePromptForSettings', () => {
const mockRequestSetting = vi.fn(
async (setting: ExtensionSetting) => `mock-${setting.envVar}`,
);
beforeEach(() => {
mockRequestSetting.mockClear();
});
it('should do nothing if settings are undefined', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
});
it('should do nothing if settings are empty', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
});
it('should prompt for all settings if there is no previous config', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(2);
expect(mockRequestSetting).toHaveBeenCalledWith(config.settings![0]);
expect(mockRequestSetting).toHaveBeenCalledWith(config.settings![1]);
});
it('should only prompt for new settings', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const previousSettings = { VAR1: 'previous-VAR1' };
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(1);
expect(mockRequestSetting).toHaveBeenCalledWith(newConfig.settings![1]);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\nVAR2=mock-VAR2\n';
expect(actualContent).toBe(expectedContent);
});
it('should clear settings if new config has no settings', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{
name: 's2',
description: 'd2',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
const previousSettings = {
VAR1: 'previous-VAR1',
SENSITIVE_VAR: 'secret',
};
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'secret');
const envPath = path.join(extensionDir, '.env');
await fsPromises.writeFile(envPath, 'VAR1=previous-VAR1');
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const actualContent = await fsPromises.readFile(envPath, 'utf-8');
expect(actualContent).toBe('');
expect(await userKeychain.getSecret('SENSITIVE_VAR')).toBeNull();
});
it('should remove sensitive settings from keychain', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{
name: 's1',
description: 'd1',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
const previousSettings = { SENSITIVE_VAR: 'secret' };
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'secret');
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(await userKeychain.getSecret('SENSITIVE_VAR')).toBeNull();
});
it('should remove settings that are no longer in the config', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
const previousSettings = {
VAR1: 'previous-VAR1',
VAR2: 'previous-VAR2',
};
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\n';
expect(actualContent).toBe(expectedContent);
});
it('should reprompt if a setting changes sensitivity', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1', sensitive: false },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1', sensitive: true },
],
};
const previousSettings = { VAR1: 'previous-VAR1' };
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(1);
expect(mockRequestSetting).toHaveBeenCalledWith(newConfig.settings![0]);
// The value should now be in keychain, not the .env file.
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toBe('');
});
it('should not prompt if settings are identical', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const previousSettings = {
VAR1: 'previous-VAR1',
VAR2: 'previous-VAR2',
};
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\nVAR2=previous-VAR2\n';
expect(actualContent).toBe(expectedContent);
});
it('should wrap values with spaces in quotes', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
mockRequestSetting.mockResolvedValue('a value with spaces');
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toBe('VAR1="a value with spaces"\n');
});
it('should not attempt to clear secrets if keychain is unavailable', async () => {
// Arrange
const mockIsAvailable = vi.fn().mockResolvedValue(false);
const mockListSecrets = vi.fn();
vi.mocked(KeychainTokenStorage).mockImplementation(
() =>
({
isAvailable: mockIsAvailable,
listSecrets: mockListSecrets,
deleteSecret: vi.fn(),
getSecret: vi.fn(),
setSecret: vi.fn(),
}) as unknown as KeychainTokenStorage,
);
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [], // Empty settings triggers clearSettings
};
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
// Act
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
previousConfig,
undefined,
);
// Assert
expect(mockIsAvailable).toHaveBeenCalled();
expect(mockListSecrets).not.toHaveBeenCalled();
});
});
describe('promptForSetting', () => {
it.each([
{
description:
'should use prompts with type "password" for sensitive settings',
setting: {
name: 'API Key',
description: 'Your secret key',
envVar: 'API_KEY',
sensitive: true,
},
expectedType: 'password',
promptValue: 'secret-key',
},
{
description:
'should use prompts with type "text" for non-sensitive settings',
setting: {
name: 'Username',
description: 'Your public username',
envVar: 'USERNAME',
sensitive: false,
},
expectedType: 'text',
promptValue: 'test-user',
},
{
description: 'should default to "text" if sensitive is undefined',
setting: {
name: 'Username',
description: 'Your public username',
envVar: 'USERNAME',
},
expectedType: 'text',
promptValue: 'test-user',
},
])('$description', async ({ setting, expectedType, promptValue }) => {
vi.mocked(prompts).mockResolvedValue({ value: promptValue });
const result = await promptForSetting(setting as ExtensionSetting);
expect(prompts).toHaveBeenCalledWith({
type: expectedType,
name: 'value',
message: `${setting.name}\n${setting.description}`,
});
expect(result).toBe(promptValue);
});
it('should return undefined if the user cancels the prompt', async () => {
vi.mocked(prompts).mockResolvedValue({ value: undefined });
const result = await promptForSetting({
name: 'Test',
description: 'Test desc',
envVar: 'TEST_VAR',
});
expect(result).toBeUndefined();
});
});
describe('getScopedEnvContents', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{
name: 's2',
description: 'd2',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const extensionId = '12345';
it('should return combined contents from user .env and keychain for USER scope', async () => {
const userEnvPath = path.join(extensionDir, EXTENSION_SETTINGS_FILENAME);
await fsPromises.writeFile(userEnvPath, 'VAR1=user-value1');
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'user-secret');
const contents = await getScopedEnvContents(
config,
extensionId,
ExtensionSettingScope.USER,
);
expect(contents).toEqual({
VAR1: 'user-value1',
SENSITIVE_VAR: 'user-secret',
});
});
it('should return combined contents from workspace .env and keychain for WORKSPACE scope', async () => {
const workspaceEnvPath = path.join(
tempWorkspaceDir,
EXTENSION_SETTINGS_FILENAME,
);
await fsPromises.writeFile(workspaceEnvPath, 'VAR1=workspace-value1');
const workspaceKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345 ${tempWorkspaceDir}`,
);
await workspaceKeychain.setSecret('SENSITIVE_VAR', 'workspace-secret');
const contents = await getScopedEnvContents(
config,
extensionId,
ExtensionSettingScope.WORKSPACE,
);
expect(contents).toEqual({
VAR1: 'workspace-value1',
SENSITIVE_VAR: 'workspace-secret',
});
});
});
describe('getEnvContents (merged)', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2', sensitive: true },
{ name: 's3', description: 'd3', envVar: 'VAR3' },
],
};
const extensionId = '12345';
it('should merge user and workspace settings, with workspace taking precedence', async () => {
// User settings
const userEnvPath = path.join(extensionDir, EXTENSION_SETTINGS_FILENAME);
await fsPromises.writeFile(
userEnvPath,
'VAR1=user-value1\nVAR3=user-value3',
);
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext ${extensionId}`,
);
await userKeychain.setSecret('VAR2', 'user-secret2');
// Workspace settings
const workspaceEnvPath = path.join(
tempWorkspaceDir,
EXTENSION_SETTINGS_FILENAME,
);
await fsPromises.writeFile(workspaceEnvPath, 'VAR1=workspace-value1');
const workspaceKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext ${extensionId} ${tempWorkspaceDir}`,
);
await workspaceKeychain.setSecret('VAR2', 'workspace-secret2');
const contents = await getEnvContents(config, extensionId);
expect(contents).toEqual({
VAR1: 'workspace-value1',
VAR2: 'workspace-secret2',
VAR3: 'user-value3',
});
});
});
describe('updateSetting', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2', sensitive: true },
],
};
const mockRequestSetting = vi.fn();
beforeEach(async () => {
const userEnvPath = path.join(extensionDir, '.env');
await fsPromises.writeFile(userEnvPath, 'VAR1=value1\n');
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('VAR2', 'value2');
mockRequestSetting.mockClear();
});
it('should update a non-sensitive setting in USER scope', async () => {
mockRequestSetting.mockResolvedValue('new-value1');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.USER,
);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toContain('VAR1=new-value1');
});
it('should update a non-sensitive setting in WORKSPACE scope', async () => {
mockRequestSetting.mockResolvedValue('new-workspace-value');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
const expectedEnvPath = path.join(tempWorkspaceDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toContain('VAR1=new-workspace-value');
});
it('should update a sensitive setting in USER scope', async () => {
mockRequestSetting.mockResolvedValue('new-value2');
await updateSetting(
config,
'12345',
'VAR2',
mockRequestSetting,
ExtensionSettingScope.USER,
);
const userKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345`,
);
expect(await userKeychain.getSecret('VAR2')).toBe('new-value2');
});
it('should update a sensitive setting in WORKSPACE scope', async () => {
mockRequestSetting.mockResolvedValue('new-workspace-secret');
await updateSetting(
config,
'12345',
'VAR2',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
const workspaceKeychain = new KeychainTokenStorage(
`Qwen Code Extensions test-ext 12345 ${tempWorkspaceDir}`,
);
expect(await workspaceKeychain.getSecret('VAR2')).toBe(
'new-workspace-secret',
);
});
it('should leave existing, unmanaged .env variables intact when updating in WORKSPACE scope', async () => {
// Setup a pre-existing .env file in the workspace with unmanaged variables
const workspaceEnvPath = path.join(tempWorkspaceDir, '.env');
const originalEnvContent =
'PROJECT_VAR_1=value_1\nPROJECT_VAR_2=value_2\nVAR1=original-value'; // VAR1 is managed by extension
await fsPromises.writeFile(workspaceEnvPath, originalEnvContent);
// Simulate updating an extension-managed non-sensitive setting
mockRequestSetting.mockResolvedValue('updated-value');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
// Read the .env file after update
const actualContent = await fsPromises.readFile(
workspaceEnvPath,
'utf-8',
);
// Assert that original variables are intact and extension variable is updated
expect(actualContent).toContain('PROJECT_VAR_1=value_1');
expect(actualContent).toContain('PROJECT_VAR_2=value_2');
expect(actualContent).toContain('VAR1=updated-value');
// Ensure no other unexpected changes or deletions
const lines = actualContent.split('\n').filter((line) => line.length > 0);
expect(lines).toHaveLength(3); // Should only have the three variables
});
});
});

View File

@@ -0,0 +1,298 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs/promises';
import * as fsSync from 'node:fs';
import * as dotenv from 'dotenv';
import * as path from 'node:path';
import { ExtensionStorage } from './storage.js';
import type { ExtensionConfig } from './extensionManager.js';
import prompts from 'prompts';
import { EXTENSION_SETTINGS_FILENAME } from './variables.js';
import { KeychainTokenStorage } from '../mcp/token-storage/keychain-token-storage.js';
export enum ExtensionSettingScope {
USER = 'user',
WORKSPACE = 'workspace',
}
export interface ExtensionSetting {
name: string;
description: string;
envVar: string;
// NOTE: If omitted, the setting is treated as NOT sensitive.
sensitive?: boolean;
}
const getKeychainStorageName = (
extensionName: string,
extensionId: string,
scope: ExtensionSettingScope,
): string => {
const base = `Qwen Code Extensions ${extensionName} ${extensionId}`;
if (scope === ExtensionSettingScope.WORKSPACE) {
return `${base} ${process.cwd()}`;
}
return base;
};
const getEnvFilePath = (
extensionName: string,
scope: ExtensionSettingScope,
): string => {
if (scope === ExtensionSettingScope.WORKSPACE) {
return path.join(process.cwd(), EXTENSION_SETTINGS_FILENAME);
}
return new ExtensionStorage(extensionName).getEnvFilePath();
};
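// For illustration (assumed values): an extension named "my-ext" with id "abc"
// maps to the keychain service "Qwen Code Extensions my-ext abc" at USER scope
// and "Qwen Code Extensions my-ext abc <cwd>" at WORKSPACE scope, while
// non-sensitive values are written to the extension's own env file (USER) or
// to EXTENSION_SETTINGS_FILENAME in the current workspace (WORKSPACE).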
export async function maybePromptForSettings(
extensionConfig: ExtensionConfig,
extensionId: string,
requestSetting: (setting: ExtensionSetting) => Promise<string>,
previousExtensionConfig?: ExtensionConfig,
previousSettings?: Record<string, string>,
): Promise<void> {
const { name: extensionName, settings } = extensionConfig;
if (
(!settings || settings.length === 0) &&
(!previousExtensionConfig?.settings ||
previousExtensionConfig.settings.length === 0)
) {
return;
}
// We assume user scope here because we don't have a way to ask the user for scope during the initial setup.
// The user can change the scope later using the `settings set` command.
const scope = ExtensionSettingScope.USER;
const envFilePath = getEnvFilePath(extensionName, scope);
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
if (!settings || settings.length === 0) {
await clearSettings(envFilePath, keychain);
return;
}
const settingsChanges = getSettingsChanges(
settings,
previousExtensionConfig?.settings ?? [],
);
const allSettings: Record<string, string> = { ...previousSettings };
for (const removedEnvSetting of settingsChanges.removeEnv) {
delete allSettings[removedEnvSetting.envVar];
}
for (const removedSensitiveSetting of settingsChanges.removeSensitive) {
await keychain.deleteSecret(removedSensitiveSetting.envVar);
}
for (const setting of settingsChanges.promptForSensitive.concat(
settingsChanges.promptForEnv,
)) {
const answer = await requestSetting(setting);
allSettings[setting.envVar] = answer;
}
const nonSensitiveSettings: Record<string, string> = {};
for (const setting of settings) {
const value = allSettings[setting.envVar];
if (value === undefined) {
continue;
}
if (setting.sensitive) {
await keychain.setSecret(setting.envVar, value);
} else {
nonSensitiveSettings[setting.envVar] = value;
}
}
const envContent = formatEnvContent(nonSensitiveSettings);
await fs.writeFile(envFilePath, envContent);
}
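// Illustrative usage sketch (not from the original source; newConfig,
// previousConfig and extensionId are placeholders): re-prompting only for
// settings added since the previous install, reusing promptForSetting as the
// interactive callback.
//
// await maybePromptForSettings(
// newConfig,
// extensionId,
// promptForSetting,
// previousConfig,
// await getScopedEnvContents(previousConfig, extensionId, ExtensionSettingScope.USER),
// );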
function formatEnvContent(settings: Record<string, string>): string {
let envContent = '';
for (const [key, value] of Object.entries(settings)) {
const formattedValue = value.includes(' ') ? `"${value}"` : value;
envContent += `${key}=${formattedValue}\n`;
}
return envContent;
}
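// Example output (illustrative):
// formatEnvContent({ API_KEY: 'abc', GREETING: 'hello world' }) produces:
// API_KEY=abc
// GREETING="hello world"
// Values containing spaces are quoted so dotenv can round-trip them.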
export async function promptForSetting(
setting: ExtensionSetting,
): Promise<string> {
const response = await prompts({
type: setting.sensitive ? 'password' : 'text',
name: 'value',
message: `${setting.name}\n${setting.description}`,
});
return response.value;
}
export async function getScopedEnvContents(
extensionConfig: ExtensionConfig,
extensionId: string,
scope: ExtensionSettingScope,
): Promise<Record<string, string>> {
const { name: extensionName } = extensionConfig;
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
const envFilePath = getEnvFilePath(extensionName, scope);
let customEnv: Record<string, string> = {};
if (fsSync.existsSync(envFilePath)) {
const envFile = fsSync.readFileSync(envFilePath, 'utf-8');
customEnv = dotenv.parse(envFile);
}
if (extensionConfig.settings) {
for (const setting of extensionConfig.settings) {
if (setting.sensitive) {
const secret = await keychain.getSecret(setting.envVar);
if (secret) {
customEnv[setting.envVar] = secret;
}
}
}
}
return customEnv;
}
export async function getEnvContents(
extensionConfig: ExtensionConfig,
extensionId: string,
): Promise<Record<string, string>> {
if (!extensionConfig.settings || extensionConfig.settings.length === 0) {
return {};
}
const userSettings = await getScopedEnvContents(
extensionConfig,
extensionId,
ExtensionSettingScope.USER,
);
const workspaceSettings = await getScopedEnvContents(
extensionConfig,
extensionId,
ExtensionSettingScope.WORKSPACE,
);
return { ...userSettings, ...workspaceSettings };
}
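// Merge-order sketch (hypothetical values): if the USER scope yields
// { VAR1: 'user' } and the WORKSPACE scope yields { VAR1: 'workspace' },
// getEnvContents returns { VAR1: 'workspace' }; workspace entries are spread
// last and therefore take precedence.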
export async function updateSetting(
extensionConfig: ExtensionConfig,
extensionId: string,
settingKey: string,
requestSetting: (setting: ExtensionSetting) => Promise<string>,
scope: ExtensionSettingScope,
): Promise<void> {
const { name: extensionName, settings } = extensionConfig;
if (!settings || settings.length === 0) {
console.log('This extension does not have any settings.');
return;
}
const settingToUpdate = settings.find(
(s) => s.name === settingKey || s.envVar === settingKey,
);
if (!settingToUpdate) {
console.log(`Setting ${settingKey} not found.`);
return;
}
const newValue = await requestSetting(settingToUpdate);
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
if (settingToUpdate.sensitive) {
await keychain.setSecret(settingToUpdate.envVar, newValue);
return;
}
// For non-sensitive settings, we need to read the existing .env file,
// update the value, and write it back, preserving any other values.
const envFilePath = getEnvFilePath(extensionName, scope);
let envContent = '';
if (fsSync.existsSync(envFilePath)) {
envContent = await fs.readFile(envFilePath, 'utf-8');
}
const parsedEnv = dotenv.parse(envContent);
parsedEnv[settingToUpdate.envVar] = newValue;
// We only want to write back the variables that are not sensitive.
const nonSensitiveSettings: Record<string, string> = {};
const sensitiveEnvVars = new Set(
settings.filter((s) => s.sensitive).map((s) => s.envVar),
);
for (const [key, value] of Object.entries(parsedEnv)) {
if (!sensitiveEnvVars.has(key)) {
nonSensitiveSettings[key] = value;
}
}
const newEnvContent = formatEnvContent(nonSensitiveSettings);
await fs.writeFile(envFilePath, newEnvContent);
}
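// Illustrative usage sketch (not from the original source; config and
// extensionId are placeholders for the loaded extension config and its
// computed id): interactively updating one workspace-scoped setting.
//
// await updateSetting(
// config,
// extensionId,
// 'API_KEY',
// promptForSetting,
// ExtensionSettingScope.WORKSPACE,
// );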
interface SettingsChanges {
promptForSensitive: ExtensionSetting[];
removeSensitive: ExtensionSetting[];
promptForEnv: ExtensionSetting[];
removeEnv: ExtensionSetting[];
}
function getSettingsChanges(
settings: ExtensionSetting[],
oldSettings: ExtensionSetting[],
): SettingsChanges {
const isSameSetting = (a: ExtensionSetting, b: ExtensionSetting) =>
a.envVar === b.envVar && (a.sensitive ?? false) === (b.sensitive ?? false);
const sensitiveOld = oldSettings.filter((s) => s.sensitive ?? false);
const sensitiveNew = settings.filter((s) => s.sensitive ?? false);
const envOld = oldSettings.filter((s) => !(s.sensitive ?? false));
const envNew = settings.filter((s) => !(s.sensitive ?? false));
return {
promptForSensitive: sensitiveNew.filter(
(s) => !sensitiveOld.some((old) => isSameSetting(s, old)),
),
removeSensitive: sensitiveOld.filter(
(s) => !sensitiveNew.some((neu) => isSameSetting(s, neu)),
),
promptForEnv: envNew.filter(
(s) => !envOld.some((old) => isSameSetting(s, old)),
),
removeEnv: envOld.filter(
(s) => !envNew.some((neu) => isSameSetting(s, neu)),
),
};
}
async function clearSettings(
envFilePath: string,
keychain: KeychainTokenStorage,
) {
if (fsSync.existsSync(envFilePath)) {
await fs.writeFile(envFilePath, '');
}
if (!(await keychain.isAvailable())) {
return;
}
const secrets = await keychain.listSecrets();
for (const secret of secrets) {
await keychain.deleteSecret(secret);
}
return;
}
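Taken together, these helpers give each extension a two-tier settings store: sensitive values live in the OS keychain (namespaced per extension and scope) and non-sensitive values live in a plain .env file, with workspace-scoped values overriding user-scoped ones in getEnvContents. A minimal usage sketch follows; the import paths and the prompt callback are assumptions for illustration, not part of this module.

// Illustrative only: import paths and promptUser are assumed.
import {
  getEnvContents,
  updateSetting,
  ExtensionSettingScope,
} from './settings.js';
import type { ExtensionConfig, ExtensionSetting } from './extensionManager.js';

declare function promptUser(message: string): Promise<string>; // hypothetical prompt helper

async function configureExtension(config: ExtensionConfig, extensionId: string) {
  // Store a value; sensitive settings go to the keychain, others to the .env file.
  await updateSetting(
    config,
    extensionId,
    'API_KEY',
    async (setting: ExtensionSetting) => promptUser(`Enter ${setting.name}: `),
    ExtensionSettingScope.WORKSPACE,
  );
  // Later, resolve the merged environment (workspace values win over user values).
  const env = await getEnvContents(config, extensionId);
  console.log(Object.keys(env));
}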

View File

@@ -0,0 +1,179 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import {
convertGeminiToQwenConfig,
isGeminiExtensionConfig,
type GeminiExtensionConfig,
} from './gemini-converter.js';
// Mock fs module
vi.mock('node:fs', async (importOriginal) => {
const actual = await importOriginal<typeof import('node:fs')>();
return {
...actual,
existsSync: vi.fn(),
readFileSync: vi.fn(),
};
});
describe('convertGeminiToQwenConfig', () => {
beforeEach(() => {
vi.clearAllMocks();
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should convert basic Gemini config from directory', () => {
const mockDir = '/mock/extension/dir';
const geminiConfig: GeminiExtensionConfig = {
name: 'test-extension',
version: '1.0.0',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(geminiConfig));
const result = convertGeminiToQwenConfig(mockDir);
expect(result.name).toBe('test-extension');
expect(result.version).toBe('1.0.0');
expect(fs.readFileSync).toHaveBeenCalledWith(
path.join(mockDir, 'gemini-extension.json'),
'utf-8',
);
});
it('should convert config with all optional fields', () => {
const mockDir = '/mock/extension/dir';
const geminiConfig = {
name: 'full-extension',
version: '2.0.0',
mcpServers: { server1: {} },
contextFileName: 'context.txt',
excludeTools: ['tool1', 'tool2'],
settings: [
{ name: 'Setting1', envVar: 'VAR1', description: 'Test setting' },
],
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(geminiConfig));
const result = convertGeminiToQwenConfig(mockDir);
expect(result.name).toBe('full-extension');
expect(result.version).toBe('2.0.0');
expect(result.mcpServers).toEqual({ server1: {} });
expect(result.contextFileName).toBe('context.txt');
expect(result.excludeTools).toEqual(['tool1', 'tool2']);
expect(result.settings).toHaveLength(1);
expect(result.settings?.[0].name).toBe('Setting1');
});
it('should throw error for missing name', () => {
const mockDir = '/mock/extension/dir';
const invalidConfig = {
version: '1.0.0',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(() => convertGeminiToQwenConfig(mockDir)).toThrow(
'Gemini extension config must have name and version fields',
);
});
it('should throw error for missing version', () => {
const mockDir = '/mock/extension/dir';
const invalidConfig = {
name: 'test-extension',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(() => convertGeminiToQwenConfig(mockDir)).toThrow(
'Gemini extension config must have name and version fields',
);
});
});
describe('isGeminiExtensionConfig', () => {
beforeEach(() => {
vi.clearAllMocks();
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should identify Gemini extension directory with valid config', () => {
const mockDir = '/mock/extension/dir';
const mockConfig = {
name: 'test',
version: '1.0.0',
settings: [{ name: 'Test', envVar: 'TEST', description: 'Test' }],
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(mockConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(true);
expect(fs.existsSync).toHaveBeenCalledWith(
path.join(mockDir, 'gemini-extension.json'),
);
});
it('should return false when gemini-extension.json does not exist', () => {
const mockDir = '/mock/nonexistent/dir';
vi.mocked(fs.existsSync).mockReturnValue(false);
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return false for invalid config content', () => {
const mockDir = '/mock/invalid/dir';
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('null');
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return false for config missing required fields', () => {
const mockDir = '/mock/invalid/dir';
const invalidConfig = {
name: 'test',
// missing version
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return true for basic config without settings', () => {
const mockDir = '/mock/extension/dir';
const basicConfig = {
name: 'test',
version: '1.0.0',
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(basicConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(true);
});
});
// Note: convertGeminiExtensionPackage() is tested through integration tests
// as it requires real file system operations

View File

@@ -0,0 +1,217 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Converter for Gemini CLI extensions to Qwen Code format.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { glob } from 'glob';
import type { ExtensionConfig, ExtensionSetting } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import { convertTomlToMarkdown } from '../utils/toml-to-markdown-converter.js';
export interface GeminiExtensionConfig {
name: string;
version: string;
mcpServers?: Record<string, unknown>;
contextFileName?: string | string[];
excludeTools?: string[];
settings?: ExtensionSetting[];
}
/**
* Converts a Gemini CLI extension config to Qwen Code format.
* @param extensionDir Path to the Gemini extension directory
* @returns Qwen ExtensionConfig
*/
export function convertGeminiToQwenConfig(
extensionDir: string,
): ExtensionConfig {
const configFilePath = path.join(extensionDir, 'gemini-extension.json');
const configContent = fs.readFileSync(configFilePath, 'utf-8');
const geminiConfig: GeminiExtensionConfig = JSON.parse(configContent);
// Validate required fields
if (!geminiConfig.name || !geminiConfig.version) {
throw new Error(
'Gemini extension config must have name and version fields',
);
}
const settings: ExtensionSetting[] | undefined = geminiConfig.settings;
// Direct field mapping
return {
name: geminiConfig.name,
version: geminiConfig.version,
mcpServers: geminiConfig.mcpServers as ExtensionConfig['mcpServers'],
contextFileName: geminiConfig.contextFileName,
excludeTools: geminiConfig.excludeTools,
settings,
};
}
/**
* Converts a complete Gemini extension package to Qwen Code format.
* Creates a new temporary directory with:
* 1. Converted qwen-extension.json
* 2. Commands converted from TOML to MD
* 3. All other files/folders preserved
*
* @param extensionDir Path to the Gemini extension directory
* @returns Object containing converted config and the temporary directory path
*/
export async function convertGeminiExtensionPackage(
extensionDir: string,
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
const geminiConfig = convertGeminiToQwenConfig(extensionDir);
// Create temporary directory for converted extension
const tmpDir = await ExtensionStorage.createTmpDir();
try {
// Step 1: Copy all files and directories to temporary directory
await copyDirectory(extensionDir, tmpDir);
// Step 2: Convert TOML commands to Markdown in commands folder
const commandsDir = path.join(tmpDir, 'commands');
if (fs.existsSync(commandsDir)) {
await convertCommandsDirectory(commandsDir);
}
// Step 3: Create qwen-extension.json with converted config
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
fs.writeFileSync(
qwenConfigPath,
JSON.stringify(geminiConfig, null, 2),
'utf-8',
);
return {
config: geminiConfig,
convertedDir: tmpDir,
};
} catch (error) {
// Clean up temporary directory on error
try {
fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
throw error;
}
}
/**
* Recursively copies a directory and its contents.
* @param source Source directory path
* @param destination Destination directory path
*/
async function copyDirectory(
source: string,
destination: string,
): Promise<void> {
// Create destination directory if it doesn't exist
if (!fs.existsSync(destination)) {
fs.mkdirSync(destination, { recursive: true });
}
const entries = fs.readdirSync(source, { withFileTypes: true });
for (const entry of entries) {
const sourcePath = path.join(source, entry.name);
const destPath = path.join(destination, entry.name);
if (entry.isDirectory()) {
await copyDirectory(sourcePath, destPath);
} else {
fs.copyFileSync(sourcePath, destPath);
}
}
}
/**
* Converts all TOML command files in a directory to Markdown format.
* @param commandsDir Path to the commands directory
*/
async function convertCommandsDirectory(commandsDir: string): Promise<void> {
// Find all .toml files in the commands directory
const tomlFiles = await glob('**/*.toml', {
cwd: commandsDir,
nodir: true,
dot: false,
});
// Convert each TOML file to Markdown
for (const relativeFile of tomlFiles) {
const tomlPath = path.join(commandsDir, relativeFile);
try {
// Read TOML file
const tomlContent = fs.readFileSync(tomlPath, 'utf-8');
// Convert to Markdown
const markdownContent = convertTomlToMarkdown(tomlContent);
// Generate Markdown file path (same location, .md extension)
const markdownPath = tomlPath.replace(/\.toml$/, '.md');
// Write Markdown file
fs.writeFileSync(markdownPath, markdownContent, 'utf-8');
// Delete original TOML file
fs.unlinkSync(tomlPath);
} catch (error) {
console.warn(
`Warning: Failed to convert command file ${relativeFile}: ${error instanceof Error ? error.message : String(error)}`,
);
// Continue with other files even if one fails
}
}
}
/**
 * Checks whether a directory contains a Gemini-format extension.
 * This is a heuristic check based on typical Gemini extension patterns.
 * @param extensionDir Path to the extension directory to check
 * @returns true if the directory appears to contain a Gemini extension
 */
export function isGeminiExtensionConfig(extensionDir: string): boolean {
const configFilePath = path.join(extensionDir, 'gemini-extension.json');
if (!fs.existsSync(configFilePath)) {
return false;
}
  const configContent = fs.readFileSync(configFilePath, 'utf-8');
  let parsedConfig: unknown;
  try {
    parsedConfig = JSON.parse(configContent);
  } catch {
    // Malformed JSON is not a valid Gemini extension config.
    return false;
  }
  if (typeof parsedConfig !== 'object' || parsedConfig === null) {
    return false;
  }
const obj = parsedConfig as Record<string, unknown>;
// Must have name and version
if (typeof obj['name'] !== 'string' || typeof obj['version'] !== 'string') {
return false;
}
// Check for Gemini-specific settings format
if (obj['settings'] && Array.isArray(obj['settings'])) {
const firstSetting = obj['settings'][0];
if (
firstSetting &&
typeof firstSetting === 'object' &&
'envVar' in firstSetting
) {
return true;
}
}
  // The file is named gemini-extension.json and has the required fields, so treat it as Gemini format.
return true;
}
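For reviewers, a rough sketch of how the converter is expected to be driven during install; the entry-point name and import path are assumptions, not part of this change.

// Illustrative only: import path and caller name assumed.
import {
  isGeminiExtensionConfig,
  convertGeminiExtensionPackage,
} from './gemini-converter.js';

async function maybeConvert(extensionDir: string): Promise<string> {
  if (!isGeminiExtensionConfig(extensionDir)) {
    return extensionDir; // already a Qwen extension, nothing to convert
  }
  // Copies the package to a temp dir, rewrites commands from TOML to Markdown,
  // and writes qwen-extension.json alongside the original files.
  const { config, convertedDir } = await convertGeminiExtensionPackage(extensionDir);
  console.log(`Converted ${config.name}@${config.version}`);
  return convertedDir;
}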

View File

@@ -13,14 +13,17 @@ import {
parseGitHubRepoForReleases,
} from './github.js';
import { simpleGit, type SimpleGit } from 'simple-git';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import * as os from 'node:os';
import * as fs from 'node:fs/promises';
import * as fsSync from 'node:fs';
import * as path from 'node:path';
import * as tar from 'tar';
import * as archiver from 'archiver';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import {
ExtensionUpdateState,
type Extension,
type ExtensionManager,
} from './extensionManager.js';
const mockPlatform = vi.hoisted(() => vi.fn());
const mockArch = vi.hoisted(() => vi.fn());
@@ -123,119 +126,170 @@ describe('git extension helpers', () => {
revparse: vi.fn(),
};
const mockExtensionManager = {
loadExtensionConfig: vi.fn(),
} as unknown as ExtensionManager;
beforeEach(() => {
vi.mocked(simpleGit).mockReturnValue(mockGit as unknown as SimpleGit);
});
it('should return NOT_UPDATABLE for non-git extensions', async () => {
const extension: GeminiCLIExtension = {
function createExtension(overrides: Partial<Extension> = {}): Extension {
return {
id: 'test-id',
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
config: { name: 'test', version: '1.0.0' },
contextFiles: [],
...overrides,
};
}
it('should return NOT_UPDATABLE for non-git extensions', async () => {
const extension = createExtension({
installMetadata: {
type: 'link',
source: '',
},
};
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
});
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
});
it('should return ERROR if no remotes found', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: '',
},
};
});
mockGit.getRemotes.mockResolvedValue([]);
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.ERROR);
});
it('should return UPDATE_AVAILABLE when remote hash is different', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
mockGit.listRemote.mockResolvedValue('remote-hash\tHEAD');
mockGit.revparse.mockResolvedValue('local-hash');
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
});
it('should return UP_TO_DATE when remote and local hashes are the same', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
mockGit.listRemote.mockResolvedValue('same-hash\tHEAD');
mockGit.revparse.mockResolvedValue('same-hash');
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
});
it('should return ERROR on git error', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockRejectedValue(new Error('git error'));
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.ERROR);
});
it('should return UPDATE_AVAILABLE for local extension with different version', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockReturnValue({
name: 'test',
version: '2.0.0',
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
});
it('should return UP_TO_DATE for local extension with same version', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockReturnValue({
name: 'test',
version: '1.0.0',
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
});
it('should return NOT_UPDATABLE for local extension when source cannot be loaded', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockImplementation(() => {
throw new Error('Cannot load config');
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
});
});
describe('findReleaseAsset', () => {

View File

@@ -5,19 +5,38 @@
*/
import { simpleGit } from 'simple-git';
import { getErrorMessage } from '../../utils/errors.js';
import type {
ExtensionInstallMetadata,
GeminiCLIExtension,
} from '@qwen-code/qwen-code-core';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { getErrorMessage } from '../utils/errors.js';
import * as os from 'node:os';
import * as https from 'node:https';
import * as fs from 'node:fs';
import * as path from 'node:path';
import { EXTENSIONS_CONFIG_FILENAME, loadExtension } from '../extension.js';
import { EXTENSIONS_CONFIG_FILENAME } from './variables.js';
import * as tar from 'tar';
import extract from 'extract-zip';
import {
ExtensionUpdateState,
type Extension,
type ExtensionConfig,
type ExtensionManager,
} from './extensionManager.js';
import type { ExtensionInstallMetadata } from '../config/config.js';
interface GithubReleaseData {
assets: Asset[];
tag_name: string;
tarball_url?: string;
zipball_url?: string;
}
interface Asset {
name: string;
browser_download_url: string;
}
export interface GitHubDownloadResult {
tagName: string;
type: 'git' | 'github-release';
}
function getGitHubToken(): string | undefined {
return process.env['GITHUB_TOKEN'];
@@ -115,38 +134,40 @@ async function fetchReleaseFromGithub(
}
export async function checkForExtensionUpdate(
extension: GeminiCLIExtension,
setExtensionUpdateState: (updateState: ExtensionUpdateState) => void,
cwd: string = process.cwd(),
): Promise<void> {
setExtensionUpdateState(ExtensionUpdateState.CHECKING_FOR_UPDATES);
extension: Extension,
extensionManager: ExtensionManager,
): Promise<ExtensionUpdateState> {
const installMetadata = extension.installMetadata;
if (installMetadata?.type === 'local') {
const newExtension = loadExtension({
extensionDir: installMetadata.source,
workspaceDir: cwd,
});
if (!newExtension) {
let latestConfig: ExtensionConfig | undefined;
try {
latestConfig = extensionManager.loadExtensionConfig({
extensionDir: installMetadata.source,
});
} catch (e) {
console.error(
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}. Error: ${getErrorMessage(e)}`,
);
return ExtensionUpdateState.NOT_UPDATABLE;
}
if (!latestConfig) {
console.error(
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.NOT_UPDATABLE;
}
if (newExtension.config.version !== extension.version) {
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
if (latestConfig.version !== extension.version) {
return ExtensionUpdateState.UPDATE_AVAILABLE;
}
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
if (
!installMetadata ||
(installMetadata.type !== 'git' &&
installMetadata.type !== 'github-release')
) {
setExtensionUpdateState(ExtensionUpdateState.NOT_UPDATABLE);
return;
return ExtensionUpdateState.NOT_UPDATABLE;
}
try {
if (installMetadata.type === 'git') {
@@ -154,14 +175,12 @@ export async function checkForExtensionUpdate(
const remotes = await git.getRemotes(true);
if (remotes.length === 0) {
console.error('No git remotes found.');
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const remoteUrl = remotes[0].refs.fetch;
if (!remoteUrl) {
console.error(`No fetch URL found for git remote ${remotes[0].name}.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
// Determine the ref to check on the remote.
@@ -171,8 +190,7 @@ export async function checkForExtensionUpdate(
if (typeof lsRemoteOutput !== 'string' || lsRemoteOutput.trim() === '') {
console.error(`Git ref ${refToCheck} not found.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const remoteHash = lsRemoteOutput.split('\t')[0];
@@ -182,21 +200,17 @@ export async function checkForExtensionUpdate(
console.error(
`Unable to parse hash from git ls-remote output "${lsRemoteOutput}"`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
if (remoteHash === localHash) {
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
return ExtensionUpdateState.UPDATE_AVAILABLE;
} else {
const { source, releaseTag } = installMetadata;
if (!source) {
console.error(`No "source" provided for extension.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const { owner, repo } = parseGitHubRepoForReleases(source);
@@ -206,30 +220,28 @@ export async function checkForExtensionUpdate(
installMetadata.ref,
);
if (releaseData.tag_name !== releaseTag) {
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
return ExtensionUpdateState.UPDATE_AVAILABLE;
}
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
} catch (error) {
console.error(
`Failed to check for updates for extension "${installMetadata.source}": ${getErrorMessage(error)}`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
}
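With this change the callback-based progress reporting is gone: callers await the returned ExtensionUpdateState directly. A hedged call-site sketch (the extension and extensionManager values come from the caller and are assumed here):

// Illustrative call site for the new signature; surrounding names are assumed.
const state = await checkForExtensionUpdate(extension, extensionManager);
if (state === ExtensionUpdateState.UPDATE_AVAILABLE) {
  console.log(`Update available for ${extension.name}`);
}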
export interface GitHubDownloadResult {
tagName: string;
type: 'git' | 'github-release';
}
export async function downloadFromGitHubRelease(
installMetadata: ExtensionInstallMetadata,
destination: string,
): Promise<GitHubDownloadResult> {
const { source, ref } = installMetadata;
const { owner, repo } = parseGitHubRepoForReleases(source);
const { source, ref, marketplace, type } = installMetadata;
const { owner, repo } = parseGitHubRepoForReleases(
type === 'marketplace' && marketplace
? marketplace.marketplaceSource
: source,
);
try {
const releaseData = await fetchReleaseFromGithub(owner, repo, ref);
@@ -276,28 +288,32 @@ export async function downloadFromGitHubRelease(
// For regular github releases, the repository is put inside of a top level
// directory. In this case we should see exactly two file in the destination
// dir, the archive and the directory. If we see that, validate that the
// dir has a qwen extension configuration file and then move all files
// from the directory up one level into the destination directory.
// dir has a qwen extension configuration file (or gemini-extension.json
// which will be converted later) and then move all files from the directory
// up one level into the destination directory.
const entries = await fs.promises.readdir(destination, {
withFileTypes: true,
});
if (entries.length === 2) {
const lonelyDir = entries.find((entry) => entry.isDirectory());
if (
lonelyDir &&
fs.existsSync(
if (lonelyDir) {
const hasQwenConfig = fs.existsSync(
path.join(destination, lonelyDir.name, EXTENSIONS_CONFIG_FILENAME),
)
) {
const dirPathToExtract = path.join(destination, lonelyDir.name);
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
for (const file of extractedDirFiles) {
await fs.promises.rename(
path.join(dirPathToExtract, file),
path.join(destination, file),
);
);
const hasGeminiConfig = fs.existsSync(
path.join(destination, lonelyDir.name, 'gemini-extension.json'),
);
if (hasQwenConfig || hasGeminiConfig) {
const dirPathToExtract = path.join(destination, lonelyDir.name);
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
for (const file of extractedDirFiles) {
await fs.promises.rename(
path.join(dirPathToExtract, file),
path.join(destination, file),
);
}
await fs.promises.rmdir(dirPathToExtract);
}
await fs.promises.rmdir(dirPathToExtract);
}
}
@@ -313,18 +329,6 @@ export async function downloadFromGitHubRelease(
}
}
interface GithubReleaseData {
assets: Asset[];
tag_name: string;
tarball_url?: string;
zipball_url?: string;
}
interface Asset {
name: string;
browser_download_url: string;
}
export function findReleaseAsset(assets: Asset[]): Asset | undefined {
const platform = os.platform();
const arch = os.arch();

View File

@@ -0,0 +1,3 @@
export * from './extensionManager.js';
export * from './variables.js';
export * from './github.js';

View File

@@ -0,0 +1,78 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import { parseMarketplaceSource } from './marketplace.js';
describe('Marketplace Installation', () => {
describe('parseMarketplaceSource', () => {
it('should parse valid marketplace source with http URL', () => {
const result = parseMarketplaceSource(
'http://example.com/marketplace:my-plugin',
);
expect(result).toEqual({
marketplaceSource: 'http://example.com/marketplace',
pluginName: 'my-plugin',
});
});
it('should parse valid marketplace source with https URL', () => {
const result = parseMarketplaceSource(
'https://github.com/example/marketplace:awesome-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://github.com/example/marketplace',
pluginName: 'awesome-plugin',
});
});
it('should handle plugin names with hyphens', () => {
const result = parseMarketplaceSource(
'https://example.com:my-super-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com',
pluginName: 'my-super-plugin',
});
});
it('should handle URLs with ports', () => {
const result = parseMarketplaceSource(
'https://example.com:8080/marketplace:plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com:8080/marketplace',
pluginName: 'plugin',
});
});
it('should return null for source without colon separator', () => {
const result = parseMarketplaceSource('https://example.com/plugin');
expect(result).toBeNull();
});
it('should return null for source without URL', () => {
const result = parseMarketplaceSource('not-a-url:plugin');
expect(result).toBeNull();
});
it('should return null for source with empty plugin name', () => {
const result = parseMarketplaceSource('https://example.com:');
expect(result).toBeNull();
});
it('should use last colon as separator', () => {
// URLs with ports have colons, should use the last one
const result = parseMarketplaceSource(
'https://example.com:8080:my-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com:8080',
pluginName: 'my-plugin',
});
});
});
});

View File

@@ -0,0 +1,259 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* This module handles installation of extensions from Claude marketplaces.
*
* A marketplace URL format: marketplace-url:plugin-name
* Example: https://github.com/example/marketplace:my-plugin
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import type { ExtensionConfig } from './extensionManager.js';
import {
convertClaudeToQwenConfig,
mergeClaudeConfigs,
type ClaudeMarketplaceConfig,
type ClaudeMarketplacePluginConfig,
type ClaudePluginConfig,
} from './claude-converter.js';
import { cloneFromGit, downloadFromGitHubRelease } from './github.js';
import type { ExtensionInstallMetadata } from '../config/config.js';
export interface MarketplaceInstallOptions {
marketplaceUrl: string;
pluginName: string;
tempDir: string;
requestConsent: (consent: string) => Promise<boolean>;
}
export interface MarketplaceInstallResult {
config: ExtensionConfig;
sourcePath: string;
installMetadata: ExtensionInstallMetadata;
}
/**
* Parse marketplace install source string.
* Format: marketplace-url:plugin-name
*/
export function parseMarketplaceSource(source: string): {
marketplaceSource: string;
pluginName: string;
} | null {
// Check if source contains a colon separator
const lastColonIndex = source.lastIndexOf(':');
if (lastColonIndex === -1) {
return null;
}
// Split at the last colon to separate URL from plugin name
const marketplaceSource = source.substring(0, lastColonIndex);
const pluginName = source.substring(lastColonIndex + 1);
// Validate that marketplace URL looks like a URL
if (
!marketplaceSource.startsWith('http://') &&
!marketplaceSource.startsWith('https://')
) {
return null;
}
if (!pluginName || pluginName.length === 0) {
return null;
}
return { marketplaceSource, pluginName };
}
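Because the plugin name is split off at the last colon, URLs that themselves contain colons (scheme, port) survive intact. A few illustrative inputs:

// Illustrative only.
parseMarketplaceSource('https://github.com/example/marketplace:my-plugin');
// => { marketplaceSource: 'https://github.com/example/marketplace', pluginName: 'my-plugin' }
parseMarketplaceSource('https://example.com:8080/marketplace:plugin');
// => { marketplaceSource: 'https://example.com:8080/marketplace', pluginName: 'plugin' }
parseMarketplaceSource('not-a-url:plugin'); // => null (source must be http(s))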
/**
* Install an extension from a Claude marketplace.
*
* Process:
* 1. Download marketplace repository
* 2. Parse marketplace.json
* 3. Find the specified plugin
* 4. Download/copy plugin source
* 5. Merge configurations (if strict mode)
* 6. Convert to Qwen format
*/
export async function installFromMarketplace(
options: MarketplaceInstallOptions,
): Promise<MarketplaceInstallResult> {
const {
marketplaceUrl,
pluginName,
tempDir,
requestConsent: _requestConsent,
} = options;
// Step 1: Download marketplace repository
const marketplaceDir = path.join(tempDir, 'marketplace');
await fs.promises.mkdir(marketplaceDir, { recursive: true });
console.log(`Downloading marketplace from ${marketplaceUrl}...`);
const installMetadata: ExtensionInstallMetadata = {
source: marketplaceUrl,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, marketplaceDir);
} catch {
await cloneFromGit(installMetadata, marketplaceDir);
}
// Step 2: Parse marketplace.json
const marketplaceConfigPath = path.join(marketplaceDir, 'marketplace.json');
if (!fs.existsSync(marketplaceConfigPath)) {
throw new Error(
`Marketplace configuration not found at ${marketplaceConfigPath}`,
);
}
const marketplaceConfigContent = await fs.promises.readFile(
marketplaceConfigPath,
'utf-8',
);
const marketplaceConfig: ClaudeMarketplaceConfig = JSON.parse(
marketplaceConfigContent,
);
// Step 3: Find the plugin
const pluginConfig = marketplaceConfig.plugins.find(
(p) => p.name.toLowerCase() === pluginName.toLowerCase(),
);
if (!pluginConfig) {
throw new Error(
`Plugin "${pluginName}" not found in marketplace. Available plugins: ${marketplaceConfig.plugins.map((p) => p.name).join(', ')}`,
);
}
// Step 4: Download/copy plugin source
const pluginDir = path.join(tempDir, 'plugin');
await fs.promises.mkdir(pluginDir, { recursive: true });
const pluginSource = await resolvePluginSource(
pluginConfig,
marketplaceDir,
pluginDir,
);
// Step 5: Merge configurations (if strict mode)
let finalPluginConfig: ClaudePluginConfig;
const strict = pluginConfig.strict ?? true;
if (strict) {
// Read plugin.json from plugin source
const pluginJsonPath = path.join(
pluginSource,
'.claude-plugin',
'plugin.json',
);
if (!fs.existsSync(pluginJsonPath)) {
throw new Error(
`Strict mode requires plugin.json at ${pluginJsonPath}, but file not found`,
);
}
const pluginJsonContent = await fs.promises.readFile(
pluginJsonPath,
'utf-8',
);
const basePluginConfig: ClaudePluginConfig = JSON.parse(pluginJsonContent);
// Merge marketplace config with plugin config
finalPluginConfig = mergeClaudeConfigs(pluginConfig, basePluginConfig);
} else {
// Use marketplace config directly
finalPluginConfig = pluginConfig;
}
// Step 6: Convert to Qwen format
const qwenConfig = convertClaudeToQwenConfig(finalPluginConfig);
return {
config: qwenConfig,
sourcePath: pluginSource,
installMetadata: {
source: `${marketplaceUrl}:${pluginName}`,
type: 'git', // Marketplace installs are treated as git installs
},
};
}
/**
* Resolve plugin source from marketplace plugin configuration.
* Returns the absolute path to the plugin source directory.
*/
async function resolvePluginSource(
pluginConfig: ClaudeMarketplacePluginConfig,
marketplaceDir: string,
pluginDir: string,
): Promise<string> {
const source = pluginConfig.source;
// Handle string source (relative path or URL)
if (typeof source === 'string') {
// Check if it's a URL
if (source.startsWith('http://') || source.startsWith('https://')) {
// Download from URL
const installMetadata: ExtensionInstallMetadata = {
source,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
// Relative path within marketplace
const pluginRoot = marketplaceDir;
const sourcePath = path.join(pluginRoot, source);
if (!fs.existsSync(sourcePath)) {
throw new Error(`Plugin source not found at ${sourcePath}`);
}
// Copy to plugin directory
await fs.promises.cp(sourcePath, pluginDir, { recursive: true });
return pluginDir;
}
// Handle object source (github or url)
if (source.source === 'github') {
const installMetadata: ExtensionInstallMetadata = {
source: `https://github.com/${source.repo}`,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
if (source.source === 'url') {
const installMetadata: ExtensionInstallMetadata = {
source: source.url,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
throw new Error(`Unsupported plugin source type: ${JSON.stringify(source)}`);
}
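End to end, a marketplace install is: fetch the marketplace repo, look up the plugin entry in marketplace.json, fetch the plugin itself, optionally merge its plugin.json in strict mode, then convert the Claude config to Qwen format. A hedged call-site sketch; the auto-consent callback and the caller name are assumptions for illustration.

// Illustrative only: caller name and consent handling are assumed.
import { parseMarketplaceSource, installFromMarketplace } from './marketplace.js';
import { ExtensionStorage } from './storage.js';

async function installPlugin(source: string) {
  const parsed = parseMarketplaceSource(source);
  if (!parsed) throw new Error(`Not a marketplace source: ${source}`);
  const tempDir = await ExtensionStorage.createTmpDir();
  const result = await installFromMarketplace({
    marketplaceUrl: parsed.marketplaceSource,
    pluginName: parsed.pluginName,
    tempDir,
    requestConsent: async () => true, // assumed auto-consent for the sketch
  });
  console.log(`Resolved ${result.config.name} at ${result.sourcePath}`);
}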

View File

@@ -0,0 +1,110 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, expect, it } from 'vitest';
import { Override } from './override.js';
describe('Override', () => {
it('should create an override from input', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should create a disable override from input', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(true);
expect(override.includeSubdirs).toBe(false);
});
it('should create an override from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir');
expect(override.baseRule).toBe('/path/to/dir');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(false);
});
it('should create a disable override from a file rule', () => {
const override = Override.fromFileRule('!/path/to/dir/');
expect(override.isDisable).toBe(true);
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.includeSubdirs).toBe(false);
});
it('should create an override with subdirs from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir/*');
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should correctly identify conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', false);
expect(override1.conflictsWith(override2)).toBe(true);
});
it('should correctly identify non-conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/another/dir', true);
expect(override1.conflictsWith(override2)).toBe(false);
});
it('should correctly identify equal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(true);
});
it('should correctly identify unequal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('!/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(false);
});
it('should generate the correct regex', () => {
const override = Override.fromInput('/path/to/dir', true);
const regex = override.asRegex();
expect(regex.test('/path/to/dir/')).toBe(true);
expect(regex.test('/path/to/dir/subdir')).toBe(true);
expect(regex.test('/path/to/another/dir')).toBe(false);
});
it('should correctly identify child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify child overrides with glob', () => {
const parent = Override.fromInput('/path/to/dir/*', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify non-child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const other = Override.fromInput('/path/to/another/dir', false);
expect(other.isChildOf(parent)).toBe(false);
});
it('should generate the correct output string', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.output()).toBe(`/path/to/dir/*`);
});
it('should generate the correct output string for a disable override', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
it('should disable a path based on a disable override rule', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
});

View File

@@ -0,0 +1,102 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
export interface ExtensionEnablementConfig {
overrides: string[];
}
export interface AllExtensionsEnablementConfig {
[extensionName: string]: ExtensionEnablementConfig;
}
export class Override {
constructor(
public baseRule: string,
public isDisable: boolean,
public includeSubdirs: boolean,
) {}
static fromInput(inputRule: string, includeSubdirs: boolean): Override {
const isDisable = inputRule.startsWith('!');
let baseRule = isDisable ? inputRule.substring(1) : inputRule;
baseRule = ensureLeadingAndTrailingSlash(baseRule);
return new Override(baseRule, isDisable, includeSubdirs);
}
static fromFileRule(fileRule: string): Override {
const isDisable = fileRule.startsWith('!');
let baseRule = isDisable ? fileRule.substring(1) : fileRule;
const includeSubdirs = baseRule.endsWith('*');
baseRule = includeSubdirs
? baseRule.substring(0, baseRule.length - 1)
: baseRule;
return new Override(baseRule, isDisable, includeSubdirs);
}
conflictsWith(other: Override): boolean {
if (this.baseRule === other.baseRule) {
return (
this.includeSubdirs !== other.includeSubdirs ||
this.isDisable !== other.isDisable
);
}
return false;
}
isEqualTo(other: Override): boolean {
return (
this.baseRule === other.baseRule &&
this.includeSubdirs === other.includeSubdirs &&
this.isDisable === other.isDisable
);
}
asRegex(): RegExp {
return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
}
isChildOf(parent: Override) {
if (!parent.includeSubdirs) {
return false;
}
return parent.asRegex().test(this.baseRule);
}
output(): string {
return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
}
matchesPath(path: string) {
return this.asRegex().test(path);
}
}
const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
// Normalize separators to forward slashes for consistent matching across platforms.
let result = dirPath.replace(/\\/g, '/');
if (result.charAt(0) !== '/') {
result = '/' + result;
}
if (result.charAt(result.length - 1) !== '/') {
result = result + '/';
}
return result;
};
/**
* Converts a glob pattern to a RegExp object.
* This is a simplified implementation that supports `*`.
*
* @param glob The glob pattern to convert.
* @returns A RegExp object.
*/
function globToRegex(glob: string): RegExp {
const regexString = glob
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
.replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group
return new RegExp(`^${regexString}$`);
}
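As a quick worked example of the rule syntax: a leading '!' disables, a trailing '*' applies the rule to subdirectories, and paths are normalized to have leading and trailing slashes. Illustrative only:

// Illustrative only.
const enableTree = Override.fromInput('/repos/app', true);
enableTree.output();                       // => '/repos/app/*'
enableTree.matchesPath('/repos/app/src/'); // => true

const disableDir = Override.fromFileRule('!/repos/app/vendor/');
disableDir.isDisable;                      // => true
disableDir.isChildOf(enableTree);          // => true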

View File

@@ -0,0 +1,138 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import { parseEnvFile, generateEnvFile, validateSettings } from './settings.js';
import type { ExtensionSetting } from './extensionManager.js';
describe('Extension Settings', () => {
describe('parseEnvFile', () => {
it('should parse simple KEY=VALUE pairs', () => {
const content = 'API_KEY=abc123\nSERVER_URL=http://example.com';
const result = parseEnvFile(content);
expect(result).toEqual({
API_KEY: 'abc123',
SERVER_URL: 'http://example.com',
});
});
it('should skip empty lines and comments', () => {
const content = `
# This is a comment
API_KEY=secret
# Another comment
DEBUG=true
`;
const result = parseEnvFile(content);
expect(result).toEqual({
API_KEY: 'secret',
DEBUG: 'true',
});
});
it('should handle quoted values', () => {
const content = `API_KEY="my secret key"\nPATH='/usr/local/bin'`;
const result = parseEnvFile(content);
expect(result).toEqual({
API_KEY: 'my secret key',
PATH: '/usr/local/bin',
});
});
it('should ignore invalid lines', () => {
const content = 'VALID=value\nINVALID LINE\nANOTHER=valid';
const result = parseEnvFile(content);
expect(result).toEqual({
VALID: 'value',
ANOTHER: 'valid',
});
});
});
describe('generateEnvFile', () => {
it('should generate properly formatted .env content', () => {
const settings = {
API_KEY: 'secret123',
DEBUG: 'true',
};
const result = generateEnvFile(settings);
expect(result).toContain('API_KEY=secret123');
expect(result).toContain('DEBUG=true');
expect(result).toContain('# Extension Settings');
});
it('should quote values with spaces', () => {
const settings = {
MESSAGE: 'Hello World',
PATH: '/usr/local/bin',
};
const result = generateEnvFile(settings);
expect(result).toContain('MESSAGE="Hello World"');
expect(result).toContain('PATH=/usr/local/bin');
});
});
describe('validateSettings', () => {
it('should pass validation for valid string settings', () => {
const settingsConfig: ExtensionSetting[] = [
{
name: 'API Key',
description: 'Your API key for the service',
envVar: 'API_KEY',
},
];
const settings = { API_KEY: 'my-key' };
const errors = validateSettings(settings, settingsConfig);
expect(errors).toHaveLength(0);
});
it('should fail validation for non-string values', () => {
const settingsConfig: ExtensionSetting[] = [
{
name: 'API Key',
description: 'Your API key for the service',
envVar: 'API_KEY',
},
];
// In TypeScript, this would be caught at compile time,
// but at runtime we check the type
const settings = { API_KEY: 123 as unknown as string };
const errors = validateSettings(settings, settingsConfig);
expect(errors).toHaveLength(1);
expect(errors[0]).toContain('API Key');
expect(errors[0]).toContain('string');
});
it('should allow undefined/missing settings (all settings are optional)', () => {
const settingsConfig: ExtensionSetting[] = [
{
name: 'Optional Setting',
description: 'An optional setting',
envVar: 'OPTIONAL_VAR',
},
];
const settings = {};
const errors = validateSettings(settings, settingsConfig);
expect(errors).toHaveLength(0);
});
it('should validate sensitive settings the same way', () => {
const settingsConfig: ExtensionSetting[] = [
{
name: 'Secret Key',
description: 'Your secret key',
envVar: 'SECRET_KEY',
sensitive: true,
},
];
const validSettings = { SECRET_KEY: 'super-secret' };
expect(validateSettings(validSettings, settingsConfig)).toHaveLength(0);
});
});
});

View File

@@ -0,0 +1,149 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* This module handles extension settings management.
* Settings are stored in .env files within extension directories.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import type { ExtensionSetting } from './extensionManager.js';
/**
* Parse .env file content into key-value pairs.
* Simple parser that handles:
* - KEY=VALUE format
* - Comments starting with #
* - Empty lines
*/
export function parseEnvFile(content: string): Record<string, string> {
const result: Record<string, string> = {};
const lines = content.split('\n');
for (const line of lines) {
const trimmed = line.trim();
// Skip empty lines and comments
if (!trimmed || trimmed.startsWith('#')) {
continue;
}
// Parse KEY=VALUE
const equalIndex = trimmed.indexOf('=');
if (equalIndex === -1) {
continue; // Invalid line, skip
}
const key = trimmed.substring(0, equalIndex).trim();
const value = trimmed.substring(equalIndex + 1).trim();
// Remove quotes if present
let cleanValue = value;
if (
(value.startsWith('"') && value.endsWith('"')) ||
(value.startsWith("'") && value.endsWith("'"))
) {
cleanValue = value.substring(1, value.length - 1);
}
result[key] = cleanValue;
}
return result;
}
/**
* Generate .env file content from key-value pairs.
*/
export function generateEnvFile(settings: Record<string, string>): string {
const lines: string[] = [];
lines.push('# Extension Settings');
lines.push('# Generated by Qwen Code');
lines.push('');
for (const [key, value] of Object.entries(settings)) {
// Quote values that contain spaces
const quotedValue = value.includes(' ') ? `"${value}"` : value;
lines.push(`${key}=${quotedValue}`);
}
return lines.join('\n') + '\n';
}
/**
* Load settings from extension .env file.
*/
export async function loadExtensionSettings(
extensionPath: string,
): Promise<Record<string, string>> {
const envPath = path.join(extensionPath, '.env');
try {
const content = await fs.promises.readFile(envPath, 'utf-8');
return parseEnvFile(content);
} catch (error) {
// If .env file doesn't exist, return empty object
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
return {};
}
throw error;
}
}
/**
* Save settings to extension .env file.
*/
export async function saveExtensionSettings(
extensionPath: string,
settings: Record<string, string>,
): Promise<void> {
const envPath = path.join(extensionPath, '.env');
const content = generateEnvFile(settings);
await fs.promises.writeFile(envPath, content, 'utf-8');
}
/**
* Validate settings against configuration.
* Returns array of validation errors (empty if valid).
*
* Note: This validates that environment variables are properly set.
* In Gemini Extension format, all settings are treated as strings.
*/
export function validateSettings(
settings: Record<string, string>,
settingsConfig: ExtensionSetting[],
): string[] {
const errors: string[] = [];
for (const config of settingsConfig) {
const value = settings[config.envVar];
// Basic validation - check if value exists and is not empty
// Note: All settings are optional in Gemini Extension format
if (value !== undefined && typeof value !== 'string') {
errors.push(
`Setting "${config.name}" (${config.envVar}) must be a string`,
);
}
}
return errors;
}
/**
* Merge extension settings into process environment.
* This allows MCP servers and other extension components to access settings.
*/
export function applySettingsToEnv(settings: Record<string, string>): void {
for (const [key, value] of Object.entries(settings)) {
// Only set if not already defined in process.env
if (process.env[key] === undefined) {
process.env[key] = value;
}
}
}
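The .env helpers are deliberately minimal: only KEY=VALUE lines, comments, and simple quoting are supported, and applySettingsToEnv never overrides variables the user has already exported. A small round-trip sketch of the functions above:

// Illustrative only.
const content = generateEnvFile({ API_KEY: 'abc123', MESSAGE: 'Hello World' });
// content contains: API_KEY=abc123 and MESSAGE="Hello World"
const parsed = parseEnvFile(content);
// parsed => { API_KEY: 'abc123', MESSAGE: 'Hello World' }
applySettingsToEnv(parsed); // sets process.env.API_KEY only if not already defined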

View File

@@ -0,0 +1,101 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { ExtensionStorage } from './storage.js';
import * as os from 'node:os';
import * as path from 'node:path';
import * as fs from 'node:fs';
import {
EXTENSION_SETTINGS_FILENAME,
EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import { Storage } from '../config/storage.js';
vi.mock('node:os');
vi.mock('node:fs', async (importOriginal) => {
const actual = await importOriginal<typeof fs>();
return {
...actual,
promises: {
...actual.promises,
mkdtemp: vi.fn(),
},
};
});
vi.mock('@google/gemini-cli-core');
describe('ExtensionStorage', () => {
const mockHomeDir = '/mock/home';
const extensionName = 'test-extension';
let storage: ExtensionStorage;
beforeEach(() => {
vi.mocked(os.homedir).mockReturnValue(mockHomeDir);
vi.mocked(Storage).mockImplementation(
() =>
({
getExtensionsDir: () =>
path.join(mockHomeDir, '.gemini', 'extensions'),
}) as any, // eslint-disable-line @typescript-eslint/no-explicit-any
);
storage = new ExtensionStorage(extensionName);
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should return the correct extension directory', () => {
const expectedDir = path.join(
mockHomeDir,
'.gemini',
'extensions',
extensionName,
);
expect(storage.getExtensionDir()).toBe(expectedDir);
});
it('should return the correct config path', () => {
const expectedPath = path.join(
mockHomeDir,
'.gemini',
'extensions',
extensionName,
      EXTENSIONS_CONFIG_FILENAME,
);
expect(storage.getConfigPath()).toBe(expectedPath);
});
it('should return the correct env file path', () => {
const expectedPath = path.join(
mockHomeDir,
'.gemini',
'extensions',
extensionName,
      EXTENSION_SETTINGS_FILENAME,
);
expect(storage.getEnvFilePath()).toBe(expectedPath);
});
it('should return the correct user extensions directory', () => {
const expectedDir = path.join(mockHomeDir, '.gemini', 'extensions');
expect(ExtensionStorage.getUserExtensionsDir()).toBe(expectedDir);
});
it('should create a temporary directory', async () => {
    const mockTmpDir = '/tmp/qwen-extension-123';
vi.mocked(fs.promises.mkdtemp).mockResolvedValue(mockTmpDir);
vi.mocked(os.tmpdir).mockReturnValue('/tmp');
const result = await ExtensionStorage.createTmpDir();
expect(fs.promises.mkdtemp).toHaveBeenCalledWith(
      path.join('/tmp', 'qwen-extension'),
);
expect(result).toBe(mockTmpDir);
});
});

View File

@@ -0,0 +1,40 @@
import { Storage } from '../config/storage.js';
import path from 'node:path';
import * as os from 'node:os';
import {
EXTENSION_SETTINGS_FILENAME,
EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import * as fs from 'node:fs';
export class ExtensionStorage {
private readonly extensionName: string;
constructor(extensionName: string) {
this.extensionName = extensionName;
}
getExtensionDir(): string {
return path.join(
ExtensionStorage.getUserExtensionsDir(),
this.extensionName,
);
}
getConfigPath(): string {
return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
}
getEnvFilePath(): string {
return path.join(this.getExtensionDir(), EXTENSION_SETTINGS_FILENAME);
}
static getUserExtensionsDir(): string {
const storage = new Storage(os.homedir());
return storage.getExtensionsDir();
}
static async createTmpDir(): Promise<string> {
return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
}
}

View File

@@ -17,7 +17,7 @@ export interface VariableSchema {
export interface LoadExtensionContext {
extensionDir: string;
workspaceDir: string;
workspaceDir?: string;
}
const PATH_SEPARATOR_DEFINITION = {
@@ -30,6 +30,10 @@ export const VARIABLE_SCHEMA = {
type: 'string',
description: 'The path of the extension in the filesystem.',
},
CLAUDE_PLUGIN_ROOT: {
type: 'string',
    description:
      'The path of the extension in the filesystem (Claude plugin compatibility alias for extensionPath).',
},
workspacePath: {
type: 'string',
description: 'The absolute path of the current workspace.',

View File

@@ -5,6 +5,13 @@
*/
import { type VariableSchema, VARIABLE_SCHEMA } from './variableSchema.js';
import path from 'node:path';
import { QWEN_DIR } from '../config/storage.js';
export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';
export const EXTENSION_SETTINGS_FILENAME = '.env';
export type JsonObject = { [key: string]: JsonValue };
export type JsonArray = JsonValue[];

View File

@@ -53,6 +53,8 @@ export * from './utils/subagentGenerator.js';
export * from './utils/projectSummary.js';
export * from './utils/promptIdContext.js';
export * from './utils/thoughtUtils.js';
export * from './utils/toml-to-markdown-converter.js';
export * from './utils/yaml-parser.js';
// Export services
export * from './services/fileDiscoveryService.js';
@@ -83,6 +85,9 @@ export * from './subagents/index.js';
// Export skills
export * from './skills/index.js';
// Export extension
export * from './extension/index.js';
// Export prompt logic
export * from './prompts/mcp-prompts.js';
@@ -115,6 +120,7 @@ export type {
OAuthCredentials,
} from './mcp/token-storage/types.js';
export { MCPOAuthTokenStorage } from './mcp/oauth-token-storage.js';
export { KeychainTokenStorage } from './mcp/token-storage/keychain-token-storage.js';
export type { MCPOAuthConfig } from './mcp/oauth-provider.js';
export type {
OAuthAuthorizationServerMetadata,

View File

@@ -22,6 +22,7 @@ interface Keytar {
}
const KEYCHAIN_TEST_PREFIX = '__keychain_test__';
const SECRET_PREFIX = '__secret__';
export class KeychainTokenStorage extends BaseTokenStorage {
private keychainAvailable: boolean | null = null;
@@ -137,6 +138,7 @@ export class KeychainTokenStorage extends BaseTokenStorage {
const credentials = await keytar.findCredentials(this.serviceName);
return credentials
.filter((cred) => !cred.account.startsWith(KEYCHAIN_TEST_PREFIX))
.filter((cred) => !cred.account.startsWith(SECRET_PREFIX))
.map((cred: { account: string }) => cred.account);
} catch (error) {
console.error('Failed to list servers from keychain:', error);
@@ -156,9 +158,9 @@ export class KeychainTokenStorage extends BaseTokenStorage {
const result = new Map<string, OAuthCredentials>();
try {
const credentials = (
await keytar.findCredentials(this.serviceName)
).filter((c) => !c.account.startsWith(KEYCHAIN_TEST_PREFIX));
const credentials = (await keytar.findCredentials(this.serviceName))
.filter((c) => !c.account.startsWith(KEYCHAIN_TEST_PREFIX))
.filter((c) => !c.account.startsWith(SECRET_PREFIX));
for (const cred of credentials) {
try {
@@ -248,4 +250,62 @@ export class KeychainTokenStorage extends BaseTokenStorage {
async isAvailable(): Promise<boolean> {
return this.checkKeychainAvailability();
}
async setSecret(key: string, value: string): Promise<void> {
if (!(await this.checkKeychainAvailability())) {
throw new Error('Keychain is not available');
}
const keytar = await this.getKeytar();
if (!keytar) {
throw new Error('Keytar module not available');
}
await keytar.setPassword(this.serviceName, `${SECRET_PREFIX}${key}`, value);
}
async getSecret(key: string): Promise<string | null> {
if (!(await this.checkKeychainAvailability())) {
throw new Error('Keychain is not available');
}
const keytar = await this.getKeytar();
if (!keytar) {
throw new Error('Keytar module not available');
}
return keytar.getPassword(this.serviceName, `${SECRET_PREFIX}${key}`);
}
async deleteSecret(key: string): Promise<void> {
if (!(await this.checkKeychainAvailability())) {
throw new Error('Keychain is not available');
}
const keytar = await this.getKeytar();
if (!keytar) {
throw new Error('Keytar module not available');
}
const deleted = await keytar.deletePassword(
this.serviceName,
`${SECRET_PREFIX}${key}`,
);
if (!deleted) {
throw new Error(`No secret found for key: ${key}`);
}
}
async listSecrets(): Promise<string[]> {
if (!(await this.checkKeychainAvailability())) {
throw new Error('Keychain is not available');
}
const keytar = await this.getKeytar();
if (!keytar) {
throw new Error('Keytar module not available');
}
try {
const credentials = await keytar.findCredentials(this.serviceName);
return credentials
.filter((cred) => cred.account.startsWith(SECRET_PREFIX))
.map((cred) => cred.account.substring(SECRET_PREFIX.length));
} catch (error) {
console.error('Failed to list secrets from keychain:', error);
return [];
}
}
}
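The new secret methods reuse the existing keytar-backed service but namespace entries with SECRET_PREFIX, so OAuth credentials and extension secrets never collide and each listing filters out the other. A hedged usage sketch; the service name below is an assumption for illustration.

// Illustrative only: the service name is an assumption.
const storage = new KeychainTokenStorage('qwen-code-extension-demo');
if (await storage.isAvailable()) {
  await storage.setSecret('API_KEY', 'super-secret');
  console.log(await storage.getSecret('API_KEY')); // 'super-secret'
  console.log(await storage.listSecrets());        // ['API_KEY']
  await storage.deleteSecret('API_KEY');
}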

View File

@@ -76,7 +76,7 @@ export class SkillManager {
const levelsToCheck: SkillLevel[] = options.level
? [options.level]
: ['project', 'user'];
: ['project', 'user', 'extension'];
// Check if we should use cache or force refresh
const shouldUseCache = !options.force && this.skillsCache !== null;
@@ -86,12 +86,12 @@ export class SkillManager {
await this.refreshCache();
}
// Collect skills from each level (project takes precedence over user)
// Collect skills from each level (project takes precedence over user over extension)
for (const level of levelsToCheck) {
const levelSkills = this.skillsCache?.get(level) || [];
for (const skill of levelSkills) {
// Skip if we've already seen this name (precedence: project > user)
// Skip if we've already seen this name (precedence: project > user > extension)
if (seenNames.has(skill.name)) {
continue;
}
@@ -210,7 +210,7 @@ export class SkillManager {
const skillsCache = new Map<SkillLevel, SkillConfig[]>();
this.parseErrors.clear();
const levels: SkillLevel[] = ['project', 'user'];
const levels: SkillLevel[] = ['project', 'user', 'extension'];
for (const level of levels) {
const levelSkills = await this.listSkillsAtLevel(level);
@@ -376,12 +376,22 @@ export class SkillManager {
return [];
}
const baseDir = this.getSkillsBaseDir(level);
// if (level === 'extension') {
// const extensions = this.config.getExtensions();
// }
const baseDir = this.getSkillsBaseDir(level);
const skills = await this.loadSkillsFromDir(baseDir, level);
return skills;
}
async loadSkillsFromDir(
baseDir: string,
level: SkillLevel,
): Promise<SkillConfig[]> {
try {
const entries = await fs.readdir(baseDir, { withFileTypes: true });
const skills: SkillConfig[] = [];
for (const entry of entries) {
// Only process directories (each skill is a directory)
if (!entry.isDirectory()) continue;
@@ -409,7 +419,6 @@ export class SkillManager {
continue;
}
}
return skills;
} catch (_error) {
// Directory doesn't exist or can't be read

View File

@@ -8,8 +8,9 @@
* Represents the storage level for a skill configuration.
* - 'project': Stored in `.qwen/skills/` within the project directory
* - 'user': Stored in `~/.qwen/skills/` in the user's home directory
* - 'extension': Provided by an installed extension
*/
export type SkillLevel = 'project' | 'user';
export type SkillLevel = 'project' | 'user' | 'extension';
/**
* Core configuration for a skill as stored in SKILL.md files.
@@ -43,6 +44,11 @@ export interface SkillConfig {
* The markdown body content from SKILL.md (after the frontmatter)
*/
body: string;
/**
* For extension-level skills: the name of the providing extension
*/
extensionName?: string;
}
/**

View File

@@ -345,7 +345,7 @@ export class SubagentManager {
// Normal mode: load from project, user, and builtin levels
const levelsToCheck: SubagentLevel[] = options.level
? [options.level]
: ['project', 'user', 'builtin'];
: ['project', 'user', 'builtin', 'extension'];
// Check if we should use cache or force refresh
const shouldUseCache = !options.force && this.subagentsCache !== null;
@@ -389,8 +389,16 @@ export class SubagentManager {
break;
case 'level': {
// Project comes before user, user before builtin, then session, with extension last
const levelOrder = { project: 0, user: 1, builtin: 2, session: 3 };
comparison = levelOrder[a.level] - levelOrder[b.level];
const levelOrder = {
project: 0,
user: 1,
builtin: 2,
session: 3,
extension: 4,
};
comparison =
levelOrder[a.level as SubagentLevel] -
levelOrder[b.level as SubagentLevel];
break;
}
default:
@@ -435,7 +443,7 @@ export class SubagentManager {
private async refreshCache(): Promise<void> {
const subagentsCache = new Map();
const levels: SubagentLevel[] = ['project', 'user', 'builtin'];
const levels: SubagentLevel[] = ['project', 'user', 'builtin', 'extension'];
for (const level of levels) {
const levelSubagents = await this.listSubagentsAtLevel(level);
@@ -824,6 +832,14 @@ export class SubagentManager {
let baseDir = level === 'project' ? projectRoot : homeDir;
baseDir = path.join(baseDir, QWEN_CONFIG_DIR, AGENT_CONFIG_DIR);
const subagents = await this.loadSubagentFromDir(baseDir, level);
return subagents;
}
async loadSubagentFromDir(
baseDir: string,
level: SubagentLevel,
): Promise<SubagentConfig[]> {
try {
const files = await fs.readdir(baseDir);
const subagents: SubagentConfig[] = [];

View File

@@ -8,12 +8,18 @@ import type { Content, FunctionDeclaration } from '@google/genai';
/**
* Represents the storage level for a subagent configuration.
* - 'session': Session-level agents provided at runtime, read-only (highest priority)
* - 'project': Stored in `.qwen/agents/` within the project directory
* - 'user': Stored in `~/.qwen/agents/` in the user's home directory
* - 'builtin': Built-in agents embedded in the codebase, always available
* - 'session': Session-level agents provided at runtime, read-only
* - 'extension': Provided by an installed extension
* - 'builtin': Built-in agents embedded in the codebase, always available (lowest priority)
*/
export type SubagentLevel = 'project' | 'user' | 'builtin' | 'session';
export type SubagentLevel =
| 'session'
| 'project'
| 'user'
| 'extension'
| 'builtin';
/**
* Core configuration for a subagent as stored in Markdown files.
@@ -68,6 +74,11 @@ export interface SubagentConfig {
* Built-in agents cannot be modified or deleted.
*/
readonly isBuiltin?: boolean;
/**
* For extension-level subagents: the name of the providing extension
*/
extensionName?: string;
}
/**

View File

@@ -17,6 +17,7 @@ export const EVENT_EXTENSION_DISABLE = 'qwen-code.extension_disable';
export const EVENT_EXTENSION_ENABLE = 'qwen-code.extension_enable';
export const EVENT_EXTENSION_INSTALL = 'qwen-code.extension_install';
export const EVENT_EXTENSION_UNINSTALL = 'qwen-code.extension_uninstall';
export const EVENT_EXTENSION_UPDATE = 'qwen-code.extension_update';
export const EVENT_FLASH_FALLBACK = 'qwen-code.flash_fallback';
export const EVENT_RIPGREP_FALLBACK = 'qwen-code.ripgrep_fallback';
export const EVENT_NEXT_SPEAKER_CHECK = 'qwen-code.next_speaker_check';

View File

@@ -41,6 +41,8 @@ export {
logExtensionEnable,
logExtensionInstallEvent,
logExtensionUninstall,
logExtensionDisable,
logExtensionUpdateEvent,
logRipgrepFallback,
logNextSpeakerCheck,
logAuth,

View File

@@ -38,6 +38,7 @@ import {
EVENT_INVALID_CHUNK,
EVENT_AUTH,
EVENT_SKILL_LAUNCH,
EVENT_EXTENSION_UPDATE,
} from './constants.js';
import {
recordApiErrorMetrics,
@@ -79,6 +80,7 @@ import type {
ExtensionDisableEvent,
ExtensionEnableEvent,
ExtensionUninstallEvent,
ExtensionUpdateEvent,
ExtensionInstallEvent,
ModelSlashCommandEvent,
SubagentExecutionEvent,
@@ -800,6 +802,32 @@ export function logExtensionUninstall(
logger.emit(logRecord);
}
export async function logExtensionUpdateEvent(
config: Config,
event: ExtensionUpdateEvent,
): Promise<void> {
QwenLogger.getInstance(config)?.logExtensionUpdateEvent(event);
const attributes: LogAttributes = {
...getCommonAttributes(config),
...event,
'event.name': EVENT_EXTENSION_UPDATE,
'event.timestamp': new Date().toISOString(),
extension_name: event.extension_name,
extension_id: event.extension_id,
extension_previous_version: event.extension_previous_version,
extension_version: event.extension_version,
extension_source: event.extension_source,
};
const logger = logs.getLogger(SERVICE_NAME);
const logRecord: LogRecord = {
body: `Updated extension ${event.extension_name} from ${event.extension_previous_version} to ${event.extension_version}`,
attributes,
};
logger.emit(logRecord);
}
export function logExtensionEnable(
config: Config,
event: ExtensionEnableEvent,

View File

@@ -41,6 +41,7 @@ import type {
SkillLaunchEvent,
RipgrepFallbackEvent,
EndSessionEvent,
ExtensionUpdateEvent,
} from '../types.js';
import type {
RumEvent,
@@ -739,6 +740,22 @@ export class QwenLogger {
this.flushIfNeeded();
}
logExtensionUpdateEvent(event: ExtensionUpdateEvent): void {
const rumEvent = this.createActionEvent('extension', 'extension_update', {
properties: {
extension_name: event.extension_name,
status: event.status,
extension_id: event.extension_id,
extension_previous_version: event.extension_previous_version,
extension_version: event.extension_version,
extension_source: event.extension_source,
},
});
this.enqueueLogEvent(rumEvent);
this.flushIfNeeded();
}
logExtensionEnableEvent(event: ExtensionEnableEvent): void {
const rumEvent = this.createActionEvent('extension', 'extension_enable', {
properties: {

View File

@@ -666,6 +666,35 @@ export class ExtensionUninstallEvent implements BaseTelemetryEvent {
}
}
export class ExtensionUpdateEvent implements BaseTelemetryEvent {
'event.name': 'extension_update';
'event.timestamp': string;
extension_name: string;
extension_id: string;
extension_previous_version: string;
extension_version: string;
extension_source: string;
status: 'success' | 'error';
constructor(
extension_name: string,
extension_id: string,
extension_version: string,
extension_previous_version: string,
extension_source: string,
status: 'success' | 'error',
) {
this['event.name'] = 'extension_update';
this['event.timestamp'] = new Date().toISOString();
this.extension_name = extension_name;
this.extension_id = extension_id;
this.extension_version = extension_version;
this.extension_previous_version = extension_previous_version;
this.extension_source = extension_source;
this.status = status;
}
}
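As a hedged illustration, the new event class pairs with logExtensionUpdateEvent earlier in this diff roughly as follows; the Config instance and all argument values are placeholders.

// Illustrative only; `config` stands in for an existing Config instance.
const event = new ExtensionUpdateEvent(
  'example-extension',           // extension_name
  'example-extension-id',        // extension_id
  '1.1.0',                       // extension_version (new version)
  '1.0.0',                       // extension_previous_version
  'https://example.com/ext.git', // extension_source
  'success',
);
await logExtensionUpdateEvent(config, event);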
export class ExtensionEnableEvent implements BaseTelemetryEvent {
'event.name': 'extension_enable';
'event.timestamp': string;

View File

@@ -0,0 +1,297 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import {
resolveEnvVarsInString,
resolveEnvVarsInObject,
} from './envVarResolver.js';
describe('resolveEnvVarsInString', () => {
let originalEnv: NodeJS.ProcessEnv;
beforeEach(() => {
originalEnv = { ...process.env };
});
afterEach(() => {
process.env = originalEnv;
});
it('should resolve $VAR_NAME format', () => {
process.env['TEST_VAR'] = 'test-value';
const result = resolveEnvVarsInString('Value is $TEST_VAR');
expect(result).toBe('Value is test-value');
});
it('should resolve ${VAR_NAME} format', () => {
process.env['TEST_VAR'] = 'test-value';
const result = resolveEnvVarsInString('Value is ${TEST_VAR}');
expect(result).toBe('Value is test-value');
});
it('should resolve multiple variables in the same string', () => {
process.env['HOST'] = 'localhost';
process.env['PORT'] = '3000';
const result = resolveEnvVarsInString('URL: http://$HOST:${PORT}/api');
expect(result).toBe('URL: http://localhost:3000/api');
});
it('should leave undefined variables unchanged', () => {
const result = resolveEnvVarsInString('Value is $UNDEFINED_VAR');
expect(result).toBe('Value is $UNDEFINED_VAR');
});
it('should leave undefined variables with braces unchanged', () => {
const result = resolveEnvVarsInString('Value is ${UNDEFINED_VAR}');
expect(result).toBe('Value is ${UNDEFINED_VAR}');
});
it('should handle empty string', () => {
const result = resolveEnvVarsInString('');
expect(result).toBe('');
});
it('should handle string without variables', () => {
const result = resolveEnvVarsInString('No variables here');
expect(result).toBe('No variables here');
});
it('should handle mixed defined and undefined variables', () => {
process.env['DEFINED'] = 'value';
const result = resolveEnvVarsInString('$DEFINED and $UNDEFINED mixed');
expect(result).toBe('value and $UNDEFINED mixed');
});
});
describe('resolveEnvVarsInObject', () => {
let originalEnv: NodeJS.ProcessEnv;
beforeEach(() => {
originalEnv = { ...process.env };
});
afterEach(() => {
process.env = originalEnv;
});
it('should resolve variables in nested objects', () => {
process.env['API_KEY'] = 'secret-123';
process.env['DB_URL'] = 'postgresql://localhost/test';
const config = {
server: {
auth: {
key: '$API_KEY',
},
database: '${DB_URL}',
},
port: 3000,
};
const result = resolveEnvVarsInObject(config);
expect(result).toEqual({
server: {
auth: {
key: 'secret-123',
},
database: 'postgresql://localhost/test',
},
port: 3000,
});
});
it('should resolve variables in arrays', () => {
process.env['ENV'] = 'production';
process.env['VERSION'] = '1.0.0';
const config = {
tags: ['$ENV', 'app', '${VERSION}'],
metadata: {
env: '$ENV',
},
};
const result = resolveEnvVarsInObject(config);
expect(result).toEqual({
tags: ['production', 'app', '1.0.0'],
metadata: {
env: 'production',
},
});
});
it('should preserve non-string types', () => {
const config = {
enabled: true,
count: 42,
value: null,
data: undefined,
tags: ['item1', 'item2'],
};
const result = resolveEnvVarsInObject(config);
expect(result).toEqual(config);
});
it('should handle MCP server config structure', () => {
process.env['API_TOKEN'] = 'token-123';
process.env['SERVER_PORT'] = '8080';
const extensionConfig = {
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js', '--port', '${SERVER_PORT}'],
env: {
API_KEY: '$API_TOKEN',
STATIC_VALUE: 'unchanged',
},
timeout: 5000,
},
},
};
const result = resolveEnvVarsInObject(extensionConfig);
expect(result).toEqual({
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js', '--port', '8080'],
env: {
API_KEY: 'token-123',
STATIC_VALUE: 'unchanged',
},
timeout: 5000,
},
},
});
});
it('should handle empty and null values', () => {
const config = {
empty: '',
nullValue: null,
undefinedValue: undefined,
zero: 0,
false: false,
};
const result = resolveEnvVarsInObject(config);
expect(result).toEqual(config);
});
it('should handle circular references in objects without infinite recursion', () => {
process.env['TEST_VAR'] = 'resolved-value';
type ConfigWithCircularRef = {
name: string;
value: number;
self?: ConfigWithCircularRef;
};
const config: ConfigWithCircularRef = {
name: '$TEST_VAR',
value: 42,
};
// Create circular reference
config.self = config;
const result = resolveEnvVarsInObject(config);
expect(result.name).toBe('resolved-value');
expect(result.value).toBe(42);
expect(result.self).toBeDefined();
expect(result.self?.name).toBe('$TEST_VAR'); // Circular reference should be shallow copied
expect(result.self?.value).toBe(42);
// Verify it doesn't create infinite recursion by checking it's not the same object
expect(result.self).not.toBe(result);
});
it('should handle circular references in arrays without infinite recursion', () => {
process.env['ARRAY_VAR'] = 'array-value';
type ArrayWithCircularRef = Array<string | number | ArrayWithCircularRef>;
const arr: ArrayWithCircularRef = ['$ARRAY_VAR', 123];
// Create circular reference
arr.push(arr);
const result = resolveEnvVarsInObject(arr) as ArrayWithCircularRef;
expect(result[0]).toBe('array-value');
expect(result[1]).toBe(123);
expect(Array.isArray(result[2])).toBe(true);
const subArray = result[2] as ArrayWithCircularRef;
expect(subArray[0]).toBe('$ARRAY_VAR'); // Circular reference should be shallow copied
expect(subArray[1]).toBe(123);
// Verify it doesn't create infinite recursion
expect(result[2]).not.toBe(result);
});
it('should handle complex nested circular references', () => {
process.env['NESTED_VAR'] = 'nested-resolved';
type ObjWithRef = {
name: string;
id: number;
ref?: ObjWithRef;
};
const obj1: ObjWithRef = { name: '$NESTED_VAR', id: 1 };
const obj2: ObjWithRef = { name: 'static', id: 2 };
// Create cross-references
obj1.ref = obj2;
obj2.ref = obj1;
const config = {
primary: obj1,
secondary: obj2,
value: '$NESTED_VAR',
};
const result = resolveEnvVarsInObject(config);
expect(result.value).toBe('nested-resolved');
expect(result.primary.name).toBe('nested-resolved');
expect(result.primary.id).toBe(1);
expect(result.secondary.name).toBe('static');
expect(result.secondary.id).toBe(2);
// Check that circular references are handled (shallow copied)
expect(result.primary.ref).toBeDefined();
expect(result.secondary.ref).toBeDefined();
expect(result.primary.ref?.name).toBe('static'); // Should be shallow copy
expect(result.secondary.ref?.name).toBe('nested-resolved'); // The shallow copy still gets processed
// Most importantly: verify no infinite recursion by checking objects are different
expect(result.primary.ref).not.toBe(result.secondary);
expect(result.secondary.ref).not.toBe(result.primary);
expect(result.primary).not.toBe(obj1); // New object created
expect(result.secondary).not.toBe(obj2); // New object created
});
});

View File

@@ -0,0 +1,126 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Resolves environment variables in a string.
* Replaces $VAR_NAME and ${VAR_NAME} with their corresponding environment variable values.
* If the environment variable is not defined, the original placeholder is preserved.
*
* @param value - The string that may contain environment variable placeholders
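* @param customEnv - Optional map of variable values that is checked before process.env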
* @returns The string with environment variables resolved
*
* @example
* resolveEnvVarsInString("Token: $API_KEY") // Returns "Token: secret-123"
* resolveEnvVarsInString("URL: ${BASE_URL}/api") // Returns "URL: https://api.example.com/api"
* resolveEnvVarsInString("Missing: $UNDEFINED_VAR") // Returns "Missing: $UNDEFINED_VAR"
*/
export function resolveEnvVarsInString(
value: string,
customEnv?: Record<string, string>,
): string {
const envVarRegex = /\$(?:(\w+)|{([^}]+)})/g; // Find $VAR_NAME or ${VAR_NAME}
return value.replace(envVarRegex, (match, varName1, varName2) => {
const varName = varName1 || varName2;
if (customEnv && typeof customEnv[varName] === 'string') {
return customEnv[varName];
}
if (process && process.env && typeof process.env[varName] === 'string') {
return process.env[varName]!;
}
return match;
});
}
/**
* Recursively resolves environment variables in an object of any type.
* Handles strings, arrays, nested objects, and preserves other primitive types.
* Protected against circular references using a WeakSet to track visited objects.
*
* @param obj - The object to process for environment variable resolution
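* @param customEnv - Optional map of variable values that is checked before process.env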
* @returns A new object with environment variables resolved
*
* @example
* const config = {
* server: {
* host: "$HOST",
* port: "${PORT}",
* enabled: true,
* tags: ["$ENV", "api"]
* }
* };
* const resolved = resolveEnvVarsInObject(config);
*/
export function resolveEnvVarsInObject<T>(
obj: T,
customEnv?: Record<string, string>,
): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet(), customEnv);
}
/**
* Internal implementation of resolveEnvVarsInObject with circular reference protection.
*
* @param obj - The object to process
* @param visited - WeakSet to track visited objects and prevent circular references
* @returns A new object with environment variables resolved
*/
function resolveEnvVarsInObjectInternal<T>(
obj: T,
visited: WeakSet<object>,
customEnv?: Record<string, string>,
): T {
if (
obj === null ||
obj === undefined ||
typeof obj === 'boolean' ||
typeof obj === 'number'
) {
return obj;
}
if (typeof obj === 'string') {
return resolveEnvVarsInString(obj, customEnv) as unknown as T;
}
if (Array.isArray(obj)) {
// Check for circular reference
if (visited.has(obj)) {
// Return a shallow copy to break the cycle
return [...obj] as unknown as T;
}
visited.add(obj);
const result = obj.map((item) =>
resolveEnvVarsInObjectInternal(item, visited, customEnv),
) as unknown as T;
visited.delete(obj);
return result;
}
if (typeof obj === 'object') {
// Check for circular reference
if (visited.has(obj as object)) {
// Return a shallow copy to break the cycle
return { ...obj } as T;
}
visited.add(obj as object);
const newObj = { ...obj } as T;
for (const key in newObj) {
if (Object.prototype.hasOwnProperty.call(newObj, key)) {
newObj[key] = resolveEnvVarsInObjectInternal(
newObj[key],
visited,
customEnv,
);
}
}
visited.delete(obj as object);
return newObj;
}
return obj;
}
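Because both exports take an optional customEnv map that wins over process.env, a short usage sketch follows (all names and values are placeholders):

process.env['HOST'] = 'localhost';

const serverConfig = {
  url: 'http://$HOST:${PORT}/api',
  headers: { Authorization: 'Bearer $API_TOKEN' },
};

// customEnv takes precedence over process.env; unresolved placeholders stay as-is.
const resolved = resolveEnvVarsInObject(serverConfig, {
  HOST: 'example.internal',
  API_TOKEN: 'token-123',
});
// resolved.url === 'http://example.internal:${PORT}/api' (PORT is undefined, so preserved)
// resolved.headers.Authorization === 'Bearer token-123'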

View File

@@ -0,0 +1,103 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import {
convertTomlToMarkdown,
isTomlFormat,
} from './toml-to-markdown-converter.js';
describe('convertTomlToMarkdown', () => {
it('should convert TOML with description to Markdown', () => {
const tomlContent = `prompt = "This is a test prompt"
description = "Test command"`;
const result = convertTomlToMarkdown(tomlContent);
expect(result).toBe(`---
description: Test command
---
This is a test prompt
`);
});
it('should convert TOML without description to Markdown', () => {
const tomlContent = `prompt = "Simple prompt"`;
const result = convertTomlToMarkdown(tomlContent);
expect(result).toBe('Simple prompt\n');
});
it('should handle multi-line prompts', () => {
const tomlContent = `prompt = """
This is a multi-line
prompt with several
lines of text.
"""
description = "Multi-line test"`;
const result = convertTomlToMarkdown(tomlContent);
expect(result).toContain('This is a multi-line');
expect(result).toContain('description: Multi-line test');
});
it('should throw error for invalid TOML', () => {
const invalidToml = 'this is not valid toml {[}]';
expect(() => convertTomlToMarkdown(invalidToml)).toThrow(
'Failed to parse TOML',
);
});
it('should throw error if prompt field is missing', () => {
const tomlWithoutPrompt = 'description = "No prompt here"';
expect(() => convertTomlToMarkdown(tomlWithoutPrompt)).toThrow(
'TOML must contain a "prompt" field',
);
});
it('should handle special characters in description', () => {
const tomlContent = `prompt = "Test prompt"
description = "Command with: special, characters!"`;
const result = convertTomlToMarkdown(tomlContent);
expect(result).toContain('description: Command with: special, characters!');
});
});
describe('isTomlFormat', () => {
it('should return true for valid TOML', () => {
const validToml = `prompt = "Test"
description = "Description"`;
expect(isTomlFormat(validToml)).toBe(true);
});
it('should return false for invalid TOML', () => {
const invalidToml = '{ this is not toml }';
expect(isTomlFormat(invalidToml)).toBe(false);
});
it('should return true for empty TOML', () => {
expect(isTomlFormat('')).toBe(true);
});
it('should return false for Markdown format', () => {
const markdown = `---
description: Test
---
Prompt content`;
expect(isTomlFormat(markdown)).toBe(false);
});
});

View File

@@ -0,0 +1,74 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Converts TOML command files to Markdown format.
*/
import toml from '@iarna/toml';
export interface TomlCommandFormat {
prompt: string;
description?: string;
}
/**
* Converts a TOML command content to Markdown format.
* @param tomlContent The TOML file content
* @returns The equivalent Markdown content
* @throws Error if TOML parsing fails
*/
export function convertTomlToMarkdown(tomlContent: string): string {
let parsed: unknown;
try {
parsed = toml.parse(tomlContent);
} catch (error) {
throw new Error(
`Failed to parse TOML: ${error instanceof Error ? error.message : String(error)}`,
);
}
if (typeof parsed !== 'object' || parsed === null) {
throw new Error('TOML content must be an object');
}
const obj = parsed as Record<string, unknown>;
if (typeof obj['prompt'] !== 'string') {
throw new Error('TOML must contain a "prompt" field');
}
const prompt = obj['prompt'];
const description =
typeof obj['description'] === 'string' ? obj['description'] : undefined;
// Generate Markdown
if (description) {
return `---
description: ${description}
---
${prompt}
`;
} else {
// No frontmatter if no description
return `${prompt}\n`;
}
}
/**
* Checks if a file content is in TOML format by attempting to parse it.
* @param content File content to check
* @returns true if content is valid TOML
*/
export function isTomlFormat(content: string): boolean {
try {
toml.parse(content);
return true;
} catch {
return false;
}
}
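A brief sketch of how these helpers could drive the command-format migration; the file content shown is invented for illustration:

// Illustrative only: migrate a legacy TOML command file to Markdown.
const legacy = `prompt = "Summarize the current diff"
description = "Summarize changes"`;

const migrated = isTomlFormat(legacy) ? convertTomlToMarkdown(legacy) : legacy;
// migrated now carries `description` as YAML front matter followed by the prompt body;
// files that are already Markdown fail the TOML parse and are left untouched.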

View File

@@ -1,6 +1,6 @@
{
"name": "@qwen-code/sdk",
"version": "0.1.1",
"version": "0.1.0",
"description": "TypeScript SDK for programmatic access to qwen-code CLI",
"main": "./dist/index.cjs",
"module": "./dist/index.mjs",