Compare commits

..

8 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| LaZzyMan | b37ede07e8 | fix/gemini extension install error | 2026-01-14 17:48:25 +08:00 |
| LaZzyMan | 0a88dd7861 | fix: fix tests | 2026-01-14 16:50:59 +08:00 |
| LaZzyMan | 70991e474f | fix/lint error | 2026-01-14 15:42:50 +08:00 |
| LaZzyMan | 551e546974 | feat: move extension to core package | 2026-01-14 15:30:27 +08:00 |
| LaZzyMan | 74013bd8b2 | feat: settings extension | 2026-01-08 13:49:59 +08:00 |
| LaZzyMan | 18713ef2b0 | feat: install from gemini | 2026-01-07 19:17:34 +08:00 |
| LaZzyMan | 50dac93c80 | feat: migrate command format | 2026-01-07 13:43:00 +08:00 |
| LaZzyMan | 22504b0a5b | feat: add extension for gemini and claude | 2026-01-07 11:06:17 +08:00 |
205 changed files with 8797 additions and 16854 deletions

View File

@@ -11,7 +11,6 @@ export default {
type: 'separator',
},
'sdk-typescript': 'Typescript SDK',
'sdk-java': 'Java SDK(alpha)',
'Dive Into Qwen Code': {
title: 'Dive Into Qwen Code',
type: 'separator',

View File

@@ -1,312 +0,0 @@
# Qwen Code Java SDK
The Qwen Code Java SDK is a minimal, experimental SDK for programmatic access to Qwen Code functionality. It provides a Java interface to the Qwen Code CLI, allowing developers to integrate Qwen Code capabilities into their Java applications.
## Requirements
- Java >= 1.8
- Maven >= 3.6.0 (for building from source)
- qwen-code >= 0.5.0
### Dependencies
- **Logging**: ch.qos.logback:logback-classic
- **Utilities**: org.apache.commons:commons-lang3
- **JSON Processing**: com.alibaba.fastjson2:fastjson2
- **Testing**: JUnit 5 (org.junit.jupiter:junit-jupiter)
## Installation
Add the following dependency to your Maven `pom.xml`:
```xml
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>qwencode-sdk</artifactId>
<version>{$version}</version>
</dependency>
```
Or if using Gradle, add to your `build.gradle`:
```gradle
implementation 'com.alibaba:qwencode-sdk:{$version}'
```
## Building and Running
### Build Commands
```bash
# Compile the project
mvn compile
# Run tests
mvn test
# Package the JAR
mvn package
# Install to local repository
mvn install
```
## Quick Start
The simplest way to use the SDK is through the `QwenCodeCli.simpleQuery()` method:
```java
public static void runSimpleExample() {
List<String> result = QwenCodeCli.simpleQuery("hello world");
result.forEach(logger::info);
}
```
For more advanced usage with custom transport options:
```java
public static void runTransportOptionsExample() {
TransportOptions options = new TransportOptions()
.setModel("qwen3-coder-flash")
.setPermissionMode(PermissionMode.AUTO_EDIT)
.setCwd("./")
.setEnv(new HashMap<String, String>() {{put("CUSTOM_VAR", "value");}})
.setIncludePartialMessages(true)
.setTurnTimeout(new Timeout(120L, TimeUnit.SECONDS))
.setMessageTimeout(new Timeout(90L, TimeUnit.SECONDS))
.setAllowedTools(Arrays.asList("read_file", "write_file", "list_directory"));
List<String> result = QwenCodeCli.simpleQuery("who are you, what are your capabilities?", options);
result.forEach(logger::info);
}
```
For streaming content handling with custom content consumers:
```java
public static void runStreamingExample() {
QwenCodeCli.simpleQuery("who are you, what are your capabilities?",
new TransportOptions().setMessageTimeout(new Timeout(10L, TimeUnit.SECONDS)), new AssistantContentSimpleConsumers() {
@Override
public void onText(Session session, TextAssistantContent textAssistantContent) {
logger.info("Text content received: {}", textAssistantContent.getText());
}
@Override
public void onThinking(Session session, ThingkingAssistantContent thingkingAssistantContent) {
logger.info("Thinking content received: {}", thingkingAssistantContent.getThinking());
}
@Override
public void onToolUse(Session session, ToolUseAssistantContent toolUseContent) {
logger.info("Tool use content received: {} with arguments: {}",
toolUseContent, toolUseContent.getInput());
}
@Override
public void onToolResult(Session session, ToolResultAssistantContent toolResultContent) {
logger.info("Tool result content received: {}", toolResultContent.getContent());
}
@Override
public void onOtherContent(Session session, AssistantContent<?> other) {
logger.info("Other content received: {}", other);
}
@Override
public void onUsage(Session session, AssistantUsage assistantUsage) {
logger.info("Usage information received: Input tokens: {}, Output tokens: {}",
assistantUsage.getUsage().getInputTokens(), assistantUsage.getUsage().getOutputTokens());
}
}.setDefaultPermissionOperation(Operation.allow));
logger.info("Streaming example completed.");
}
```
For more examples, see `src/test/java/com/alibaba/qwen/code/cli/example`.
## Architecture
The SDK follows a layered architecture:
- **API Layer**: Provides the main entry points through `QwenCodeCli` class with simple static methods for basic usage
- **Session Layer**: Manages communication sessions with the Qwen Code CLI through the `Session` class
- **Transport Layer**: Handles the communication mechanism between the SDK and CLI process (currently using process transport via `ProcessTransport`)
- **Protocol Layer**: Defines data structures for communication based on the CLI protocol
- **Utils**: Common utilities for concurrent execution, timeout handling, and error management
## Key Features
### Permission Modes
The SDK supports different permission modes for controlling tool execution (a short usage sketch follows this list):
- **`default`**: Write tools are denied unless approved via the `canUseTool` callback or listed in `allowedTools`. Read-only tools execute without confirmation.
- **`plan`**: Blocks all write tools and instructs the AI to present a plan first.
- **`auto-edit`**: Automatically approves edit tools (`edit`, `write_file`); other tools still require confirmation.
- **`yolo`**: All tools execute automatically without confirmation.
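For example, a minimal sketch reusing the `TransportOptions` and `PermissionMode` API from the examples above (the `PLAN`, `YOLO`, and `DEFAULT` constants are assumed to exist alongside the `AUTO_EDIT` constant shown earlier):
```java
// Minimal sketch: choosing a permission mode per use case.
// Assumes PermissionMode exposes PLAN, YOLO, and DEFAULT alongside AUTO_EDIT.
TransportOptions planOptions = new TransportOptions()
    .setPermissionMode(PermissionMode.PLAN);       // block writes, ask the AI for a plan first
TransportOptions yoloOptions = new TransportOptions()
    .setPermissionMode(PermissionMode.YOLO);       // every tool runs without confirmation
TransportOptions defaultWithAllowList = new TransportOptions()
    .setPermissionMode(PermissionMode.DEFAULT)     // writes denied unless pre-approved...
    .setAllowedTools(Arrays.asList("write_file")); // ...except tools listed here
```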
### Session Event Consumers and Assistant Content Consumers
The SDK provides two key interfaces for handling events and content from the CLI:
#### SessionEventConsumers Interface
The `SessionEventConsumers` interface provides callbacks for different types of messages during a session:
- `onSystemMessage`: Handles system messages from the CLI (receives Session and SDKSystemMessage)
- `onResultMessage`: Handles result messages from the CLI (receives Session and SDKResultMessage)
- `onAssistantMessage`: Handles assistant messages (AI responses) (receives Session and SDKAssistantMessage)
- `onPartialAssistantMessage`: Handles partial assistant messages during streaming (receives Session and SDKPartialAssistantMessage)
- `onUserMessage`: Handles user messages (receives Session and SDKUserMessage)
- `onOtherMessage`: Handles other types of messages (receives Session and String message)
- `onControlResponse`: Handles control responses (receives Session and CLIControlResponse)
- `onControlRequest`: Handles control requests (receives Session and CLIControlRequest, returns CLIControlResponse)
- `onPermissionRequest`: Handles permission requests (receives Session and CLIControlRequest<CLIControlPermissionRequest>, returns Behavior)
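A minimal sketch of wiring a session-level handler, assuming the `SessionEventSimpleConsumers` convenience class (mentioned under the timeout defaults below) provides no-op implementations that can be selectively overridden, and that the message callbacks return `void`:
```java
// Sketch only: override just the callbacks you care about.
SessionEventSimpleConsumers eventConsumers = new SessionEventSimpleConsumers() {
    @Override
    public void onSystemMessage(Session session, SDKSystemMessage message) {
        logger.info("System message: {}", message);
    }
    @Override
    public void onAssistantMessage(Session session, SDKAssistantMessage message) {
        logger.info("Assistant message: {}", message);
    }
    @Override
    public void onResultMessage(Session session, SDKResultMessage message) {
        logger.info("Result message: {}", message);
    }
};
```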
#### AssistantContentConsumers Interface
The `AssistantContentConsumers` interface handles different types of content within assistant messages:
- `onText`: Handles text content (receives Session and TextAssistantContent)
- `onThinking`: Handles thinking content (receives Session and ThingkingAssistantContent)
- `onToolUse`: Handles tool use content (receives Session and ToolUseAssistantContent)
- `onToolResult`: Handles tool result content (receives Session and ToolResultAssistantContent)
- `onOtherContent`: Handles other content types (receives Session and AssistantContent)
- `onUsage`: Handles usage information (receives Session and AssistantUsage)
- `onPermissionRequest`: Handles permission requests (receives Session and CLIControlPermissionRequest, returns Behavior)
- `onOtherControlRequest`: Handles other control requests (receives Session and ControlRequestPayload, returns ControlResponsePayload)
#### Relationship Between the Interfaces
**Important Note on Event Hierarchy:**
- `SessionEventConsumers` is the **high-level** event processor that handles different message types (system, assistant, user, etc.)
- `AssistantContentConsumers` is the **low-level** content processor that handles different types of content within assistant messages (text, tools, thinking, etc.)
**Processor Relationship:**
- `SessionEventConsumers` → `AssistantContentConsumers` (SessionEventConsumers uses AssistantContentConsumers to process content within assistant messages)
**Event Derivation Relationships:**
- `onAssistantMessage` → `onText`, `onThinking`, `onToolUse`, `onToolResult`, `onOtherContent`, `onUsage`
- `onPartialAssistantMessage` → `onText`, `onThinking`, `onToolUse`, `onToolResult`, `onOtherContent`
- `onControlRequest` → `onPermissionRequest`, `onOtherControlRequest`
**Event Timeout Relationships:**
Each event handler method has a corresponding timeout method that allows customizing the timeout behavior for that specific event:
- `onSystemMessage` → `onSystemMessageTimeout`
- `onResultMessage` → `onResultMessageTimeout`
- `onAssistantMessage` → `onAssistantMessageTimeout`
- `onPartialAssistantMessage` → `onPartialAssistantMessageTimeout`
- `onUserMessage` → `onUserMessageTimeout`
- `onOtherMessage` → `onOtherMessageTimeout`
- `onControlResponse` → `onControlResponseTimeout`
- `onControlRequest` → `onControlRequestTimeout`
For AssistantContentConsumers timeout methods:
- `onText` → `onTextTimeout`
- `onThinking` → `onThinkingTimeout`
- `onToolUse` → `onToolUseTimeout`
- `onToolResult` → `onToolResultTimeout`
- `onOtherContent` → `onOtherContentTimeout`
- `onPermissionRequest` → `onPermissionRequestTimeout`
- `onOtherControlRequest` → `onOtherControlRequestTimeout`
**Default Timeout Values:**
- `SessionEventSimpleConsumers` default timeout: 180 seconds (Timeout.TIMEOUT_180_SECONDS)
- `AssistantContentSimpleConsumers` default timeout: 60 seconds (Timeout.TIMEOUT_60_SECONDS)
**Timeout Hierarchy Requirements:**
For proper operation, the following timeout relationships should be maintained:
- `onAssistantMessageTimeout` return value should be greater than `onTextTimeout`, `onThinkingTimeout`, `onToolUseTimeout`, `onToolResultTimeout`, and `onOtherContentTimeout` return values
- `onControlRequestTimeout` return value should be greater than `onPermissionRequestTimeout` and `onOtherControlRequestTimeout` return values
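A hedged sketch of overriding timeouts while keeping that hierarchy intact, assuming the `*Timeout` methods take no arguments and return a `Timeout` (the same type used by `TransportOptions`):
```java
// Sketch only: the exact timeout-method signatures are an assumption.
AssistantContentSimpleConsumers contentConsumers = new AssistantContentSimpleConsumers() {
    @Override
    public Timeout onToolUseTimeout() {
        return new Timeout(90L, TimeUnit.SECONDS);  // per-content timeout
    }
    @Override
    public Timeout onToolResultTimeout() {
        return new Timeout(90L, TimeUnit.SECONDS);
    }
};
SessionEventSimpleConsumers eventConsumers = new SessionEventSimpleConsumers() {
    @Override
    public Timeout onAssistantMessageTimeout() {
        // Must stay greater than the content-level timeouts it wraps.
        return new Timeout(180L, TimeUnit.SECONDS);
    }
};
```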
### Transport Options
The `TransportOptions` class allows configuration of how the SDK communicates with the Qwen Code CLI:
- `pathToQwenExecutable`: Path to the Qwen Code CLI executable
- `cwd`: Working directory for the CLI process
- `model`: AI model to use for the session
- `permissionMode`: Permission mode that controls tool execution
- `env`: Environment variables to pass to the CLI process
- `maxSessionTurns`: Limits the number of conversation turns in a session
- `coreTools`: List of core tools that should be available to the AI
- `excludeTools`: List of tools to exclude from being available to the AI
- `allowedTools`: List of tools that are pre-approved for use without additional confirmation
- `authType`: Authentication type to use for the session
- `includePartialMessages`: Enables receiving partial messages during streaming responses
- `skillsEnable`: Enables or disables skills functionality for the session
- `turnTimeout`: Timeout for a complete turn of conversation
- `messageTimeout`: Timeout for individual messages within a turn
- `resumeSessionId`: ID of a previous session to resume
- `otherOptions`: Additional command-line options to pass to the CLI
### Session Control Features
- **Session creation**: Use `QwenCodeCli.newSession()` to create a new session with custom options
- **Session management**: The `Session` class provides methods to send prompts, handle responses, and manage session state
- **Session cleanup**: Always close sessions using `session.close()` to properly terminate the CLI process
- **Session resumption**: Use `setResumeSessionId()` in `TransportOptions` to resume a previous session
- **Session interruption**: Use `session.interrupt()` to interrupt a currently running prompt
- **Dynamic model switching**: Use `session.setModel()` to change the model during a session
- **Dynamic permission mode switching**: Use `session.setPermissionMode()` to change the permission mode during a session
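Putting the control methods above together, a sketch of the session lifecycle (it assumes `newSession()` accepts a `TransportOptions` instance; the prompt-sending methods on `Session` are not named in this section and are left as a placeholder comment):
```java
public static void runSessionLifecycleExample() throws Exception {
    // Assumption: newSession() takes the same TransportOptions used elsewhere.
    Session session = QwenCodeCli.newSession(new TransportOptions()
        .setModel("qwen3-coder-flash")
        .setPermissionMode(PermissionMode.AUTO_EDIT));
    try {
        // ... send prompts and consume responses via the Session API ...
        session.setModel("qwen3-coder-flash");                // dynamic model switching
        session.setPermissionMode(PermissionMode.AUTO_EDIT);  // dynamic permission mode switching
        session.interrupt();                                  // interrupt a running prompt if needed
    } finally {
        session.close();                                      // always terminate the CLI process
    }
}
```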
### Thread Pool Configuration
The SDK uses a thread pool for managing concurrent operations with the following default configuration:
- **Core Pool Size**: 30 threads
- **Maximum Pool Size**: 100 threads
- **Keep-Alive Time**: 60 seconds
- **Queue Capacity**: 300 tasks (using LinkedBlockingQueue)
- **Thread Naming**: "qwen_code_cli-pool-{number}"
- **Daemon Threads**: false
- **Rejected Execution Handler**: CallerRunsPolicy
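For illustration only, the defaults above correspond roughly to the following standard `java.util.concurrent` configuration (this is not SDK code; it simply mirrors the documented values and requires the usual `ThreadPoolExecutor`, `LinkedBlockingQueue`, `TimeUnit`, and `AtomicInteger` imports):
```java
// Illustration of the documented defaults using standard java.util.concurrent types.
AtomicInteger threadNumber = new AtomicInteger(1);
ThreadPoolExecutor pool = new ThreadPoolExecutor(
    30,                                      // core pool size
    100,                                     // maximum pool size
    60L, TimeUnit.SECONDS,                   // keep-alive time
    new LinkedBlockingQueue<Runnable>(300),  // queue capacity
    runnable -> {
        Thread t = new Thread(runnable,
            "qwen_code_cli-pool-" + threadNumber.getAndIncrement());
        t.setDaemon(false);                  // daemon threads: false
        return t;
    },
    new ThreadPoolExecutor.CallerRunsPolicy()); // rejected execution handler
```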
## Error Handling
The SDK provides specific exception types for different error scenarios:
- `SessionControlException`: Thrown when there's an issue with session control (creation, initialization, etc.)
- `SessionSendPromptException`: Thrown when there's an issue sending a prompt or receiving a response
- `SessionClosedException`: Thrown when attempting to use a closed session
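A hedged sketch of handling these exception types around a query (it assumes they can propagate from `simpleQuery` or the underlying session calls; consult the actual method signatures for the real throws clauses):
```java
// Sketch only: exception propagation from simpleQuery is an assumption.
try {
    List<String> result = QwenCodeCli.simpleQuery("summarize this repository");
    result.forEach(logger::info);
} catch (SessionClosedException e) {
    logger.error("Session was already closed", e);
} catch (SessionSendPromptException e) {
    logger.error("Failed to send the prompt or receive a response", e);
} catch (SessionControlException e) {
    logger.error("Failed to create or initialize the session", e);
}
```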
## FAQ / Troubleshooting
### Q: Do I need to install the Qwen CLI separately?
A: Yes, the Qwen Code CLI must be installed separately; the SDK requires Qwen CLI 0.5.5 or higher.
### Q: What Java versions are supported?
A: The SDK requires Java 1.8 or higher.
### Q: How do I handle long-running requests?
A: The SDK includes timeout utilities. You can configure timeouts using the `Timeout` class in `TransportOptions`.
### Q: Why are some tools not executing?
A: This is likely due to permission modes. Check your permission mode settings and consider using `allowedTools` to pre-approve certain tools.
### Q: How do I resume a previous session?
A: Use the `setResumeSessionId()` method in `TransportOptions` to resume a previous session.
### Q: Can I customize the environment for the CLI process?
A: Yes, use the `setEnv()` method in `TransportOptions` to pass environment variables to the CLI process.
## License
Apache-2.0 - see [LICENSE](./LICENSE) for details.

View File

@@ -381,7 +381,7 @@ Arguments passed directly when running the CLI can override other configurations
| `--telemetry-otlp-protocol` | | Sets the OTLP protocol for telemetry (`grpc` or `http`). | | Defaults to `grpc`. See [telemetry](../../developers/development/telemetry) for more information. |
| `--telemetry-log-prompts` | | Enables logging of prompts for telemetry. | | See [telemetry](../../developers/development/telemetry) for more information. |
| `--checkpointing` | | Enables [checkpointing](../features/checkpointing). | | |
| `--acp` | | Enables ACP mode (Agent Control Protocol). Useful for IDE/editor integrations like [Zed](../integration-zed). | | Stable. Replaces the deprecated `--experimental-acp` flag. |
| `--experimental-acp` | | Enables ACP mode (Agent Control Protocol). Useful for IDE/editor integrations like [Zed](../integration-zed). | | Experimental. |
| `--experimental-skills` | | Enables experimental [Agent Skills](../features/skills) (registers the `skill` tool and loads Skills from `.qwen/skills/` and `~/.qwen/skills/`). | | Experimental. |
| `--extensions` | `-e` | Specifies a list of extensions to use for the session. | Extension names | If not provided, all available extensions are used. Use the special term `qwen -e none` to disable all extensions. Example: `qwen -e my-extension -e my-other-extension` |
| `--list-extensions` | `-l` | Lists all available extensions and exits. | | |

View File

@@ -32,7 +32,7 @@
"Qwen Code": {
"type": "custom",
"command": "qwen",
"args": ["--acp"],
"args": ["--experimental-acp"],
"env": {}
}
```

View File

@@ -80,11 +80,10 @@ type PermissionHandler = (
/**
* Sets up an ACP test environment with all necessary utilities.
* @param useNewFlag - If true, uses --acp; if false, uses --experimental-acp (for backward compatibility testing)
*/
function setupAcpTest(
rig: TestRig,
options?: { permissionHandler?: PermissionHandler; useNewFlag?: boolean },
options?: { permissionHandler?: PermissionHandler },
) {
const pending = new Map<number, PendingRequest>();
let nextRequestId = 1;
@@ -96,13 +95,9 @@ function setupAcpTest(
const permissionHandler =
options?.permissionHandler ?? (() => ({ optionId: 'proceed_once' }));
// Use --acp by default, but allow testing with --experimental-acp for backward compatibility
const acpFlag =
options?.useNewFlag !== false ? '--acp' : '--experimental-acp';
const agent = spawn(
'node',
[rig.bundlePath, acpFlag, '--no-chat-recording'],
[rig.bundlePath, '--experimental-acp', '--no-chat-recording'],
{
cwd: rig.testDir!,
stdio: ['pipe', 'pipe', 'pipe'],
@@ -626,99 +621,3 @@ function setupAcpTest(
}
});
});
(IS_SANDBOX ? describe.skip : describe)(
'acp flag backward compatibility',
() => {
it('should work with deprecated --experimental-acp flag and show warning', async () => {
const rig = new TestRig();
rig.setup('acp backward compatibility');
const { sendRequest, cleanup, stderr } = setupAcpTest(rig, {
useNewFlag: false,
});
try {
const initResult = await sendRequest('initialize', {
protocolVersion: 1,
clientCapabilities: {
fs: { readTextFile: true, writeTextFile: true },
},
});
expect(initResult).toBeDefined();
// Verify deprecation warning is shown
const stderrOutput = stderr.join('');
expect(stderrOutput).toContain('--experimental-acp is deprecated');
expect(stderrOutput).toContain('Please use --acp instead');
await sendRequest('authenticate', { methodId: 'openai' });
const newSession = (await sendRequest('session/new', {
cwd: rig.testDir!,
mcpServers: [],
})) as { sessionId: string };
expect(newSession.sessionId).toBeTruthy();
// Verify functionality still works
const promptResult = await sendRequest('session/prompt', {
sessionId: newSession.sessionId,
prompt: [{ type: 'text', text: 'Say hello.' }],
});
expect(promptResult).toBeDefined();
} catch (e) {
if (stderr.length) {
console.error('Agent stderr:', stderr.join(''));
}
throw e;
} finally {
await cleanup();
}
});
it('should work with new --acp flag without warnings', async () => {
const rig = new TestRig();
rig.setup('acp new flag');
const { sendRequest, cleanup, stderr } = setupAcpTest(rig, {
useNewFlag: true,
});
try {
const initResult = await sendRequest('initialize', {
protocolVersion: 1,
clientCapabilities: {
fs: { readTextFile: true, writeTextFile: true },
},
});
expect(initResult).toBeDefined();
// Verify no deprecation warning is shown
const stderrOutput = stderr.join('');
expect(stderrOutput).not.toContain('--experimental-acp is deprecated');
await sendRequest('authenticate', { methodId: 'openai' });
const newSession = (await sendRequest('session/new', {
cwd: rig.testDir!,
mcpServers: [],
})) as { sessionId: string };
expect(newSession.sessionId).toBeTruthy();
// Verify functionality works
const promptResult = await sendRequest('session/prompt', {
sessionId: newSession.sessionId,
prompt: [{ type: 'text', text: 'Say hello.' }],
});
expect(promptResult).toBeDefined();
} catch (e) {
if (stderr.length) {
console.error('Agent stderr:', stderr.join(''));
}
throw e;
} finally {
await cleanup();
}
});
},
);

package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "@qwen-code/qwen-code",
"version": "0.7.0",
"version": "0.6.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@qwen-code/qwen-code",
"version": "0.7.0",
"version": "0.6.0",
"workspaces": [
"packages/*"
],
@@ -3875,6 +3875,17 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/prompts": {
"version": "2.4.9",
"resolved": "https://registry.npmjs.org/@types/prompts/-/prompts-2.4.9.tgz",
"integrity": "sha512-qTxFi6Buiu8+50/+3DGIWLHM6QuWsEKugJnnP6iv2Mc4ncxE4A/OJkjuVOA+5X0X1S/nq5VJRa8Lu+nwcvbrKA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"kleur": "^3.0.3"
}
},
"node_modules/@types/prop-types": {
"version": "15.7.15",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
@@ -10984,6 +10995,15 @@
"json-buffer": "3.0.1"
}
},
"node_modules/kleur": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz",
"integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==",
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/ky": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/ky/-/ky-1.8.1.tgz",
@@ -13393,6 +13413,19 @@
"dev": true,
"license": "MIT"
},
"node_modules/prompts": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz",
"integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==",
"license": "MIT",
"dependencies": {
"kleur": "^3.0.3",
"sisteransi": "^1.0.5"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/prop-types": {
"version": "15.8.1",
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz",
@@ -14753,6 +14786,12 @@
"url": "https://github.com/steveukx/git-js?sponsor=1"
}
},
"node_modules/sisteransi": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
"integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==",
"license": "MIT"
},
"node_modules/slash": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
@@ -17316,7 +17355,7 @@
},
"packages/cli": {
"name": "@qwen-code/qwen-code",
"version": "0.7.0",
"version": "0.6.0",
"dependencies": {
"@google/genai": "1.30.0",
"@iarna/toml": "^2.2.5",
@@ -17338,6 +17377,7 @@
"ink-spinner": "^5.0.0",
"lowlight": "^3.3.0",
"open": "^10.1.2",
"prompts": "^2.4.2",
"qrcode-terminal": "^0.12.0",
"react": "^19.1.0",
"read-package-up": "^11.0.0",
@@ -17366,6 +17406,7 @@
"@types/diff": "^7.0.2",
"@types/dotenv": "^6.1.1",
"@types/node": "^20.11.24",
"@types/prompts": "^2.4.9",
"@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6",
"@types/semver": "^7.7.0",
@@ -17953,7 +17994,7 @@
},
"packages/core": {
"name": "@qwen-code/qwen-code-core",
"version": "0.7.0",
"version": "0.6.0",
"hasInstallScript": true,
"dependencies": {
"@anthropic-ai/sdk": "^0.36.1",
@@ -21413,7 +21454,7 @@
},
"packages/test-utils": {
"name": "@qwen-code/qwen-code-test-utils",
"version": "0.7.0",
"version": "0.6.0",
"dev": true,
"license": "Apache-2.0",
"devDependencies": {
@@ -21425,7 +21466,7 @@
},
"packages/vscode-ide-companion": {
"name": "qwen-code-vscode-ide-companion",
"version": "0.7.0",
"version": "0.6.0",
"license": "LICENSE",
"dependencies": {
"@modelcontextprotocol/sdk": "^1.25.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@qwen-code/qwen-code",
"version": "0.7.0",
"version": "0.6.0",
"engines": {
"node": ">=20.0.0"
},
@@ -13,7 +13,7 @@
"url": "git+https://github.com/QwenLM/qwen-code.git"
},
"config": {
"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.7.0"
"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.6.0"
},
"scripts": {
"start": "cross-env node scripts/start.js",

View File

@@ -1,6 +1,6 @@
{
"name": "@qwen-code/qwen-code",
"version": "0.7.0",
"version": "0.6.0",
"description": "Qwen Code",
"repository": {
"type": "git",
@@ -33,7 +33,7 @@
"dist"
],
"config": {
"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.7.0"
"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.6.0"
},
"dependencies": {
"@google/genai": "1.30.0",
@@ -46,6 +46,7 @@
"comment-json": "^4.2.5",
"diff": "^7.0.0",
"dotenv": "^17.1.0",
"prompts": "^2.4.2",
"fzf": "^0.5.2",
"glob": "^10.5.0",
"highlight.js": "^11.11.1",
@@ -79,6 +80,7 @@
"@types/command-exists": "^1.2.3",
"@types/diff": "^7.0.2",
"@types/dotenv": "^6.1.1",
"@types/prompts": "^2.4.9",
"@types/node": "^20.11.24",
"@types/react": "^19.1.8",
"@types/react-dom": "^19.1.6",

View File

@@ -27,10 +27,8 @@ import { Readable, Writable } from 'node:stream';
import type { LoadedSettings } from '../config/settings.js';
import { SettingScope } from '../config/settings.js';
import { z } from 'zod';
import { ExtensionStorage, type Extension } from '../config/extension.js';
import type { CliArgs } from '../config/config.js';
import { loadCliConfig } from '../config/config.js';
import { ExtensionEnablementManager } from '../config/extensions/extensionEnablement.js';
// Import the modular Session class
import { Session } from './session/Session.js';
@@ -38,7 +36,6 @@ import { Session } from './session/Session.js';
export async function runAcpAgent(
config: Config,
settings: LoadedSettings,
extensions: Extension[],
argv: CliArgs,
) {
const stdout = Writable.toWeb(process.stdout) as WritableStream;
@@ -51,8 +48,7 @@ export async function runAcpAgent(
console.debug = console.error;
new acp.AgentSideConnection(
(client: acp.Client) =>
new GeminiAgent(config, settings, extensions, argv, client),
(client: acp.Client) => new GeminiAgent(config, settings, argv, client),
stdout,
stdin,
);
@@ -65,7 +61,6 @@ class GeminiAgent {
constructor(
private config: Config,
private settings: LoadedSettings,
private extensions: Extension[],
private argv: CliArgs,
private client: acp.Client,
) {}
@@ -215,16 +210,7 @@ class GeminiAgent {
continue: false,
};
const config = await loadCliConfig(
settings,
this.extensions,
new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
this.argv.extensions,
),
argvForSession,
cwd,
);
const config = await loadCliConfig(settings, argvForSession, cwd);
await config.initialize();
return config;

View File

@@ -0,0 +1,87 @@
import type { ConfirmationRequest } from '../../ui/types.js';
/**
* Requests consent from the user to perform an action, by reading a Y/n
* character from stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param consentDescription The description of the thing they will be consenting to.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentNonInteractive(
consentDescription: string,
): Promise<boolean> {
console.info(consentDescription);
const result = await promptForConsentNonInteractive(
'Do you want to continue? [Y/n]: ',
);
return result;
}
/**
* Requests consent from the user to perform an action, in interactive mode.
*
* This should not be called from non-interactive mode as it will not work.
*
* @param consentDescription The description of the thing they will be consenting to.
* @param addExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentInteractive(
consentDescription: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return promptForConsentInteractive(
consentDescription + '\n\nDo you want to continue?',
addExtensionUpdateConfirmationRequest,
);
}
/**
* Asks users a prompt and awaits for a y/n response on stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param prompt A yes/no prompt to ask the user
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
*/
async function promptForConsentNonInteractive(
prompt: string,
): Promise<boolean> {
const readline = await import('node:readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return new Promise((resolve) => {
rl.question(prompt, (answer) => {
rl.close();
resolve(['y', ''].includes(answer.trim().toLowerCase()));
});
});
}
/**
* Asks users an interactive yes/no prompt.
*
* This should not be called from non-interactive mode as it will break the CLI.
*
* @param prompt A markdown prompt to ask the user
* @param addExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
* @returns Whether or not the user answers yes.
*/
async function promptForConsentInteractive(
prompt: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return new Promise<boolean>((resolve) => {
addExtensionUpdateConfirmationRequest({
prompt,
onConfirm: (resolvedConfirmed) => {
resolve(resolvedConfirmed);
},
});
});
}

View File

@@ -5,21 +5,22 @@
*/
import { type CommandModule } from 'yargs';
import { disableExtension } from '../../config/extension.js';
import { SettingScope } from '../../config/settings.js';
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';
interface DisableArgs {
name: string;
scope?: string;
}
export function handleDisable(args: DisableArgs) {
export async function handleDisable(args: DisableArgs) {
const extensionManager = await getExtensionManager();
try {
if (args.scope?.toLowerCase() === 'workspace') {
disableExtension(args.name, SettingScope.Workspace);
extensionManager.disableExtension(args.name, SettingScope.Workspace);
} else {
disableExtension(args.name, SettingScope.User);
extensionManager.disableExtension(args.name, SettingScope.User);
}
console.log(
`Extension "${args.name}" successfully disabled for scope "${args.scope}".`,
@@ -61,8 +62,8 @@ export const disableCommand: CommandModule = {
}
return true;
}),
handler: (argv) => {
handleDisable({
handler: async (argv) => {
await handleDisable({
name: argv['name'] as string,
scope: argv['scope'] as string,
});

View File

@@ -6,20 +6,22 @@
import { type CommandModule } from 'yargs';
import { FatalConfigError, getErrorMessage } from '@qwen-code/qwen-code-core';
import { enableExtension } from '../../config/extension.js';
import { SettingScope } from '../../config/settings.js';
import { getExtensionManager } from './utils.js';
interface EnableArgs {
name: string;
scope?: string;
}
export function handleEnable(args: EnableArgs) {
export async function handleEnable(args: EnableArgs) {
const extensionManager = await getExtensionManager();
try {
if (args.scope?.toLowerCase() === 'workspace') {
enableExtension(args.name, SettingScope.Workspace);
extensionManager.enableExtension(args.name, SettingScope.Workspace);
} else {
enableExtension(args.name, SettingScope.User);
extensionManager.enableExtension(args.name, SettingScope.User);
}
if (args.scope) {
console.log(
@@ -66,8 +68,8 @@ export const enableCommand: CommandModule = {
}
return true;
}),
handler: (argv) => {
handleEnable({
handler: async (argv) => {
await handleEnable({
name: argv['name'] as string,
scope: argv['scope'] as string,
});

View File

@@ -5,58 +5,64 @@
*/
import type { CommandModule } from 'yargs';
import {
installExtension,
requestConsentNonInteractive,
} from '../../config/extension.js';
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
ExtensionManager,
parseInstallSource,
} from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import { stat } from 'node:fs/promises';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
import { loadSettings } from '../../config/settings.js';
import { requestConsentNonInteractive } from './consent.js';
interface InstallArgs {
source: string;
ref?: string;
autoUpdate?: boolean;
allowPreRelease?: boolean;
consent?: boolean;
}
export async function handleInstall(args: InstallArgs) {
try {
let installMetadata: ExtensionInstallMetadata;
const { source } = args;
const installMetadata = await parseInstallSource(args.source);
if (
source.startsWith('http://') ||
source.startsWith('https://') ||
source.startsWith('git@') ||
source.startsWith('sso://')
installMetadata.type !== 'git' &&
installMetadata.type !== 'github-release'
) {
installMetadata = {
source,
type: 'git',
ref: args.ref,
autoUpdate: args.autoUpdate,
};
} else {
if (args.ref || args.autoUpdate) {
throw new Error(
'--ref and --auto-update are not applicable for local extensions.',
'--ref and --auto-update are not applicable for marketplace extensions.',
);
}
try {
await stat(source);
installMetadata = {
source,
type: 'local',
};
} catch {
throw new Error('Install source not found.');
}
}
const name = await installExtension(
installMetadata,
requestConsentNonInteractive,
const requestConsent = args.consent
? () => Promise.resolve(true)
: requestConsentNonInteractive;
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
isWorkspaceTrusted: !!isWorkspaceTrusted(
loadSettings(workspaceDir).merged,
),
requestConsent,
});
await extensionManager.refreshCache();
const extension = await extensionManager.installExtension(
{
...installMetadata,
ref: args.ref,
autoUpdate: args.autoUpdate,
allowPreRelease: args.allowPreRelease,
},
requestConsent,
);
console.log(
`Extension "${extension.name}" installed successfully and enabled.`,
);
console.log(`Extension "${name}" installed successfully and enabled.`);
} catch (error) {
console.error(getErrorMessage(error));
process.exit(1);
@@ -65,11 +71,13 @@ export async function handleInstall(args: InstallArgs) {
export const installCommand: CommandModule = {
command: 'install <source>',
describe: 'Installs an extension from a git repository URL or a local path.',
describe:
'Installs an extension from a git repository URL, local path, or claude marketplace (marketplace-url:plugin-name).',
builder: (yargs) =>
yargs
.positional('source', {
describe: 'The github URL or local path of the extension to install.',
describe:
'The github URL, local path, or marketplace source (marketplace-url:plugin-name) of the extension to install.',
type: 'string',
demandOption: true,
})
@@ -81,6 +89,16 @@ export const installCommand: CommandModule = {
describe: 'Enable auto-update for this extension.',
type: 'boolean',
})
.option('pre-release', {
describe: 'Enable pre-release versions for this extension.',
type: 'boolean',
})
.option('consent', {
describe:
'Acknowledge the security risks of installing an extension and skip the confirmation prompt.',
type: 'boolean',
default: false,
})
.check((argv) => {
if (!argv.source) {
throw new Error('The source argument must be provided.');
@@ -92,6 +110,8 @@ export const installCommand: CommandModule = {
source: argv['source'] as string,
ref: argv['ref'] as string | undefined,
autoUpdate: argv['auto-update'] as boolean | undefined,
allowPreRelease: argv['pre-release'] as boolean | undefined,
consent: argv['consent'] as boolean | undefined,
});
},
};

View File

@@ -5,13 +5,10 @@
*/
import type { CommandModule } from 'yargs';
import {
installExtension,
requestConsentNonInteractive,
} from '../../config/extension.js';
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
import { type ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import { requestConsentNonInteractive } from './consent.js';
import { getExtensionManager } from './utils.js';
interface InstallArgs {
path: string;
@@ -23,12 +20,14 @@ export async function handleLink(args: InstallArgs) {
source: args.path,
type: 'link',
};
const extensionName = await installExtension(
const extensionManager = await getExtensionManager();
const extension = await extensionManager.installExtension(
installMetadata,
requestConsentNonInteractive,
);
console.log(
`Extension "${extensionName}" linked successfully and enabled.`,
`Extension "${extension.name}" linked successfully and enabled.`,
);
} catch (error) {
console.error(getErrorMessage(error));

View File

@@ -5,19 +5,23 @@
*/
import type { CommandModule } from 'yargs';
import { loadUserExtensions, toOutputString } from '../../config/extension.js';
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';
export async function handleList() {
try {
const extensions = loadUserExtensions();
const extensionManager = await getExtensionManager();
const extensions = extensionManager.getLoadedExtensions();
if (extensions.length === 0) {
console.log('No extensions installed.');
return;
}
console.log(
extensions
.map((extension, _): string => toOutputString(extension, process.cwd()))
.map((extension, _): string =>
extensionManager.toOutputString(extension, process.cwd()),
)
.join('\n\n'),
);
} catch (error) {

View File

@@ -5,8 +5,11 @@
*/
import type { CommandModule } from 'yargs';
import { uninstallExtension } from '../../config/extension.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { requestConsentNonInteractive } from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
import { loadSettings } from '../../config/settings.js';
interface UninstallArgs {
name: string; // can be extension name or source URL.
@@ -14,7 +17,16 @@ interface UninstallArgs {
export async function handleUninstall(args: UninstallArgs) {
try {
await uninstallExtension(args.name);
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
requestConsent: requestConsentNonInteractive,
isWorkspaceTrusted: !!isWorkspaceTrusted(
loadSettings(workspaceDir).merged,
),
});
await extensionManager.refreshCache();
await extensionManager.uninstallExtension(args.name, false);
console.log(`Extension "${args.name}" successfully uninstalled.`);
} catch (error) {
console.error(getErrorMessage(error));

View File

@@ -5,22 +5,13 @@
*/
import type { CommandModule } from 'yargs';
import {
loadExtensions,
annotateActiveExtensions,
ExtensionStorage,
requestConsentNonInteractive,
} from '../../config/extension.js';
import {
updateAllUpdatableExtensions,
type ExtensionUpdateInfo,
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { checkForExtensionUpdate } from '../../config/extensions/github.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import {
checkForExtensionUpdate,
type ExtensionUpdateInfo,
} from '@qwen-code/qwen-code-core';
import { getExtensionManager } from './utils.js';
interface UpdateArgs {
name?: string;
@@ -31,19 +22,9 @@ const updateOutput = (info: ExtensionUpdateInfo) =>
`Extension "${info.name}" successfully updated: ${info.originalVersion} → ${info.updatedVersion}.`;
export async function handleUpdate(args: UpdateArgs) {
const workingDir = process.cwd();
const extensionEnablementManager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
// Force enable named extensions, otherwise we will only update the enabled
// ones.
args.name ? [args.name] : [],
);
const allExtensions = loadExtensions(extensionEnablementManager);
const extensions = annotateActiveExtensions(
allExtensions,
workingDir,
extensionEnablementManager,
);
const extensionManager = await getExtensionManager();
const extensions = extensionManager.getLoadedExtensions();
if (args.name) {
try {
const extension = extensions.find(
@@ -53,25 +34,23 @@ export async function handleUpdate(args: UpdateArgs) {
console.log(`Extension "${args.name}" not found.`);
return;
}
let updateState: ExtensionUpdateState | undefined;
if (!extension.installMetadata) {
console.log(
`Unable to install extension "${args.name}" due to missing install metadata`,
);
return;
}
await checkForExtensionUpdate(extension, (newState) => {
updateState = newState;
});
const updateState = await checkForExtensionUpdate(
extension,
extensionManager,
);
if (updateState !== ExtensionUpdateState.UPDATE_AVAILABLE) {
console.log(`Extension "${args.name}" is already up to date.`);
return;
}
// TODO(chrstnb): we should list extensions if the requested extension is not installed.
const updatedExtensionInfo = (await updateExtension(
const updatedExtensionInfo = (await extensionManager.updateExtension(
extension,
workingDir,
requestConsentNonInteractive,
updateState,
() => {},
))!;
@@ -92,18 +71,15 @@ export async function handleUpdate(args: UpdateArgs) {
if (args.all) {
try {
const extensionState = new Map();
await checkForAllExtensionUpdates(extensions, (action) => {
if (action.type === 'SET_STATE') {
extensionState.set(action.payload.name, {
status: action.payload.state,
await extensionManager.checkForAllExtensionUpdates(
(extensionName, state) => {
extensionState.set(extensionName, {
status: state,
processed: true, // No need to process as we will force the update.
});
}
});
let updateInfos = await updateAllUpdatableExtensions(
workingDir,
requestConsentNonInteractive,
extensions,
},
);
let updateInfos = await extensionManager.updateAllUpdatableExtensions(
extensionState,
() => {},
);

View File

@@ -0,0 +1,21 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { ExtensionManager } from '@qwen-code/qwen-code-core';
import { loadSettings } from '../../config/settings.js';
import { requestConsentNonInteractive } from './consent.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
export async function getExtensionManager(): Promise<ExtensionManager> {
const workspaceDir = process.cwd();
const extensionManager = new ExtensionManager({
workspaceDir,
requestConsent: requestConsentNonInteractive,
isWorkspaceTrusted: !!isWorkspaceTrusted(loadSettings(workspaceDir).merged),
});
await extensionManager.refreshCache();
return extensionManager;
}

View File

@@ -7,7 +7,8 @@
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import { listMcpServers } from './list.js';
import { loadSettings } from '../../config/settings.js';
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
import { loadExtensions } from '../../config/extension.js';
import { ExtensionStorage } from '../../config/extensions/storage.js';
import { createTransport } from '@qwen-code/qwen-code-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';

View File

@@ -8,10 +8,13 @@
import type { CommandModule } from 'yargs';
import { loadSettings } from '../../config/settings.js';
import type { MCPServerConfig } from '@qwen-code/qwen-code-core';
import { MCPServerStatus, createTransport } from '@qwen-code/qwen-code-core';
import {
MCPServerStatus,
createTransport,
ExtensionManager,
} from '@qwen-code/qwen-code-core';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
const COLOR_GREEN = '\u001b[32m';
const COLOR_YELLOW = '\u001b[33m';
@@ -22,22 +25,27 @@ async function getMcpServersFromConfig(): Promise<
Record<string, MCPServerConfig>
> {
const settings = loadSettings();
const extensions = loadExtensions(
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
);
const extensionManager = new ExtensionManager({
isWorkspaceTrusted: !!isWorkspaceTrusted(settings.merged),
telemetrySettings: settings.merged.telemetry,
});
await extensionManager.refreshCache();
const extensions = extensionManager.getLoadedExtensions();
const mcpServers = { ...(settings.merged.mcpServers || {}) };
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
if (extension.isActive) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
}
return mcpServers;
}

View File

@@ -16,7 +16,8 @@ import {
} from '@qwen-code/qwen-code-core';
import { loadCliConfig, parseArguments, type CliArgs } from './config.js';
import type { Settings } from './settings.js';
import { ExtensionStorage, type Extension } from './extension.js';
import type { Extension } from './extension.js';
import { ExtensionStorage } from './extensions/storage.js';
import * as ServerConfig from '@qwen-code/qwen-code-core';
import { isWorkspaceTrusted } from './trustedFolders.js';
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
@@ -1597,58 +1598,6 @@ describe('Approval mode tool exclusion logic', () => {
expect(excludedTools).toContain(WriteFileTool.Name);
});
it('should not exclude a tool explicitly allowed in tools.allowed', async () => {
process.argv = ['node', 'script.js', '-p', 'test'];
const argv = await parseArguments({} as Settings);
const settings: Settings = {
tools: {
allowed: [ShellTool.Name],
},
};
const extensions: Extension[] = [];
const config = await loadCliConfig(
settings,
extensions,
new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
argv.extensions,
),
argv,
);
const excludedTools = config.getExcludeTools();
expect(excludedTools).not.toContain(ShellTool.Name);
expect(excludedTools).toContain(EditTool.Name);
expect(excludedTools).toContain(WriteFileTool.Name);
});
it('should not exclude a tool explicitly allowed in tools.core', async () => {
process.argv = ['node', 'script.js', '-p', 'test'];
const argv = await parseArguments({} as Settings);
const settings: Settings = {
tools: {
core: [ShellTool.Name],
},
};
const extensions: Extension[] = [];
const config = await loadCliConfig(
settings,
extensions,
new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
argv.extensions,
),
argv,
);
const excludedTools = config.getExcludeTools();
expect(excludedTools).not.toContain(ShellTool.Name);
expect(excludedTools).toContain(EditTool.Name);
expect(excludedTools).toContain(WriteFileTool.Name);
});
it('should exclude only shell tools in non-interactive mode with auto-edit approval mode', async () => {
process.argv = [
'node',

View File

@@ -9,25 +9,21 @@ import {
AuthType,
Config,
DEFAULT_QWEN_EMBEDDING_MODEL,
DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
EditTool,
FileDiscoveryService,
getCurrentGeminiMdFilename,
loadServerHierarchicalMemory,
setGeminiMdFilename as setServerGeminiMdFilename,
ShellTool,
WriteFileTool,
resolveTelemetrySettings,
FatalConfigError,
Storage,
InputFormat,
OutputFormat,
isToolEnabled,
SessionService,
type ResumedSessionData,
type FileFilteringOptions,
type MCPServerConfig,
type ToolName,
EditTool,
ShellTool,
WriteFileTool,
} from '@qwen-code/qwen-code-core';
import { extensionsCommand } from '../commands/extensions.js';
import type { Settings } from './settings.js';
@@ -39,14 +35,11 @@ import { homedir } from 'node:os';
import { resolvePath } from '../utils/resolvePath.js';
import { getCliVersion } from '../utils/version.js';
import type { Extension } from './extension.js';
import { annotateActiveExtensions } from './extension.js';
import { loadSandboxConfig } from './sandboxConfig.js';
import { appEvents } from '../utils/events.js';
import { mcpCommand } from '../commands/mcp.js';
import { isWorkspaceTrusted } from './trustedFolders.js';
import type { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
import { buildWebSearchConfig } from './webSearch.js';
// Simple console logger for now - replace with actual logger if available
@@ -113,7 +106,6 @@ export interface CliArgs {
telemetryOutfile: string | undefined;
allowedMcpServerNames: string[] | undefined;
allowedTools: string[] | undefined;
acp: boolean | undefined;
experimentalAcp: boolean | undefined;
experimentalSkills: boolean | undefined;
extensions: string[] | undefined;
@@ -165,7 +157,7 @@ function normalizeOutputFormat(
return OutputFormat.TEXT;
}
export async function parseArguments(settings: Settings): Promise<CliArgs> {
export async function parseArguments(): Promise<CliArgs> {
const rawArgv = hideBin(process.argv);
const yargsInstance = yargs(rawArgv)
.locale('en')
@@ -307,15 +299,9 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
description: 'Enables checkpointing of file edits',
default: false,
})
.option('acp', {
type: 'boolean',
description: 'Starts the agent in ACP mode',
})
.option('experimental-acp', {
type: 'boolean',
description:
'Starts the agent in ACP mode (deprecated, use --acp instead)',
hidden: true,
description: 'Starts the agent in ACP mode',
})
.option('experimental-skills', {
type: 'boolean',
@@ -546,11 +532,9 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
}),
)
// Register MCP subcommands
.command(mcpCommand);
if (settings?.experimental?.extensionManagement ?? true) {
yargsInstance.command(extensionsCommand);
}
.command(mcpCommand)
// Register Extension subcommands
.command(extensionsCommand);
yargsInstance
.version(await getCliVersion()) // This will enable the --version flag based on package.json
@@ -598,19 +582,8 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
// The import format is now only controlled by settings.memoryImportFormat
// We no longer accept it as a CLI argument
// Handle deprecated --experimental-acp flag
if (result['experimentalAcp']) {
console.warn(
'\x1b[33m⚠ Warning: --experimental-acp is deprecated and will be removed in a future release. Please use --acp instead.\x1b[0m',
);
// Map experimental-acp to acp if acp is not explicitly set
if (!result['acp']) {
(result as Record<string, unknown>)['acp'] = true;
}
}
// Apply ACP fallback: if acp or experimental-acp is present but no explicit --channel, treat as ACP
if ((result['acp'] || result['experimentalAcp']) && !result['channel']) {
// Apply ACP fallback: if experimental-acp is present but no explicit --channel, treat as ACP
if (result['experimentalAcp'] && !result['channel']) {
(result as Record<string, unknown>)['channel'] = 'ACP';
}
@@ -625,11 +598,11 @@ export async function loadHierarchicalGeminiMemory(
includeDirectoriesToReadGemini: readonly string[] = [],
debugMode: boolean,
fileService: FileDiscoveryService,
settings: Settings,
extensionContextFilePaths: string[] = [],
folderTrust: boolean,
memoryImportFormat: 'flat' | 'tree' = 'tree',
fileFilteringOptions?: FileFilteringOptions,
maxDirs: number = 200,
): Promise<{ memoryContent: string; fileCount: number }> {
// FIX: Use real, canonical paths for a reliable comparison to handle symlinks.
const realCwd = fs.realpathSync(path.resolve(currentWorkingDirectory));
@@ -656,7 +629,7 @@ export async function loadHierarchicalGeminiMemory(
folderTrust,
memoryImportFormat,
fileFilteringOptions,
settings.context?.discoveryMaxDirs,
maxDirs,
);
}
@@ -671,30 +644,17 @@ export function isDebugMode(argv: CliArgs): boolean {
export async function loadCliConfig(
settings: Settings,
extensions: Extension[],
extensionEnablementManager: ExtensionEnablementManager,
argv: CliArgs,
cwd: string = process.cwd(),
overrideExtensions?: string[],
): Promise<Config> {
const debugMode = isDebugMode(argv);
const memoryImportFormat = settings.context?.importFormat || 'tree';
const ideMode = settings.ide?.enabled ?? false;
const folderTrust = settings.security?.folderTrust?.enabled ?? false;
const trustedFolder = isWorkspaceTrusted(settings)?.isTrusted ?? true;
const allExtensions = annotateActiveExtensions(
extensions,
cwd,
extensionEnablementManager,
);
const activeExtensions = extensions.filter(
(_, i) => allExtensions[i].isActive,
);
// Set the context filename in the server's memoryTool module BEFORE loading memory
// TODO(b/343434939): This is a bit of a hack. The contextFileName should ideally be passed
// directly to the Config constructor in core, and have core handle setGeminiMdFilename.
@@ -706,51 +666,27 @@ export async function loadCliConfig(
setServerGeminiMdFilename(getCurrentGeminiMdFilename());
}
const extensionContextFilePaths = activeExtensions.flatMap(
(e) => e.contextFiles,
);
// Automatically load output-language.md if it exists
const outputLanguageFilePath = path.join(
let outputLanguageFilePath: string | undefined = path.join(
Storage.getGlobalQwenDir(),
'output-language.md',
);
if (fs.existsSync(outputLanguageFilePath)) {
extensionContextFilePaths.push(outputLanguageFilePath);
if (debugMode) {
logger.debug(
`Found output-language.md, adding to context files: ${outputLanguageFilePath}`,
);
}
} else {
outputLanguageFilePath = undefined;
}
const fileService = new FileDiscoveryService(cwd);
const fileFiltering = {
...DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
...settings.context?.fileFiltering,
};
const includeDirectories = (settings.context?.includeDirectories || [])
.map(resolvePath)
.concat((argv.includeDirectories || []).map(resolvePath));
// Call the (now wrapper) loadHierarchicalGeminiMemory which calls the server's version
const { memoryContent, fileCount } = await loadHierarchicalGeminiMemory(
cwd,
settings.context?.loadMemoryFromIncludeDirectories
? includeDirectories
: [],
debugMode,
fileService,
settings,
extensionContextFilePaths,
trustedFolder,
memoryImportFormat,
fileFiltering,
);
let mcpServers = mergeMcpServers(settings, activeExtensions);
const question = argv.promptInteractive || argv.prompt || '';
const inputFormat: InputFormat =
(argv.inputFormat as InputFormat | undefined) ?? InputFormat.TEXT;
@@ -838,28 +774,6 @@ export async function loadCliConfig(
// However, if stream-json input is used, control can be requested via JSON messages,
// so tools should not be excluded in that case.
const extraExcludes: string[] = [];
const resolvedCoreTools = argv.coreTools || settings.tools?.core || [];
const resolvedAllowedTools =
argv.allowedTools || settings.tools?.allowed || [];
const isExplicitlyEnabled = (toolName: ToolName): boolean => {
if (resolvedCoreTools.length > 0) {
if (isToolEnabled(toolName, resolvedCoreTools, [])) {
return true;
}
}
if (resolvedAllowedTools.length > 0) {
if (isToolEnabled(toolName, resolvedAllowedTools, [])) {
return true;
}
}
return false;
};
const excludeUnlessExplicit = (toolName: ToolName): void => {
if (!isExplicitlyEnabled(toolName)) {
extraExcludes.push(toolName);
}
};
if (
!interactive &&
!argv.experimentalAcp &&
@@ -868,15 +782,12 @@ export async function loadCliConfig(
switch (approvalMode) {
case ApprovalMode.PLAN:
case ApprovalMode.DEFAULT:
// In default non-interactive mode, all tools that require approval are excluded,
// unless explicitly enabled via coreTools/allowedTools.
excludeUnlessExplicit(ShellTool.Name as ToolName);
excludeUnlessExplicit(EditTool.Name as ToolName);
excludeUnlessExplicit(WriteFileTool.Name as ToolName);
// In default non-interactive mode, all tools that require approval are excluded.
extraExcludes.push(ShellTool.Name, EditTool.Name, WriteFileTool.Name);
break;
case ApprovalMode.AUTO_EDIT:
// In auto-edit non-interactive mode, only tools that still require a prompt are excluded.
excludeUnlessExplicit(ShellTool.Name as ToolName);
extraExcludes.push(ShellTool.Name);
break;
case ApprovalMode.YOLO:
// No extra excludes for YOLO mode.
@@ -889,38 +800,18 @@ export async function loadCliConfig(
const excludeTools = mergeExcludeTools(
settings,
activeExtensions,
extraExcludes.length > 0 ? extraExcludes : undefined,
argv.excludeTools,
);
const blockedMcpServers: Array<{ name: string; extensionName: string }> = [];
if (!argv.allowedMcpServerNames) {
if (settings.mcp?.allowed) {
mcpServers = allowedMcpServers(
mcpServers,
settings.mcp.allowed,
blockedMcpServers,
);
}
if (settings.mcp?.excluded) {
const excludedNames = new Set(settings.mcp.excluded.filter(Boolean));
if (excludedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) => !excludedNames.has(key)),
);
}
}
}
if (argv.allowedMcpServerNames) {
mcpServers = allowedMcpServers(
mcpServers,
argv.allowedMcpServerNames,
blockedMcpServers,
);
}
const allowedMcpServers = argv.allowedMcpServerNames
? new Set(argv.allowedMcpServerNames.filter(Boolean))
: settings.mcp?.allowed
? new Set(settings.mcp.allowed.filter(Boolean))
: undefined;
const excludedMcpServers = settings.mcp?.excluded
? new Set(settings.mcp.excluded.filter(Boolean))
: undefined;
const selectedAuthType =
(argv.authType as AuthType | undefined) ||
@@ -988,6 +879,8 @@ export async function loadCliConfig(
includeDirectories,
loadMemoryFromIncludeDirectories:
settings.context?.loadMemoryFromIncludeDirectories || false,
importFormat: settings.context?.importFormat || 'tree',
discoveryMaxDirs: settings.context?.discoveryMaxDirs || 200,
debugMode,
question,
fullContext: argv.allFiles || false,
@@ -997,9 +890,13 @@ export async function loadCliConfig(
toolDiscoveryCommand: settings.tools?.discoveryCommand,
toolCallCommand: settings.tools?.callCommand,
mcpServerCommand: settings.mcp?.serverCommand,
mcpServers,
userMemory: memoryContent,
geminiMdFileCount: fileCount,
mcpServers: settings.mcpServers || {},
allowedMcpServers: allowedMcpServers
? Array.from(allowedMcpServers)
: undefined,
excludedMcpServers: excludedMcpServers
? Array.from(excludedMcpServers)
: undefined,
approvalMode,
showMemoryUsage:
argv.showMemoryUsage || settings.ui?.showMemoryUsage || false,
@@ -1022,15 +919,14 @@ export async function loadCliConfig(
fileDiscoveryService: fileService,
bugCommand: settings.advanced?.bugCommand,
model: resolvedModel,
extensionContextFilePaths,
outputLanguageFilePath,
sessionTokenLimit: settings.model?.sessionTokenLimit ?? -1,
maxSessionTurns:
argv.maxSessionTurns ?? settings.model?.maxSessionTurns ?? -1,
experimentalZedIntegration: argv.acp || argv.experimentalAcp || false,
experimentalZedIntegration: argv.experimentalAcp || false,
experimentalSkills: argv.experimentalSkills || false,
listExtensions: argv.listExtensions || false,
extensions: allExtensions,
blockedMcpServers,
overrideExtensions: overrideExtensions || argv.extensions,
noBrowser: !!process.env['NO_BROWSER'],
authType: selectedAuthType,
inputFormat,
@@ -1085,61 +981,8 @@ export async function loadCliConfig(
});
}
function allowedMcpServers(
mcpServers: { [x: string]: MCPServerConfig },
allowMCPServers: string[],
blockedMcpServers: Array<{ name: string; extensionName: string }>,
) {
const allowedNames = new Set(allowMCPServers.filter(Boolean));
if (allowedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key, server]) => {
const isAllowed = allowedNames.has(key);
if (!isAllowed) {
blockedMcpServers.push({
name: key,
extensionName: server.extensionName || '',
});
}
return isAllowed;
}),
);
} else {
blockedMcpServers.push(
...Object.entries(mcpServers).map(([key, server]) => ({
name: key,
extensionName: server.extensionName || '',
})),
);
mcpServers = {};
}
return mcpServers;
}
function mergeMcpServers(settings: Settings, extensions: Extension[]) {
const mcpServers = { ...(settings.mcpServers || {}) };
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) {
logger.warn(
`Skipping extension MCP config for server with key "${key}" as it already exists.`,
);
return;
}
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
return mcpServers;
}
function mergeExcludeTools(
settings: Settings,
extensions: Extension[],
extraExcludes?: string[] | undefined,
cliExcludeTools?: string[] | undefined,
): string[] {
@@ -1148,10 +991,5 @@ function mergeExcludeTools(
...(settings.tools?.exclude || []),
...(extraExcludes || []),
]);
for (const extension of extensions) {
for (const tool of extension.config.excludeTools || []) {
allExcludeTools.add(tool);
}
}
return [...allExcludeTools];
}

File diff suppressed because it is too large

View File

@@ -1,786 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import type {
MCPServerConfig,
GeminiCLIExtension,
ExtensionInstallMetadata,
} from '@qwen-code/qwen-code-core';
import {
QWEN_DIR,
Storage,
Config,
ExtensionInstallEvent,
ExtensionUninstallEvent,
ExtensionDisableEvent,
ExtensionEnableEvent,
logExtensionEnable,
logExtensionInstallEvent,
logExtensionUninstall,
logExtensionDisable,
} from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { SettingScope, loadSettings } from '../config/settings.js';
import { getErrorMessage } from '../utils/errors.js';
import { recursivelyHydrateStrings } from './extensions/variables.js';
import { isWorkspaceTrusted } from './trustedFolders.js';
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
import {
cloneFromGit,
downloadFromGitHubRelease,
} from './extensions/github.js';
import type { LoadExtensionContext } from './extensions/variableSchema.js';
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
import chalk from 'chalk';
import type { ConfirmationRequest } from '../ui/types.js';
export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';
export interface Extension {
path: string;
config: ExtensionConfig;
contextFiles: string[];
installMetadata?: ExtensionInstallMetadata | undefined;
}
export interface ExtensionConfig {
name: string;
version: string;
mcpServers?: Record<string, MCPServerConfig>;
contextFileName?: string | string[];
excludeTools?: string[];
}
export interface ExtensionUpdateInfo {
name: string;
originalVersion: string;
updatedVersion: string;
}
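/**
 * Resolves on-disk locations for a single named extension under the user's
 * extensions directory, and provides a helper for creating temporary staging
 * directories during install and update.
 */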
export class ExtensionStorage {
private readonly extensionName: string;
constructor(extensionName: string) {
this.extensionName = extensionName;
}
getExtensionDir(): string {
return path.join(
ExtensionStorage.getUserExtensionsDir(),
this.extensionName,
);
}
getConfigPath(): string {
return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
}
static getUserExtensionsDir(): string {
const storage = new Storage(os.homedir());
return storage.getExtensionsDir();
}
static async createTmpDir(): Promise<string> {
return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
}
}
export function getWorkspaceExtensions(workspaceDir: string): Extension[] {
// If the workspace dir is the user's home directory, there are no separate workspace extensions.
if (path.resolve(workspaceDir) === path.resolve(os.homedir())) {
return [];
}
return loadExtensionsFromDir(workspaceDir);
}
export async function copyExtension(
source: string,
destination: string,
): Promise<void> {
await fs.promises.cp(source, destination, { recursive: true });
}
export async function performWorkspaceExtensionMigration(
extensions: Extension[],
requestConsent: (consent: string) => Promise<boolean>,
): Promise<string[]> {
const failedInstallNames: string[] = [];
for (const extension of extensions) {
try {
const installMetadata: ExtensionInstallMetadata = {
source: extension.path,
type: 'local',
};
await installExtension(installMetadata, requestConsent);
} catch (_) {
failedInstallNames.push(extension.config.name);
}
}
return failedInstallNames;
}
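// Builds a minimal, non-interactive Config carrying the user's telemetry
// settings so that extension lifecycle events (install, uninstall, enable,
// disable) can be logged.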
function getTelemetryConfig(cwd: string) {
const settings = loadSettings(cwd);
const config = new Config({
telemetry: settings.merged.telemetry,
interactive: false,
targetDir: cwd,
cwd,
model: '',
debugMode: false,
});
return config;
}
export function loadExtensions(
extensionEnablementManager: ExtensionEnablementManager,
workspaceDir: string = process.cwd(),
): Extension[] {
const settings = loadSettings(workspaceDir).merged;
const allExtensions = [...loadUserExtensions()];
if (
(isWorkspaceTrusted(settings) ?? true) &&
// Default management setting to true
!(settings.experimental?.extensionManagement ?? true)
) {
allExtensions.push(...getWorkspaceExtensions(workspaceDir));
}
const uniqueExtensions = new Map<string, Extension>();
for (const extension of allExtensions) {
if (
!uniqueExtensions.has(extension.config.name) &&
extensionEnablementManager.isEnabled(extension.config.name, workspaceDir)
) {
uniqueExtensions.set(extension.config.name, extension);
}
}
return Array.from(uniqueExtensions.values());
}
export function loadUserExtensions(): Extension[] {
const userExtensions = loadExtensionsFromDir(os.homedir());
const uniqueExtensions = new Map<string, Extension>();
for (const extension of userExtensions) {
if (!uniqueExtensions.has(extension.config.name)) {
uniqueExtensions.set(extension.config.name, extension);
}
}
return Array.from(uniqueExtensions.values());
}
export function loadExtensionsFromDir(dir: string): Extension[] {
const storage = new Storage(dir);
const extensionsDir = storage.getExtensionsDir();
if (!fs.existsSync(extensionsDir)) {
return [];
}
const extensions: Extension[] = [];
for (const subdir of fs.readdirSync(extensionsDir)) {
const extensionDir = path.join(extensionsDir, subdir);
const extension = loadExtension({ extensionDir, workspaceDir: dir });
if (extension != null) {
extensions.push(extension);
}
}
return extensions;
}
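/**
 * Loads a single extension from `extensionDir`. For `link` installs the config
 * is read from the linked source path; environment variables in the config are
 * resolved and the `trust` flag is stripped from MCP server entries. Returns
 * null (with a warning) if the directory does not contain a valid extension.
 */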
export function loadExtension(context: LoadExtensionContext): Extension | null {
const { extensionDir, workspaceDir } = context;
if (!fs.statSync(extensionDir).isDirectory()) {
return null;
}
const installMetadata = loadInstallMetadata(extensionDir);
let effectiveExtensionPath = extensionDir;
if (installMetadata?.type === 'link') {
effectiveExtensionPath = installMetadata.source;
}
try {
let config = loadExtensionConfig({
extensionDir: effectiveExtensionPath,
workspaceDir,
});
config = resolveEnvVarsInObject(config);
if (config.mcpServers) {
config.mcpServers = Object.fromEntries(
Object.entries(config.mcpServers).map(([key, value]) => [
key,
filterMcpConfig(value),
]),
);
}
const contextFiles = getContextFileNames(config)
.map((contextFileName) =>
path.join(effectiveExtensionPath, contextFileName),
)
.filter((contextFilePath) => fs.existsSync(contextFilePath));
return {
path: effectiveExtensionPath,
config,
contextFiles,
installMetadata,
};
} catch (e) {
console.error(
`Warning: Skipping extension in ${effectiveExtensionPath}: ${getErrorMessage(
e,
)}`,
);
return null;
}
}
export function loadExtensionByName(
name: string,
workspaceDir: string = process.cwd(),
): Extension | null {
const userExtensionsDir = ExtensionStorage.getUserExtensionsDir();
if (!fs.existsSync(userExtensionsDir)) {
return null;
}
for (const subdir of fs.readdirSync(userExtensionsDir)) {
const extensionDir = path.join(userExtensionsDir, subdir);
if (!fs.statSync(extensionDir).isDirectory()) {
continue;
}
const extension = loadExtension({ extensionDir, workspaceDir });
if (
extension &&
extension.config.name.toLowerCase() === name.toLowerCase()
) {
return extension;
}
}
return null;
}
function filterMcpConfig(original: MCPServerConfig): MCPServerConfig {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { trust, ...rest } = original;
return Object.freeze(rest);
}
export function loadInstallMetadata(
extensionDir: string,
): ExtensionInstallMetadata | undefined {
const metadataFilePath = path.join(extensionDir, INSTALL_METADATA_FILENAME);
try {
const configContent = fs.readFileSync(metadataFilePath, 'utf-8');
const metadata = JSON.parse(configContent) as ExtensionInstallMetadata;
return metadata;
} catch (_e) {
return undefined;
}
}
function getContextFileNames(config: ExtensionConfig): string[] {
if (!config.contextFileName) {
return ['QWEN.md'];
} else if (!Array.isArray(config.contextFileName)) {
return [config.contextFileName];
}
return config.contextFileName;
}
/**
 * Returns an annotated list of extensions, marking each one as active or
 * inactive based on the enablement manager's rules for the given workspace.
 * @param extensions The base list of extensions.
 * @param workspaceDir The current workspace directory.
 * @param manager The enablement manager used to decide whether each extension is enabled.
*/
export function annotateActiveExtensions(
extensions: Extension[],
workspaceDir: string,
manager: ExtensionEnablementManager,
): GeminiCLIExtension[] {
manager.validateExtensionOverrides(extensions);
return extensions.map((extension) => ({
name: extension.config.name,
version: extension.config.version,
isActive: manager.isEnabled(extension.config.name, workspaceDir),
path: extension.path,
installMetadata: extension.installMetadata,
}));
}
/**
* Requests consent from the user to perform an action, by reading a Y/n
* character from stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param consentDescription The description of the thing they will be consenting to.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentNonInteractive(
consentDescription: string,
): Promise<boolean> {
console.info(consentDescription);
const result = await promptForConsentNonInteractive(
'Do you want to continue? [Y/n]: ',
);
return result;
}
/**
* Requests consent from the user to perform an action, in interactive mode.
*
* This should not be called from non-interactive mode as it will not work.
*
* @param consentDescription The description of the thing they will be consenting to.
 * @param addExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
* @returns boolean, whether they consented or not.
*/
export async function requestConsentInteractive(
consentDescription: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return await promptForConsentInteractive(
consentDescription + '\n\nDo you want to continue?',
addExtensionUpdateConfirmationRequest,
);
}
/**
 * Asks the user a prompt and waits for a y/n response on stdin.
*
* This should not be called from interactive mode as it will break the CLI.
*
* @param prompt A yes/no prompt to ask the user
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
*/
async function promptForConsentNonInteractive(
prompt: string,
): Promise<boolean> {
const readline = await import('node:readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return new Promise((resolve) => {
rl.question(prompt, (answer) => {
rl.close();
resolve(['y', ''].includes(answer.trim().toLowerCase()));
});
});
}
/**
* Asks users an interactive yes/no prompt.
*
* This should not be called from non-interactive mode as it will break the CLI.
*
* @param prompt A markdown prompt to ask the user
 * @param addExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
* @returns Whether or not the user answers yes.
*/
async function promptForConsentInteractive(
prompt: string,
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
return await new Promise<boolean>((resolve) => {
addExtensionUpdateConfirmationRequest({
prompt,
onConfirm: (resolvedConfirmed) => {
resolve(resolvedConfirmed);
},
});
});
}
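/**
 * Installs an extension from a local path, linked directory, git repository,
 * or GitHub release into the user extensions directory. Requests user consent,
 * writes install metadata, logs a telemetry event, enables the extension, and
 * returns its name; on failure an error event is logged and the error is rethrown.
 */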
export async function installExtension(
installMetadata: ExtensionInstallMetadata,
requestConsent: (consent: string) => Promise<boolean>,
cwd: string = process.cwd(),
previousExtensionConfig?: ExtensionConfig,
): Promise<string> {
const telemetryConfig = getTelemetryConfig(cwd);
let newExtensionConfig: ExtensionConfig | null = null;
let localSourcePath: string | undefined;
try {
const settings = loadSettings(cwd).merged;
if (!isWorkspaceTrusted(settings)) {
throw new Error(
`Could not install extension from untrusted folder at ${installMetadata.source}`,
);
}
const extensionsDir = ExtensionStorage.getUserExtensionsDir();
await fs.promises.mkdir(extensionsDir, { recursive: true });
if (
!path.isAbsolute(installMetadata.source) &&
(installMetadata.type === 'local' || installMetadata.type === 'link')
) {
installMetadata.source = path.resolve(cwd, installMetadata.source);
}
let tempDir: string | undefined;
if (
installMetadata.type === 'git' ||
installMetadata.type === 'github-release'
) {
tempDir = await ExtensionStorage.createTmpDir();
try {
const result = await downloadFromGitHubRelease(
installMetadata,
tempDir,
);
installMetadata.type = result.type;
installMetadata.releaseTag = result.tagName;
} catch (_error) {
await cloneFromGit(installMetadata, tempDir);
installMetadata.type = 'git';
}
localSourcePath = tempDir;
} else if (
installMetadata.type === 'local' ||
installMetadata.type === 'link'
) {
localSourcePath = installMetadata.source;
} else {
throw new Error(`Unsupported install type: ${installMetadata.type}`);
}
try {
newExtensionConfig = loadExtensionConfig({
extensionDir: localSourcePath,
workspaceDir: cwd,
});
const newExtensionName = newExtensionConfig.name;
const extensionStorage = new ExtensionStorage(newExtensionName);
const destinationPath = extensionStorage.getExtensionDir();
const installedExtensions = loadUserExtensions();
if (
installedExtensions.some(
(installed) => installed.config.name === newExtensionName,
)
) {
throw new Error(
`Extension "${newExtensionName}" is already installed. Please uninstall it first.`,
);
}
await maybeRequestConsentOrFail(
newExtensionConfig,
requestConsent,
previousExtensionConfig,
);
await fs.promises.mkdir(destinationPath, { recursive: true });
if (
installMetadata.type === 'local' ||
installMetadata.type === 'git' ||
installMetadata.type === 'github-release'
) {
await copyExtension(localSourcePath, destinationPath);
}
const metadataString = JSON.stringify(installMetadata, null, 2);
const metadataPath = path.join(
destinationPath,
INSTALL_METADATA_FILENAME,
);
await fs.promises.writeFile(metadataPath, metadataString);
} finally {
if (tempDir) {
await fs.promises.rm(tempDir, { recursive: true, force: true });
}
}
logExtensionInstallEvent(
telemetryConfig,
new ExtensionInstallEvent(
newExtensionConfig!.name,
newExtensionConfig!.version,
installMetadata.source,
'success',
),
);
enableExtension(newExtensionConfig!.name, SettingScope.User);
return newExtensionConfig!.name;
} catch (error) {
// Attempt to load config from the source path even if installation fails
// to get the name and version for logging.
if (!newExtensionConfig && localSourcePath) {
try {
newExtensionConfig = loadExtensionConfig({
extensionDir: localSourcePath,
workspaceDir: cwd,
});
} catch {
// Ignore error, this is just for logging.
}
}
logExtensionInstallEvent(
telemetryConfig,
new ExtensionInstallEvent(
newExtensionConfig?.name ?? '',
newExtensionConfig?.version ?? '',
installMetadata.source,
'error',
),
);
throw error;
}
}
/**
 * Builds a consent string for installing an extension based on its
* extensionConfig.
*/
function extensionConsentString(extensionConfig: ExtensionConfig): string {
const output: string[] = [];
const mcpServerEntries = Object.entries(extensionConfig.mcpServers || {});
output.push(`Installing extension "${extensionConfig.name}".`);
output.push(
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**',
);
if (mcpServerEntries.length) {
output.push('This extension will run the following MCP servers:');
for (const [key, mcpServer] of mcpServerEntries) {
const isLocal = !!mcpServer.command;
const source =
mcpServer.httpUrl ??
`${mcpServer.command || ''}${mcpServer.args ? ' ' + mcpServer.args.join(' ') : ''}`;
output.push(` * ${key} (${isLocal ? 'local' : 'remote'}): ${source}`);
}
}
if (extensionConfig.contextFileName) {
output.push(
`This extension will append info to your QWEN.md context using ${extensionConfig.contextFileName}`,
);
}
if (extensionConfig.excludeTools) {
output.push(
`This extension will exclude the following core tools: ${extensionConfig.excludeTools}`,
);
}
return output.join('\n');
}
/**
* Requests consent from the user to install an extension (extensionConfig), if
* there is any difference between the consent string for `extensionConfig` and
* `previousExtensionConfig`.
*
* Always requests consent if previousExtensionConfig is null.
*
* Throws if the user does not consent.
*/
async function maybeRequestConsentOrFail(
extensionConfig: ExtensionConfig,
requestConsent: (consent: string) => Promise<boolean>,
previousExtensionConfig?: ExtensionConfig,
) {
const extensionConsent = extensionConsentString(extensionConfig);
if (previousExtensionConfig) {
const previousExtensionConsent = extensionConsentString(
previousExtensionConfig,
);
if (previousExtensionConsent === extensionConsent) {
return;
}
}
if (!(await requestConsent(extensionConsent))) {
throw new Error(`Installation cancelled for "${extensionConfig.name}".`);
}
}
export function validateName(name: string) {
if (!/^[a-zA-Z0-9-]+$/.test(name)) {
throw new Error(
`Invalid extension name: "${name}". Only letters (a-z, A-Z), numbers (0-9), and dashes (-) are allowed.`,
);
}
}
export function loadExtensionConfig(
context: LoadExtensionContext,
): ExtensionConfig {
const { extensionDir, workspaceDir } = context;
const configFilePath = path.join(extensionDir, EXTENSIONS_CONFIG_FILENAME);
if (!fs.existsSync(configFilePath)) {
throw new Error(`Configuration file not found at ${configFilePath}`);
}
try {
const configContent = fs.readFileSync(configFilePath, 'utf-8');
const config = recursivelyHydrateStrings(JSON.parse(configContent), {
extensionPath: extensionDir,
workspacePath: workspaceDir,
'/': path.sep,
pathSeparator: path.sep,
}) as unknown as ExtensionConfig;
if (!config.name || !config.version) {
throw new Error(
`Invalid configuration in ${configFilePath}: missing ${!config.name ? '"name"' : '"version"'}`,
);
}
validateName(config.name);
return config;
} catch (e) {
throw new Error(
`Failed to load extension config from ${configFilePath}: ${getErrorMessage(
e,
)}`,
);
}
}
export async function uninstallExtension(
extensionIdentifier: string,
cwd: string = process.cwd(),
): Promise<void> {
const telemetryConfig = getTelemetryConfig(cwd);
const installedExtensions = loadUserExtensions();
const extensionName = installedExtensions.find(
(installed) =>
installed.config.name.toLowerCase() ===
extensionIdentifier.toLowerCase() ||
installed.installMetadata?.source.toLowerCase() ===
extensionIdentifier.toLowerCase(),
)?.config.name;
if (!extensionName) {
throw new Error(`Extension not found.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
[extensionName],
);
manager.remove(extensionName);
const storage = new ExtensionStorage(extensionName);
await fs.promises.rm(storage.getExtensionDir(), {
recursive: true,
force: true,
});
logExtensionUninstall(
telemetryConfig,
new ExtensionUninstallEvent(extensionName, 'success'),
);
}
export function toOutputString(
extension: Extension,
workspaceDir: string,
): string {
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
);
const userEnabled = manager.isEnabled(extension.config.name, os.homedir());
const workspaceEnabled = manager.isEnabled(
extension.config.name,
workspaceDir,
);
const status = workspaceEnabled ? chalk.green('✓') : chalk.red('✗');
let output = `${status} ${extension.config.name} (${extension.config.version})`;
output += `\n Path: ${extension.path}`;
if (extension.installMetadata) {
output += `\n Source: ${extension.installMetadata.source} (Type: ${extension.installMetadata.type})`;
if (extension.installMetadata.ref) {
output += `\n Ref: ${extension.installMetadata.ref}`;
}
if (extension.installMetadata.releaseTag) {
output += `\n Release tag: ${extension.installMetadata.releaseTag}`;
}
}
output += `\n Enabled (User): ${userEnabled}`;
output += `\n Enabled (Workspace): ${workspaceEnabled}`;
if (extension.contextFiles.length > 0) {
output += `\n Context files:`;
extension.contextFiles.forEach((contextFile) => {
output += `\n ${contextFile}`;
});
}
if (extension.config.mcpServers) {
output += `\n MCP servers:`;
Object.keys(extension.config.mcpServers).forEach((key) => {
output += `\n ${key}`;
});
}
if (extension.config.excludeTools) {
output += `\n Excluded tools:`;
extension.config.excludeTools.forEach((tool) => {
output += `\n ${tool}`;
});
}
return output;
}
export function disableExtension(
name: string,
scope: SettingScope,
cwd: string = process.cwd(),
) {
const config = getTelemetryConfig(cwd);
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
throw new Error('System and SystemDefaults scopes are not supported.');
}
const extension = loadExtensionByName(name, cwd);
if (!extension) {
throw new Error(`Extension with name ${name} does not exist.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
[name],
);
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
manager.disable(name, true, scopePath);
logExtensionDisable(config, new ExtensionDisableEvent(name, scope));
}
export function enableExtension(
name: string,
scope: SettingScope,
cwd: string = process.cwd(),
) {
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
throw new Error('System and SystemDefaults scopes are not supported.');
}
const extension = loadExtensionByName(name, cwd);
if (!extension) {
throw new Error(`Extension with name ${name} does not exist.`);
}
const manager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
);
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
manager.enable(name, true, scopePath);
const config = getTelemetryConfig(cwd);
logExtensionEnable(config, new ExtensionEnableEvent(name, scope));
}

View File

@@ -1,424 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as path from 'node:path';
import fs from 'node:fs';
import os from 'node:os';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { ExtensionEnablementManager, Override } from './extensionEnablement.js';
import type { Extension } from '../extension.js';
// Helper to create a temporary directory for testing
function createTestDir() {
const dirPath = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-test-'));
return {
path: dirPath,
cleanup: () => fs.rmSync(dirPath, { recursive: true, force: true }),
};
}
let testDir: { path: string; cleanup: () => void };
let configDir: string;
let manager: ExtensionEnablementManager;
describe('ExtensionEnablementManager', () => {
beforeEach(() => {
testDir = createTestDir();
configDir = path.join(testDir.path, '.gemini');
manager = new ExtensionEnablementManager(configDir);
});
afterEach(() => {
testDir.cleanup();
// Reset the singleton instance for test isolation
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(ExtensionEnablementManager as any).instance = undefined;
});
describe('isEnabled', () => {
it('should return true if extension is not configured', () => {
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
});
it('should return true if no overrides match', () => {
manager.disable('ext-test', false, '/another/path');
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
});
it('should enable a path based on an override rule', () => {
manager.disable('ext-test', true, '/');
manager.enable('ext-test', true, '/home/user/projects/');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
true,
);
});
it('should disable a path based on a disable override rule', () => {
manager.enable('ext-test', true, '/');
manager.disable('ext-test', true, '/home/user/projects/');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
});
it('should respect the last matching rule (enable wins)', () => {
manager.disable('ext-test', true, '/home/user/projects/');
manager.enable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
true,
);
});
it('should respect the last matching rule (disable wins)', () => {
manager.enable('ext-test', true, '/home/user/projects/');
manager.disable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
});
it('should let a child disable rule override a parent enable-with-subdirs rule', () => {
manager.enable('ext-test', true, '/home/user/projects');
manager.disable('ext-test', false, '/home/user/projects/my-app');
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
false,
);
expect(
manager.isEnabled('ext-test', '/home/user/projects/something-else'),
).toBe(true);
});
});
describe('includeSubdirs', () => {
it('should add a glob when enabling with includeSubdirs', () => {
manager.enable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
});
it('should not add a glob when enabling without includeSubdirs', () => {
manager.enable('ext-test', false, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should add a glob when disabling with includeSubdirs', () => {
manager.disable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('!/path/to/dir/*');
});
it('should remove conflicting glob rule when enabling without subdirs', () => {
manager.enable('ext-test', true, '/path/to/dir'); // Adds /path/to/dir/*
manager.enable('ext-test', false, '/path/to/dir'); // Should remove the glob
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should remove conflicting non-glob rule when enabling with subdirs', () => {
manager.enable('ext-test', false, '/path/to/dir'); // Adds /path/to/dir/
manager.enable('ext-test', true, '/path/to/dir'); // Should remove the non-glob
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/');
});
it('should remove conflicting rules when disabling', () => {
manager.enable('ext-test', true, '/path/to/dir'); // enabled with glob
manager.disable('ext-test', false, '/path/to/dir'); // disabled without
const config = manager.readConfig();
expect(config['ext-test'].overrides).toContain('!/path/to/dir/');
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
});
it('should correctly evaluate isEnabled with subdirs', () => {
manager.disable('ext-test', true, '/');
manager.enable('ext-test', true, '/path/to/dir');
expect(manager.isEnabled('ext-test', '/path/to/dir/')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/dir/sub/')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/another/')).toBe(false);
});
it('should correctly evaluate isEnabled without subdirs', () => {
manager.disable('ext-test', true, '/*');
manager.enable('ext-test', false, '/path/to/dir');
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(true);
expect(manager.isEnabled('ext-test', '/path/to/dir/sub')).toBe(false);
});
});
describe('pruning child rules', () => {
it('should remove child rules when enabling a parent with subdirs', () => {
// Pre-existing rules for children
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.disable('ext-test', true, '/path/to/dir/subdir2');
manager.enable('ext-test', false, '/path/to/another/dir');
// Enable the parent directory
manager.enable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
// The new parent rule should be present
expect(overrides).toContain(`/path/to/dir/*`);
// Child rules should be removed
expect(overrides).not.toContain('/path/to/dir/subdir1/');
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
// Unrelated rules should remain
expect(overrides).toContain('/path/to/another/dir/');
});
it('should remove child rules when disabling a parent with subdirs', () => {
// Pre-existing rules for children
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.disable('ext-test', true, '/path/to/dir/subdir2');
manager.enable('ext-test', false, '/path/to/another/dir');
// Disable the parent directory
manager.disable('ext-test', true, '/path/to/dir');
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
// The new parent rule should be present
expect(overrides).toContain(`!/path/to/dir/*`);
// Child rules should be removed
expect(overrides).not.toContain('/path/to/dir/subdir1/');
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
// Unrelated rules should remain
expect(overrides).toContain('/path/to/another/dir/');
});
it('should not remove child rules if includeSubdirs is false', () => {
manager.enable('ext-test', false, '/path/to/dir/subdir1');
manager.enable('ext-test', false, '/path/to/dir'); // Not including subdirs
const config = manager.readConfig();
const overrides = config['ext-test'].overrides;
expect(overrides).toContain('/path/to/dir/subdir1/');
expect(overrides).toContain('/path/to/dir/');
});
});
it('should enable a path based on an enable override', () => {
manager.disable('ext-test', true, '/Users/chrstn');
manager.enable('ext-test', true, '/Users/chrstn/gemini-cli');
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
true,
);
});
it('should not apply a non-subdir disable rule to subdirectories', () => {
manager.disable('ext-test', false, '/Users/chrstn');
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
true,
);
});
describe('extension overrides (-e <name>)', () => {
beforeEach(() => {
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
});
it('can enable extensions, case-insensitive', () => {
manager.disable('ext-test', true, '/');
expect(manager.isEnabled('ext-test', '/')).toBe(true);
expect(manager.isEnabled('Ext-Test', '/')).toBe(true);
// Double check that it would have been disabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
).toBe(false);
});
it('disable all other extensions', () => {
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
manager.enable('ext-test-2', true, '/');
expect(manager.isEnabled('ext-test-2', '/')).toBe(false);
// Double check that it would have been enabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test-2', '/'),
).toBe(true);
});
it('none disables all extensions', () => {
manager = new ExtensionEnablementManager(configDir, ['none']);
manager.enable('ext-test', true, '/');
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(false);
// Double check that it would have been enabled otherwise
expect(
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
).toBe(true);
});
});
describe('validateExtensionOverrides', () => {
let consoleErrorSpy: ReturnType<typeof vi.spyOn>;
beforeEach(() => {
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
});
afterEach(() => {
consoleErrorSpy.mockRestore();
});
it('should not log an error if enabledExtensionNamesOverride is empty', () => {
const manager = new ExtensionEnablementManager(configDir, []);
manager.validateExtensionOverrides([]);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
it('should not log an error if all enabledExtensionNamesOverride are valid', () => {
const manager = new ExtensionEnablementManager(configDir, [
'ext-one',
'ext-two',
]);
const extensions = [
{ config: { name: 'ext-one' } },
{ config: { name: 'ext-two' } },
] as Extension[];
manager.validateExtensionOverrides(extensions);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
it('should log an error for each invalid extension name in enabledExtensionNamesOverride', () => {
const manager = new ExtensionEnablementManager(configDir, [
'ext-one',
'ext-invalid',
'ext-another-invalid',
]);
const extensions = [
{ config: { name: 'ext-one' } },
{ config: { name: 'ext-two' } },
] as Extension[];
manager.validateExtensionOverrides(extensions);
expect(consoleErrorSpy).toHaveBeenCalledTimes(2);
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Extension not found: ext-invalid',
);
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Extension not found: ext-another-invalid',
);
});
it('should not log an error if "none" is in enabledExtensionNamesOverride', () => {
const manager = new ExtensionEnablementManager(configDir, ['none']);
manager.validateExtensionOverrides([]);
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
});
});
describe('Override', () => {
it('should create an override from input', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should create a disable override from input', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.baseRule).toBe(`/path/to/dir/`);
expect(override.isDisable).toBe(true);
expect(override.includeSubdirs).toBe(false);
});
it('should create an override from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir');
expect(override.baseRule).toBe('/path/to/dir');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(false);
});
it('should create a disable override from a file rule', () => {
const override = Override.fromFileRule('!/path/to/dir/');
expect(override.isDisable).toBe(true);
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.includeSubdirs).toBe(false);
});
it('should create an override with subdirs from a file rule', () => {
const override = Override.fromFileRule('/path/to/dir/*');
expect(override.baseRule).toBe('/path/to/dir/');
expect(override.isDisable).toBe(false);
expect(override.includeSubdirs).toBe(true);
});
it('should correctly identify conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', false);
expect(override1.conflictsWith(override2)).toBe(true);
});
it('should correctly identify non-conflicting overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/another/dir', true);
expect(override1.conflictsWith(override2)).toBe(false);
});
it('should correctly identify equal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(true);
});
it('should correctly identify unequal overrides', () => {
const override1 = Override.fromInput('/path/to/dir', true);
const override2 = Override.fromInput('!/path/to/dir', true);
expect(override1.isEqualTo(override2)).toBe(false);
});
it('should generate the correct regex', () => {
const override = Override.fromInput('/path/to/dir', true);
const regex = override.asRegex();
expect(regex.test('/path/to/dir/')).toBe(true);
expect(regex.test('/path/to/dir/subdir')).toBe(true);
expect(regex.test('/path/to/another/dir')).toBe(false);
});
it('should correctly identify child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify child overrides with glob', () => {
const parent = Override.fromInput('/path/to/dir/*', true);
const child = Override.fromInput('/path/to/dir/subdir', false);
expect(child.isChildOf(parent)).toBe(true);
});
it('should correctly identify non-child overrides', () => {
const parent = Override.fromInput('/path/to/dir', true);
const other = Override.fromInput('/path/to/another/dir', false);
expect(other.isChildOf(parent)).toBe(false);
});
it('should generate the correct output string', () => {
const override = Override.fromInput('/path/to/dir', true);
expect(override.output()).toBe(`/path/to/dir/*`);
});
it('should generate the correct output string for a disable override', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
it('should disable a path based on a disable override rule', () => {
const override = Override.fromInput('!/path/to/dir', false);
expect(override.output()).toBe(`!/path/to/dir/`);
});
});

View File

@@ -1,239 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import fs from 'node:fs';
import path from 'node:path';
import { type Extension } from '../extension.js';
export interface ExtensionEnablementConfig {
overrides: string[];
}
export interface AllExtensionsEnablementConfig {
[extensionName: string]: ExtensionEnablementConfig;
}
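/**
 * A single enable/disable rule parsed from extension-enablement.json.
 * Rules are normalized paths such as "/home/user/project/" (enable),
 * "!/home/user/" (disable), or "/home/user/project/*" (enable including
 * subdirectories).
 */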
export class Override {
constructor(
public baseRule: string,
public isDisable: boolean,
public includeSubdirs: boolean,
) {}
static fromInput(inputRule: string, includeSubdirs: boolean): Override {
const isDisable = inputRule.startsWith('!');
let baseRule = isDisable ? inputRule.substring(1) : inputRule;
baseRule = ensureLeadingAndTrailingSlash(baseRule);
return new Override(baseRule, isDisable, includeSubdirs);
}
static fromFileRule(fileRule: string): Override {
const isDisable = fileRule.startsWith('!');
let baseRule = isDisable ? fileRule.substring(1) : fileRule;
const includeSubdirs = baseRule.endsWith('*');
baseRule = includeSubdirs
? baseRule.substring(0, baseRule.length - 1)
: baseRule;
return new Override(baseRule, isDisable, includeSubdirs);
}
conflictsWith(other: Override): boolean {
if (this.baseRule === other.baseRule) {
return (
this.includeSubdirs !== other.includeSubdirs ||
this.isDisable !== other.isDisable
);
}
return false;
}
isEqualTo(other: Override): boolean {
return (
this.baseRule === other.baseRule &&
this.includeSubdirs === other.includeSubdirs &&
this.isDisable === other.isDisable
);
}
asRegex(): RegExp {
return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
}
isChildOf(parent: Override) {
if (!parent.includeSubdirs) {
return false;
}
return parent.asRegex().test(this.baseRule);
}
output(): string {
return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
}
matchesPath(path: string) {
return this.asRegex().test(path);
}
}
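// Normalizes a directory path into the canonical "/.../" form used when
// storing and matching override rules.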
const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
// Normalize separators to forward slashes for consistent matching across platforms.
let result = dirPath.replace(/\\/g, '/');
if (result.charAt(0) !== '/') {
result = '/' + result;
}
if (result.charAt(result.length - 1) !== '/') {
result = result + '/';
}
return result;
};
/**
* Converts a glob pattern to a RegExp object.
* This is a simplified implementation that supports `*`.
*
* @param glob The glob pattern to convert.
* @returns A RegExp object.
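 *
 * For example, "/a/b/*" becomes new RegExp("^/a/b(/.*)?$"), which matches
 * "/a/b/" and its subdirectories but not "/a/bc/".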
*/
function globToRegex(glob: string): RegExp {
const regexString = glob
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
.replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group
return new RegExp(`^${regexString}$`);
}
export class ExtensionEnablementManager {
private configFilePath: string;
private configDir: string;
// If non-empty, this overrides all other extension configuration and enables
// only the ones in this list.
private enabledExtensionNamesOverride: string[];
constructor(configDir: string, enabledExtensionNames?: string[]) {
this.configDir = configDir;
this.configFilePath = path.join(configDir, 'extension-enablement.json');
this.enabledExtensionNamesOverride =
enabledExtensionNames?.map((name) => name.toLowerCase()) ?? [];
}
validateExtensionOverrides(extensions: Extension[]) {
for (const name of this.enabledExtensionNamesOverride) {
if (name === 'none') continue;
if (
!extensions.some(
(ext) => ext.config.name.toLowerCase() === name.toLowerCase(),
)
) {
console.error(`Extension not found: ${name}`);
}
}
}
/**
* Determines if an extension is enabled based on its name and the current
* path. The last matching rule in the overrides list wins.
*
* @param extensionName The name of the extension.
* @param currentPath The absolute path of the current working directory.
* @returns True if the extension is enabled, false otherwise.
*/
isEnabled(extensionName: string, currentPath: string): boolean {
// If we have a single override called 'none', this disables all extensions.
// Typically, this comes from the user passing `-e none`.
if (
this.enabledExtensionNamesOverride.length === 1 &&
this.enabledExtensionNamesOverride[0] === 'none'
) {
return false;
}
// If we have explicit overrides, only enable those extensions.
if (this.enabledExtensionNamesOverride.length > 0) {
// When checking against overrides ONLY, we use a case insensitive match.
// The override names are already lowercased in the constructor.
return this.enabledExtensionNamesOverride.includes(
extensionName.toLocaleLowerCase(),
);
}
// Otherwise, we use the configuration settings
const config = this.readConfig();
const extensionConfig = config[extensionName];
// Extensions are enabled by default.
let enabled = true;
const allOverrides = extensionConfig?.overrides ?? [];
for (const rule of allOverrides) {
const override = Override.fromFileRule(rule);
if (override.matchesPath(ensureLeadingAndTrailingSlash(currentPath))) {
enabled = !override.isDisable;
}
}
return enabled;
}
readConfig(): AllExtensionsEnablementConfig {
try {
const content = fs.readFileSync(this.configFilePath, 'utf-8');
return JSON.parse(content);
} catch (error) {
if (
error instanceof Error &&
'code' in error &&
error.code === 'ENOENT'
) {
return {};
}
console.error('Error reading extension enablement config:', error);
return {};
}
}
writeConfig(config: AllExtensionsEnablementConfig): void {
fs.mkdirSync(this.configDir, { recursive: true });
fs.writeFileSync(this.configFilePath, JSON.stringify(config, null, 2));
}
enable(
extensionName: string,
includeSubdirs: boolean,
scopePath: string,
): void {
const config = this.readConfig();
if (!config[extensionName]) {
config[extensionName] = { overrides: [] };
}
const override = Override.fromInput(scopePath, includeSubdirs);
const overrides = config[extensionName].overrides.filter((rule) => {
const fileOverride = Override.fromFileRule(rule);
if (
fileOverride.conflictsWith(override) ||
fileOverride.isEqualTo(override)
) {
return false; // Remove conflicts and equivalent values.
}
return !fileOverride.isChildOf(override);
});
overrides.push(override.output());
config[extensionName].overrides = overrides;
this.writeConfig(config);
}
disable(
extensionName: string,
includeSubdirs: boolean,
scopePath: string,
): void {
this.enable(extensionName, includeSubdirs, `!${scopePath}`);
}
remove(extensionName: string): void {
const config = this.readConfig();
if (config[extensionName]) {
delete config[extensionName];
this.writeConfig(config);
}
}
}

View File

@@ -1,468 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
EXTENSIONS_CONFIG_FILENAME,
ExtensionStorage,
INSTALL_METADATA_FILENAME,
annotateActiveExtensions,
loadExtension,
} from '../extension.js';
import { checkForAllExtensionUpdates, updateExtension } from './update.js';
import { QWEN_DIR } from '@qwen-code/qwen-code-core';
import { isWorkspaceTrusted } from '../trustedFolders.js';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { createExtension } from '../../test-utils/createExtension.js';
import { ExtensionEnablementManager } from './extensionEnablement.js';
const mockGit = {
clone: vi.fn(),
getRemotes: vi.fn(),
fetch: vi.fn(),
checkout: vi.fn(),
listRemote: vi.fn(),
revparse: vi.fn(),
// Not a part of the actual API, but we need to use this to do the correct
// file system interactions.
path: vi.fn(),
};
vi.mock('simple-git', () => ({
simpleGit: vi.fn((path: string) => {
mockGit.path.mockReturnValue(path);
return mockGit;
}),
}));
vi.mock('../extensions/github.js', async (importOriginal) => {
const actual =
await importOriginal<typeof import('../extensions/github.js')>();
return {
...actual,
downloadFromGitHubRelease: vi
.fn()
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
};
});
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: vi.fn(),
};
});
vi.mock('../trustedFolders.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../trustedFolders.js')>();
return {
...actual,
isWorkspaceTrusted: vi.fn(),
};
});
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
const actual =
await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
return {
...actual,
logExtensionInstallEvent: mockLogExtensionInstallEvent,
logExtensionUninstall: mockLogExtensionUninstall,
ExtensionInstallEvent: vi.fn(),
ExtensionUninstallEvent: vi.fn(),
};
});
describe('update tests', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'qwen-code-test-home-'),
);
tempWorkspaceDir = fs.mkdtempSync(
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
);
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
// Clean up before each test
fs.rmSync(userExtensionsDir, { recursive: true, force: true });
fs.mkdirSync(userExtensionsDir, { recursive: true });
vi.mocked(isWorkspaceTrusted).mockReturnValue({
isTrusted: true,
source: 'file',
});
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
Object.values(mockGit).forEach((fn) => fn.mockReset());
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
});
describe('updateExtension', () => {
it('should update a git-installed extension', async () => {
const gitUrl = 'https://github.com/google/gemini-extensions.git';
const extensionName = 'qwen-extensions';
const targetExtDir = path.join(userExtensionsDir, extensionName);
const metadataPath = path.join(targetExtDir, INSTALL_METADATA_FILENAME);
fs.mkdirSync(targetExtDir, { recursive: true });
fs.writeFileSync(
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.0.0' }),
);
fs.writeFileSync(
metadataPath,
JSON.stringify({ source: gitUrl, type: 'git' }),
);
mockGit.clone.mockImplementation(async (_, destination) => {
fs.mkdirSync(path.join(mockGit.path(), destination), {
recursive: true,
});
fs.writeFileSync(
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.1.0' }),
);
});
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: targetExtDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const updateInfo = await updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
() => {},
);
expect(updateInfo).toEqual({
name: 'qwen-extensions',
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
});
const updatedConfig = JSON.parse(
fs.readFileSync(
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
'utf-8',
),
);
expect(updatedConfig.version).toBe('1.1.0');
});
it('should call setExtensionUpdateState with UPDATING and then UPDATED_NEEDS_RESTART on success', async () => {
const extensionName = 'test-extension';
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: extensionName,
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
mockGit.clone.mockImplementation(async (_, destination) => {
fs.mkdirSync(path.join(mockGit.path(), destination), {
recursive: true,
});
fs.writeFileSync(
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name: extensionName, version: '1.1.0' }),
);
});
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const dispatch = vi.fn();
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
await updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
dispatch,
);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATING,
},
});
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
});
it('should call setExtensionUpdateState with ERROR on failure', async () => {
const extensionName = 'test-extension';
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: extensionName,
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
mockGit.clone.mockRejectedValue(new Error('Git clone failed'));
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
const dispatch = vi.fn();
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
await expect(
updateExtension(
extension,
tempHomeDir,
async (_) => true,
ExtensionUpdateState.UPDATE_AVAILABLE,
dispatch,
),
).rejects.toThrow();
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.UPDATING,
},
});
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: extensionName,
state: ExtensionUpdateState.ERROR,
},
});
});
});
describe('checkForAllExtensionUpdates', () => {
it('should return UpdateAvailable for a git extension with updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('remoteHash HEAD');
mockGit.revparse.mockResolvedValue('localHash');
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});
it('should return UpToDate for a git extension with no updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('sameHash HEAD');
mockGit.revparse.mockResolvedValue('sameHash');
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});
it('should return UpToDate for a local extension with no updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.0.0',
});
const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});
it('should return UpdateAvailable for a local extension with updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.1.0',
});
const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});
it('should return Error when git check fails', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'error-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
mockGit.getRemotes.mockRejectedValue(new Error('Git error'));
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'error-extension',
state: ExtensionUpdateState.ERROR,
},
});
});
});
});

View File

@@ -1,182 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import {
type ExtensionUpdateAction,
ExtensionUpdateState,
type ExtensionUpdateStatus,
} from '../../ui/state/extensions.js';
import {
copyExtension,
installExtension,
uninstallExtension,
loadExtension,
loadInstallMetadata,
ExtensionStorage,
loadExtensionConfig,
} from '../extension.js';
import { checkForExtensionUpdate } from './github.js';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import { getErrorMessage } from '../../utils/errors.js';
export interface ExtensionUpdateInfo {
name: string;
originalVersion: string;
updatedVersion: string;
}
export async function updateExtension(
extension: GeminiCLIExtension,
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
currentState: ExtensionUpdateState,
dispatchExtensionStateUpdate: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo | undefined> {
if (currentState === ExtensionUpdateState.UPDATING) {
return undefined;
}
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UPDATING },
});
const installMetadata = loadInstallMetadata(extension.path);
if (!installMetadata?.type) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error(
`Extension ${extension.name} cannot be updated; its type is unknown.`,
);
}
if (installMetadata?.type === 'link') {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UP_TO_DATE },
});
throw new Error(`Extension is linked, so it does not need to be updated`);
}
const originalVersion = extension.version;
const tempDir = await ExtensionStorage.createTmpDir();
try {
await copyExtension(extension.path, tempDir);
const previousExtensionConfig = await loadExtensionConfig({
extensionDir: extension.path,
workspaceDir: cwd,
});
await uninstallExtension(extension.name, cwd);
await installExtension(
installMetadata,
requestConsent,
cwd,
previousExtensionConfig,
);
const updatedExtensionStorage = new ExtensionStorage(extension.name);
const updatedExtension = loadExtension({
extensionDir: updatedExtensionStorage.getExtensionDir(),
workspaceDir: cwd,
});
if (!updatedExtension) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error('Updated extension not found after installation.');
}
const updatedVersion = updatedExtension.config.version;
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
return {
name: extension.name,
originalVersion,
updatedVersion,
};
} catch (e) {
console.error(
`Error updating extension, rolling back. ${getErrorMessage(e)}`,
);
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
await copyExtension(tempDir, extension.path);
throw e;
} finally {
await fs.promises.rm(tempDir, { recursive: true, force: true });
}
}
export async function updateAllUpdatableExtensions(
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
extensions: GeminiCLIExtension[],
extensionsState: Map<string, ExtensionUpdateStatus>,
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo[]> {
return (
await Promise.all(
extensions
.filter(
(extension) =>
extensionsState.get(extension.name)?.status ===
ExtensionUpdateState.UPDATE_AVAILABLE,
)
.map((extension) =>
updateExtension(
extension,
cwd,
requestConsent,
extensionsState.get(extension.name)!.status,
dispatch,
),
),
)
).filter((updateInfo) => !!updateInfo);
}
export interface ExtensionUpdateCheckResult {
state: ExtensionUpdateState;
error?: string;
}
export async function checkForAllExtensionUpdates(
extensions: GeminiCLIExtension[],
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<void> {
dispatch({ type: 'BATCH_CHECK_START' });
const promises: Array<Promise<void>> = [];
for (const extension of extensions) {
if (!extension.installMetadata) {
dispatch({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.NOT_UPDATABLE,
},
});
continue;
}
promises.push(
checkForExtensionUpdate(extension, (updatedState) => {
dispatch({
type: 'SET_STATE',
payload: { name: extension.name, state: updatedState },
});
}),
);
}
await Promise.all(promises);
dispatch({ type: 'BATCH_CHECK_END' });
}
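For orientation, here is a hedged sketch of how a caller could consume the dispatch-based API shown above; the reducer shape is inferred from the actions dispatched here, not copied from the repository:

```typescript
import {
  ExtensionUpdateState,
  type ExtensionUpdateAction,
} from '../../ui/state/extensions.js';

// Collect per-extension update states from the dispatched actions.
const states = new Map<string, ExtensionUpdateState>();

const dispatch = (action: ExtensionUpdateAction): void => {
  if (action.type === 'SET_STATE') {
    states.set(action.payload.name, action.payload.state);
  }
  // 'BATCH_CHECK_START' / 'BATCH_CHECK_END' could toggle a spinner in a UI.
};

// await checkForAllExtensionUpdates(extensions, dispatch);
// states might end up as, for example:
//   Map { 'git-ext' => UPDATE_AVAILABLE, 'local-ext' => UP_TO_DATE, 'manual-ext' => NOT_UPDATABLE }
```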

View File

@@ -51,7 +51,6 @@ import {
import * as fs from 'node:fs'; // fs will be mocked separately
import stripJsonComments from 'strip-json-comments'; // Will be mocked separately
import { isWorkspaceTrusted } from './trustedFolders.js';
import { disableExtension } from './extension.js';
// These imports will get the versions from the vi.mock('./settings.js', ...) factory.
import {
@@ -64,8 +63,6 @@ import {
needsMigration,
type Settings,
loadEnvironment,
migrateDeprecatedSettings,
SettingScope,
SETTINGS_VERSION,
SETTINGS_VERSION_KEY,
} from './settings.js';
@@ -2649,122 +2646,4 @@ describe('Settings Loading and Merging', () => {
});
});
});
describe('migrateDeprecatedSettings', () => {
let mockFsExistsSync: Mocked<typeof fs.existsSync>;
let mockFsReadFileSync: Mocked<typeof fs.readFileSync>;
let mockDisableExtension: Mocked<typeof disableExtension>;
beforeEach(() => {
vi.resetAllMocks();
mockFsExistsSync = vi.mocked(fs.existsSync);
mockFsReadFileSync = vi.mocked(fs.readFileSync);
mockDisableExtension = vi.mocked(disableExtension);
(mockFsExistsSync as Mock).mockReturnValue(true);
vi.mocked(isWorkspaceTrusted).mockReturnValue(true);
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should migrate disabled extensions from user and workspace settings', () => {
const userSettingsContent = {
extensions: {
disabled: ['user-ext-1', 'shared-ext'],
},
};
const workspaceSettingsContent = {
extensions: {
disabled: ['workspace-ext-1', 'shared-ext'],
},
};
(mockFsReadFileSync as Mock).mockImplementation(
(p: fs.PathOrFileDescriptor) => {
if (p === USER_SETTINGS_PATH)
return JSON.stringify(userSettingsContent);
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
return JSON.stringify(workspaceSettingsContent);
return '{}';
},
);
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
// Check user settings migration
expect(mockDisableExtension).toHaveBeenCalledWith(
'user-ext-1',
SettingScope.User,
MOCK_WORKSPACE_DIR,
);
expect(mockDisableExtension).toHaveBeenCalledWith(
'shared-ext',
SettingScope.User,
MOCK_WORKSPACE_DIR,
);
// Check workspace settings migration
expect(mockDisableExtension).toHaveBeenCalledWith(
'workspace-ext-1',
SettingScope.Workspace,
MOCK_WORKSPACE_DIR,
);
expect(mockDisableExtension).toHaveBeenCalledWith(
'shared-ext',
SettingScope.Workspace,
MOCK_WORKSPACE_DIR,
);
// Check that setValue was called to remove the deprecated setting
expect(setValueSpy).toHaveBeenCalledWith(
SettingScope.User,
'extensions',
{
disabled: undefined,
},
);
expect(setValueSpy).toHaveBeenCalledWith(
SettingScope.Workspace,
'extensions',
{
disabled: undefined,
},
);
});
it('should not do anything if there are no deprecated settings', () => {
const userSettingsContent = {
extensions: {
enabled: ['user-ext-1'],
},
};
const workspaceSettingsContent = {
someOtherSetting: 'value',
};
(mockFsReadFileSync as Mock).mockImplementation(
(p: fs.PathOrFileDescriptor) => {
if (p === USER_SETTINGS_PATH)
return JSON.stringify(userSettingsContent);
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
return JSON.stringify(workspaceSettingsContent);
return '{}';
},
);
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
expect(mockDisableExtension).not.toHaveBeenCalled();
expect(setValueSpy).not.toHaveBeenCalled();
});
});
});

View File

@@ -30,7 +30,6 @@ import {
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
import { customDeepMerge, type MergeableObject } from '../utils/deepMerge.js';
import { updateSettingsFilePreservingFormat } from '../utils/commentJson.js';
import { disableExtension } from './extension.js';
function getMergeStrategyForPath(path: string[]): MergeStrategy | undefined {
let current: SettingDefinition | undefined = undefined;
@@ -81,7 +80,6 @@ const MIGRATION_MAP: Record<string, string> = {
excludeTools: 'tools.exclude',
excludeMCPServers: 'mcp.excluded',
excludedProjectEnvVars: 'advanced.excludedEnvVars',
extensionManagement: 'experimental.extensionManagement',
extensions: 'extensions',
fileFiltering: 'context.fileFiltering',
folderTrustFeature: 'security.folderTrust.featureEnabled',
@@ -812,31 +810,6 @@ export function loadSettings(
);
}
export function migrateDeprecatedSettings(
loadedSettings: LoadedSettings,
workspaceDir: string = process.cwd(),
): void {
const processScope = (scope: SettingScope) => {
const settings = loadedSettings.forScope(scope).settings;
if (settings.extensions?.disabled) {
console.log(
`Migrating deprecated extensions.disabled settings from ${scope} settings...`,
);
for (const extension of settings.extensions.disabled ?? []) {
disableExtension(extension, scope, workspaceDir);
}
const newExtensionsValue = { ...settings.extensions };
newExtensionsValue.disabled = undefined;
loadedSettings.setValue(scope, 'extensions', newExtensionsValue);
}
};
processScope(SettingScope.User);
processScope(SettingScope.Workspace);
}
export function saveSettings(settingsFile: SettingsFile): void {
try {
// Ensure the directory exists

View File

@@ -202,7 +202,6 @@ const SETTINGS_SCHEMA = {
{ value: 'en', label: 'English' },
{ value: 'zh', label: '中文 (Chinese)' },
{ value: 'ru', label: 'Русский (Russian)' },
{ value: 'de', label: 'Deutsch (German)' },
],
},
terminalBell: {
@@ -1193,15 +1192,6 @@ const SETTINGS_SCHEMA = {
description: 'Setting to enable experimental features',
showInDialog: false,
properties: {
extensionManagement: {
type: 'boolean',
label: 'Extension Management',
category: 'Experimental',
requiresRestart: true,
default: true,
description: 'Enable extension management features.',
showInDialog: false,
},
visionModelPreview: {
type: 'boolean',
label: 'Vision Model Preview',
@@ -1224,39 +1214,6 @@ const SETTINGS_SCHEMA = {
},
},
},
extensions: {
type: 'object',
label: 'Extensions',
category: 'Extensions',
requiresRestart: true,
default: {},
description: 'Settings for extensions.',
showInDialog: false,
properties: {
disabled: {
type: 'array',
label: 'Disabled Extensions',
category: 'Extensions',
requiresRestart: true,
default: [] as string[],
description: 'List of disabled extensions.',
showInDialog: false,
mergeStrategy: MergeStrategy.UNION,
},
workspacesWithMigrationNudge: {
type: 'array',
label: 'Workspaces with Migration Nudge',
category: 'Extensions',
requiresRestart: false,
default: [] as string[],
description:
'List of workspaces for which the migration nudge has been shown.',
showInDialog: false,
mergeStrategy: MergeStrategy.UNION,
},
},
},
} as const satisfies SettingsSchema;
export type SettingsSchemaType = typeof SETTINGS_SCHEMA;

View File

@@ -262,7 +262,6 @@ describe('gemini.tsx main function', () => {
);
const { loadSettings } = await import('./config/settings.js');
const cleanupModule = await import('./utils/cleanup.js');
const extensionModule = await import('./config/extension.js');
const validatorModule = await import('./validateNonInterActiveAuth.js');
const streamJsonModule = await import('./nonInteractive/session.js');
const initializerModule = await import('./core/initializer.js');
@@ -275,11 +274,6 @@ describe('gemini.tsx main function', () => {
vi.mocked(cleanupModule.registerCleanup).mockImplementation(() => {});
const runExitCleanupMock = vi.mocked(cleanupModule.runExitCleanup);
runExitCleanupMock.mockResolvedValue(undefined);
vi.spyOn(extensionModule, 'loadExtensions').mockReturnValue([]);
vi.spyOn(
extensionModule.ExtensionStorage,
'getUserExtensionsDir',
).mockReturnValue('/tmp/extensions');
vi.spyOn(initializerModule, 'initializeApp').mockResolvedValue({
authError: null,
themeError: null,
@@ -460,7 +454,6 @@ describe('gemini.tsx main function kitty protocol', () => {
telemetryOutfile: undefined,
allowedMcpServerNames: undefined,
allowedTools: undefined,
acp: undefined,
experimentalAcp: undefined,
experimentalSkills: undefined,
extensions: undefined,
@@ -640,37 +633,4 @@ describe('startInteractiveUI', () => {
await new Promise((resolve) => setTimeout(resolve, 0));
expect(checkForUpdates).toHaveBeenCalledTimes(1);
});
it('should not check for updates when update nag is disabled', async () => {
const { checkForUpdates } = await import('./ui/utils/updateCheck.js');
const mockInitializationResult = {
authError: null,
themeError: null,
shouldOpenAuthDialog: false,
geminiMdFileCount: 0,
};
const settingsWithUpdateNagDisabled = {
merged: {
general: {
disableUpdateNag: true,
},
ui: {
hideWindowTitle: false,
},
},
} as LoadedSettings;
await startInteractiveUI(
mockConfig,
settingsWithUpdateNagDisabled,
mockStartupWarnings,
mockWorkspaceRoot,
mockInitializationResult,
);
await new Promise((resolve) => setTimeout(resolve, 0));
expect(checkForUpdates).not.toHaveBeenCalled();
});
});

View File

@@ -15,9 +15,8 @@ import React from 'react';
import { validateAuthMethod } from './config/auth.js';
import * as cliConfig from './config/config.js';
import { loadCliConfig, parseArguments } from './config/config.js';
import { ExtensionStorage, loadExtensions } from './config/extension.js';
import type { DnsResolutionOrder, LoadedSettings } from './config/settings.js';
import { loadSettings, migrateDeprecatedSettings } from './config/settings.js';
import { loadSettings } from './config/settings.js';
import {
initializeApp,
type InitializationResult,
@@ -103,7 +102,6 @@ function getNodeMemoryArgs(isDebugMode: boolean): string[] {
return [];
}
import { ExtensionEnablementManager } from './config/extensions/extensionEnablement.js';
import { loadSandboxConfig } from './config/sandboxConfig.js';
import { runAcpAgent } from './acp-integration/acpAgent.js';
@@ -183,18 +181,16 @@ export async function startInteractiveUI(
},
);
if (!settings.merged.general?.disableUpdateNag) {
checkForUpdates()
.then((info) => {
handleAutoUpdate(info, settings, config.getProjectRoot());
})
.catch((err) => {
// Silently ignore update check errors.
if (config.getDebugMode()) {
console.error('Update check failed:', err);
}
});
}
checkForUpdates()
.then((info) => {
handleAutoUpdate(info, settings, config.getProjectRoot());
})
.catch((err) => {
// Silently ignore update check errors.
if (config.getDebugMode()) {
console.error('Update check failed:', err);
}
});
registerCleanup(() => instance.unmount());
}
@@ -202,10 +198,9 @@ export async function startInteractiveUI(
export async function main() {
setupUnhandledRejectionHandler();
const settings = loadSettings();
migrateDeprecatedSettings(settings);
await cleanupCheckpoints();
let argv = await parseArguments(settings.merged);
let argv = await parseArguments();
// Check for invalid input combinations early to prevent crashes
if (argv.promptInteractive && !process.stdin.isTTY) {
@@ -247,9 +242,9 @@ export async function main() {
if (sandboxConfig) {
const partialConfig = await loadCliConfig(
settings.merged,
[],
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
argv,
undefined,
[],
);
if (
@@ -333,25 +328,21 @@ export async function main() {
// to run Gemini CLI. It is now safe to perform expensive initialization that
// may have side effects.
{
const extensionEnablementManager = new ExtensionEnablementManager(
ExtensionStorage.getUserExtensionsDir(),
argv.extensions,
);
const extensions = loadExtensions(extensionEnablementManager);
const config = await loadCliConfig(
settings.merged,
extensions,
extensionEnablementManager,
argv,
process.cwd(),
argv.extensions,
);
if (config.getListExtensions()) {
console.log('Installed extensions:');
for (const extension of extensions) {
console.log(`- ${extension.config.name}`);
}
process.exit(0);
}
// FIXME: list extensions after the config is initialized
// if (config.getListExtensions()) {
// console.log('Installed extensions:');
// for (const extension of extensions) {
// console.log(`- ${extension.config.name}`);
// }
// process.exit(0);
// }
// Setup unified ConsolePatcher based on interactive mode
const isInteractive = config.isInteractive();
@@ -397,7 +388,7 @@ export async function main() {
}
if (config.getExperimentalZedIntegration()) {
return runAcpAgent(config, settings, extensions, argv);
return runAcpAgent(config, settings, argv);
}
let input = config.getQuestion();

File diff suppressed because it is too large

View File

@@ -89,9 +89,6 @@ export default {
'No tools available': 'No tools available',
'View or change the approval mode for tool usage':
'View or change the approval mode for tool usage',
'Invalid approval mode "{{arg}}". Valid modes: {{modes}}':
'Invalid approval mode "{{arg}}". Valid modes: {{modes}}',
'Approval mode set to "{{mode}}"': 'Approval mode set to "{{mode}}"',
'View or change the language setting': 'View or change the language setting',
'change the theme': 'change the theme',
'Select Theme': 'Select Theme',
@@ -1040,6 +1037,7 @@ export default {
'Applying percussive maintenance...',
'Searching for the correct USB orientation...',
'Ensuring the magic smoke stays inside the wires...',
'Rewriting in Rust for no particular reason...',
'Trying to exit Vim...',
'Spinning up the hamster wheel...',
"That's not a bug, it's an undocumented feature...",

View File

@@ -89,10 +89,6 @@ export default {
'No tools available': 'Нет доступных инструментов',
'View or change the approval mode for tool usage':
'Просмотр или изменение режима подтверждения для использования инструментов',
'Invalid approval mode "{{arg}}". Valid modes: {{modes}}':
'Недопустимый режим подтверждения "{{arg}}". Допустимые режимы: {{modes}}',
'Approval mode set to "{{mode}}"':
'Режим подтверждения установлен на "{{mode}}"',
'View or change the language setting':
'Просмотр или изменение настроек языка',
'change the theme': 'Изменение темы',
@@ -1060,6 +1056,7 @@ export default {
'Провожу настройку методом тыка...',
'Ищем, какой стороной вставлять флешку...',
'Следим, чтобы волшебный дым не вышел из проводов...',
'Переписываем всё на Rust без особой причины...',
'Пытаемся выйти из Vim...',
'Раскручиваем колесо для хомяка...',
'Это не баг, а фича...',

View File

@@ -88,9 +88,6 @@ export default {
'No tools available': '没有可用工具',
'View or change the approval mode for tool usage':
'查看或更改工具使用的审批模式',
'Invalid approval mode "{{arg}}". Valid modes: {{modes}}':
'无效的审批模式 "{{arg}}"。有效模式:{{modes}}',
'Approval mode set to "{{mode}}"': '审批模式已设置为 "{{mode}}"',
'View or change the language setting': '查看或更改语言设置',
'change the theme': '更改主题',
'Select Theme': '选择主题',

View File

@@ -630,67 +630,6 @@ describe('BaseJsonOutputAdapter', () => {
expect(state.blocks).toHaveLength(0);
});
it('should preserve whitespace in thinking content', () => {
const state = adapter.exposeCreateMessageState();
adapter.startAssistantMessage();
adapter.exposeAppendThinking(
state,
'',
'The user just said "Hello"',
null,
);
expect(state.blocks).toHaveLength(1);
expect(state.blocks[0]).toMatchObject({
type: 'thinking',
thinking: 'The user just said "Hello"',
});
// Verify spaces are preserved
const block = state.blocks[0] as { thinking: string };
expect(block.thinking).toContain('user just');
expect(block.thinking).not.toContain('userjust');
});
it('should preserve whitespace when appending multiple thinking fragments', () => {
const state = adapter.exposeCreateMessageState();
adapter.startAssistantMessage();
// Simulate streaming thinking content in fragments
adapter.exposeAppendThinking(state, '', 'The user just', null);
adapter.exposeAppendThinking(state, '', ' said "Hello"', null);
adapter.exposeAppendThinking(
state,
'',
'. This is a simple greeting',
null,
);
expect(state.blocks).toHaveLength(1);
const block = state.blocks[0] as { thinking: string };
// Verify the complete text with all spaces preserved
expect(block.thinking).toBe(
'The user just said "Hello". This is a simple greeting',
);
// Verify specific space preservation
expect(block.thinking).toContain('user just ');
expect(block.thinking).toContain(' said');
expect(block.thinking).toContain('". This');
expect(block.thinking).not.toContain('userjust');
expect(block.thinking).not.toContain('justsaid');
});
it('should preserve leading and trailing whitespace in description', () => {
const state = adapter.exposeCreateMessageState();
adapter.startAssistantMessage();
adapter.exposeAppendThinking(state, '', ' content with spaces ', null);
expect(state.blocks).toHaveLength(1);
const block = state.blocks[0] as { thinking: string };
expect(block.thinking).toBe(' content with spaces ');
});
});
describe('appendToolUse', () => {

View File

@@ -816,18 +816,9 @@ export abstract class BaseJsonOutputAdapter {
parentToolUseId?: string | null,
): void {
const actualParentToolUseId = parentToolUseId ?? null;
// Build fragment without trimming to preserve whitespace in streaming content
// Only filter out null/undefined/empty values
const parts: string[] = [];
if (subject && subject.length > 0) {
parts.push(subject);
}
if (description && description.length > 0) {
parts.push(description);
}
const fragment = parts.join(': ');
const fragment = [subject?.trim(), description?.trim()]
.filter((value) => value && value.length > 0)
.join(': ');
if (!fragment) {
return;
}

View File

@@ -323,68 +323,6 @@ describe('StreamJsonOutputAdapter', () => {
});
});
it('should preserve whitespace in thinking content (issue #1356)', () => {
adapter.processEvent({
type: GeminiEventType.Thought,
value: {
subject: '',
description: 'The user just said "Hello"',
},
});
const message = adapter.finalizeAssistantMessage();
expect(message.message.content).toHaveLength(1);
const block = message.message.content[0] as {
type: string;
thinking: string;
};
expect(block.type).toBe('thinking');
expect(block.thinking).toBe('The user just said "Hello"');
// Verify spaces are preserved
expect(block.thinking).toContain('user just');
expect(block.thinking).not.toContain('userjust');
});
it('should preserve whitespace when streaming multiple thinking fragments (issue #1356)', () => {
// Simulate streaming thinking content in multiple events
adapter.processEvent({
type: GeminiEventType.Thought,
value: {
subject: '',
description: 'The user just',
},
});
adapter.processEvent({
type: GeminiEventType.Thought,
value: {
subject: '',
description: ' said "Hello"',
},
});
adapter.processEvent({
type: GeminiEventType.Thought,
value: {
subject: '',
description: '. This is a simple greeting',
},
});
const message = adapter.finalizeAssistantMessage();
expect(message.message.content).toHaveLength(1);
const block = message.message.content[0] as {
type: string;
thinking: string;
};
expect(block.thinking).toBe(
'The user just said "Hello". This is a simple greeting',
);
// Verify specific spaces are preserved
expect(block.thinking).toContain('user just ');
expect(block.thinking).toContain(' said');
expect(block.thinking).not.toContain('userjust');
expect(block.thinking).not.toContain('justsaid');
});
it('should append tool use from ToolCallRequest events', () => {
adapter.processEvent({
type: GeminiEventType.ToolCallRequest,

View File

@@ -298,9 +298,7 @@ describe('runNonInteractive', () => {
mockConfig,
expect.objectContaining({ name: 'testTool' }),
expect.any(AbortSignal),
expect.objectContaining({
outputUpdateHandler: expect.any(Function),
}),
undefined,
);
// Verify first call has isContinuation: false
expect(mockGeminiClient.sendMessageStream).toHaveBeenNthCalledWith(
@@ -773,52 +771,6 @@ describe('runNonInteractive', () => {
);
});
it('should handle API errors in text mode and exit with error code', async () => {
(mockConfig.getOutputFormat as Mock).mockReturnValue(OutputFormat.TEXT);
setupMetricsMock();
// Simulate an API error event (like 401 unauthorized)
const apiErrorEvent: ServerGeminiStreamEvent = {
type: GeminiEventType.Error,
value: {
error: {
message: '401 Incorrect API key provided',
status: 401,
},
},
};
mockGeminiClient.sendMessageStream.mockReturnValue(
createStreamFromEvents([apiErrorEvent]),
);
let thrownError: Error | null = null;
try {
await runNonInteractive(
mockConfig,
mockSettings,
'Test input',
'prompt-id-api-error',
);
// Should not reach here
expect.fail('Expected error to be thrown');
} catch (error) {
thrownError = error as Error;
}
// Should throw with the API error message
expect(thrownError).toBeTruthy();
expect(thrownError?.message).toContain('401');
expect(thrownError?.message).toContain('Incorrect API key provided');
// Verify error was written to stderr
expect(processStderrSpy).toHaveBeenCalled();
const stderrCalls = processStderrSpy.mock.calls;
const errorOutput = stderrCalls.map((call) => call[0]).join('');
expect(errorOutput).toContain('401');
expect(errorOutput).toContain('Incorrect API key provided');
});
it('should handle FatalInputError with custom exit code in JSON format', async () => {
(mockConfig.getOutputFormat as Mock).mockReturnValue(OutputFormat.JSON);
setupMetricsMock();
@@ -1825,84 +1777,4 @@ describe('runNonInteractive', () => {
{ isContinuation: false },
);
});
it('should print tool output to console in text mode (non-Task tools)', async () => {
// Test that tool output is printed to stdout in text mode
const toolCallEvent: ServerGeminiStreamEvent = {
type: GeminiEventType.ToolCallRequest,
value: {
callId: 'tool-1',
name: 'run_in_terminal',
args: { command: 'npm outdated' },
isClientInitiated: false,
prompt_id: 'prompt-id-tool-output',
},
};
// Mock tool execution with outputUpdateHandler being called
mockCoreExecuteToolCall.mockImplementation(
async (_config, _request, _signal, options) => {
// Simulate tool calling outputUpdateHandler with output chunks
if (options?.outputUpdateHandler) {
options.outputUpdateHandler('tool-1', 'Package outdated\n');
options.outputUpdateHandler('tool-1', 'npm@1.0.0 -> npm@2.0.0\n');
}
return {
responseParts: [
{
functionResponse: {
id: 'tool-1',
name: 'run_in_terminal',
response: {
output: 'Package outdated\nnpm@1.0.0 -> npm@2.0.0',
},
},
},
],
};
},
);
const firstCallEvents: ServerGeminiStreamEvent[] = [
toolCallEvent,
{
type: GeminiEventType.Finished,
value: { reason: undefined, usageMetadata: { totalTokenCount: 5 } },
},
];
const secondCallEvents: ServerGeminiStreamEvent[] = [
{ type: GeminiEventType.Content, value: 'Dependencies checked' },
{
type: GeminiEventType.Finished,
value: { reason: undefined, usageMetadata: { totalTokenCount: 3 } },
},
];
mockGeminiClient.sendMessageStream
.mockReturnValueOnce(createStreamFromEvents(firstCallEvents))
.mockReturnValueOnce(createStreamFromEvents(secondCallEvents));
await runNonInteractive(
mockConfig,
mockSettings,
'Check dependencies',
'prompt-id-tool-output',
);
// Verify that executeToolCall was called with outputUpdateHandler
expect(mockCoreExecuteToolCall).toHaveBeenCalledWith(
mockConfig,
expect.objectContaining({ name: 'run_in_terminal' }),
expect.any(AbortSignal),
expect.objectContaining({
outputUpdateHandler: expect.any(Function),
}),
);
// Verify tool output was written to stdout
expect(processStdoutSpy).toHaveBeenCalledWith('Package outdated\n');
expect(processStdoutSpy).toHaveBeenCalledWith('npm@1.0.0 -> npm@2.0.0\n');
expect(processStdoutSpy).toHaveBeenCalledWith('Dependencies checked');
});
});

View File

@@ -4,11 +4,7 @@
* SPDX-License-Identifier: Apache-2.0
*/
import type {
Config,
ToolCallRequestInfo,
ToolResultDisplay,
} from '@qwen-code/qwen-code-core';
import type { Config, ToolCallRequestInfo } from '@qwen-code/qwen-code-core';
import { isSlashCommand } from './ui/utils/commandUtils.js';
import type { LoadedSettings } from './config/settings.js';
import {
@@ -312,8 +308,6 @@ export async function runNonInteractive(
config.getContentGeneratorConfig()?.authType,
);
process.stderr.write(`${errorText}\n`);
// Throw error to exit with non-zero code
throw new Error(errorText);
}
}
}
@@ -339,7 +333,7 @@ export async function runNonInteractive(
? options.controlService.permission.getToolCallUpdateCallback()
: undefined;
// Create output handler for Task tool (for subagent execution)
// Only pass outputUpdateHandler for Task tool
const isTaskTool = finalRequestInfo.name === 'task';
const taskToolProgress = isTaskTool
? createTaskToolProgressHandler(
@@ -349,41 +343,20 @@ export async function runNonInteractive(
)
: undefined;
const taskToolProgressHandler = taskToolProgress?.handler;
// Create output handler for non-Task tools in text mode (for console output)
const nonTaskOutputHandler =
!isTaskTool && !adapter
? (callId: string, outputChunk: ToolResultDisplay) => {
// Print tool output to console in text mode
if (typeof outputChunk === 'string') {
process.stdout.write(outputChunk);
} else if (
outputChunk &&
typeof outputChunk === 'object' &&
'ansiOutput' in outputChunk
) {
// Handle ANSI output - just print as string for now
process.stdout.write(String(outputChunk.ansiOutput));
}
}
: undefined;
// Combine output handlers
const outputUpdateHandler =
taskToolProgressHandler || nonTaskOutputHandler;
const toolResponse = await executeToolCall(
config,
finalRequestInfo,
abortController.signal,
outputUpdateHandler || toolCallUpdateCallback
isTaskTool && taskToolProgressHandler
? {
...(outputUpdateHandler && { outputUpdateHandler }),
...(toolCallUpdateCallback && {
onToolCallsUpdate: toolCallUpdateCallback,
}),
outputUpdateHandler: taskToolProgressHandler,
onToolCallsUpdate: toolCallUpdateCallback,
}
: undefined,
: toolCallUpdateCallback
? {
onToolCallsUpdate: toolCallUpdateCallback,
}
: undefined,
);
// Note: In JSON mode, subagent messages are automatically added to the main

View File

@@ -0,0 +1,327 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { FileCommandLoader } from './FileCommandLoader.js';
import type { Config } from '@qwen-code/qwen-code-core';
import { Storage } from '@qwen-code/qwen-code-core';
describe('FileCommandLoader - Extension Commands Support', () => {
let tempDir: string;
let mockConfig: Partial<Config>;
beforeEach(async () => {
tempDir = await fs.promises.mkdtemp(
path.join(os.tmpdir(), 'file-command-loader-ext-test-'),
);
mockConfig = {
getFolderTrustFeature: () => false,
getFolderTrust: () => true,
getProjectRoot: () => tempDir,
storage: new Storage(tempDir),
getExtensions: () => [],
};
});
afterEach(async () => {
await fs.promises.rm(tempDir, { recursive: true, force: true });
});
it('should load commands from extension with config.commands path', async () => {
// Setup extension structure
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'test-ext');
const customCommandsDir = path.join(extensionDir, 'custom-cmds');
await fs.promises.mkdir(customCommandsDir, { recursive: true });
// Create extension config with custom commands path
const extensionConfig = {
name: 'test-ext',
version: '1.0.0',
commands: 'custom-cmds',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create a test command in custom directory
const commandContent =
'---\ndescription: Test command from extension\n---\nDo something';
await fs.promises.writeFile(
path.join(customCommandsDir, 'test.md'),
commandContent,
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'test-ext',
config: extensionConfig,
name: 'test-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
contextFiles: [],
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('test-ext:test');
expect(commands[0].description).toBe(
'[test-ext] Test command from extension',
);
});
it('should load commands from extension with multiple commands paths', async () => {
// Setup extension structure
const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'multi-ext');
const cmdsDir1 = path.join(extensionDir, 'commands1');
const cmdsDir2 = path.join(extensionDir, 'commands2');
await fs.promises.mkdir(cmdsDir1, { recursive: true });
await fs.promises.mkdir(cmdsDir2, { recursive: true });
// Create extension config with multiple commands paths
const extensionConfig = {
name: 'multi-ext',
version: '1.0.0',
commands: ['commands1', 'commands2'],
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create test commands in both directories
await fs.promises.writeFile(
path.join(cmdsDir1, 'cmd1.md'),
'---\n---\nCommand 1',
);
await fs.promises.writeFile(
path.join(cmdsDir2, 'cmd2.md'),
'---\n---\nCommand 2',
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'multi-ext',
config: extensionConfig,
contextFiles: [],
name: 'multi-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(2);
const commandNames = commands.map((c) => c.name).sort();
expect(commandNames).toEqual(['multi-ext:cmd1', 'multi-ext:cmd2']);
});
it('should fallback to default "commands" directory when config.commands not specified', async () => {
// Setup extension structure with default commands directory
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'default-ext',
);
const defaultCommandsDir = path.join(extensionDir, 'commands');
await fs.promises.mkdir(defaultCommandsDir, { recursive: true });
// Create extension config without commands field
const extensionConfig = {
name: 'default-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Create a test command in default directory
await fs.promises.writeFile(
path.join(defaultCommandsDir, 'default.md'),
'---\n---\nDefault command',
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'default-ext',
config: extensionConfig,
contextFiles: [],
name: 'default-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('default-ext:default');
});
it('should handle extension without commands directory gracefully', async () => {
// Setup extension structure without commands directory
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'no-cmds-ext',
);
await fs.promises.mkdir(extensionDir, { recursive: true });
// Create extension config
const extensionConfig = {
name: 'no-cmds-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
// Mock config to return the extension
mockConfig.getExtensions = () => [
{
id: 'no-cmds-ext',
config: extensionConfig,
contextFiles: [],
name: 'no-cmds-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
// Should not throw and return empty array
expect(commands).toHaveLength(0);
});
it('should prefix extension commands with extension name', async () => {
// Setup extension
const extensionDir = path.join(
tempDir,
'.qwen',
'extensions',
'prefix-ext',
);
const commandsDir = path.join(extensionDir, 'commands');
await fs.promises.mkdir(commandsDir, { recursive: true });
const extensionConfig = {
name: 'prefix-ext',
version: '1.0.0',
};
await fs.promises.writeFile(
path.join(extensionDir, 'qwen-extension.json'),
JSON.stringify(extensionConfig),
);
await fs.promises.writeFile(
path.join(commandsDir, 'mycommand.md'),
'---\n---\nMy command',
);
mockConfig.getExtensions = () => [
{
id: 'prefix-ext',
config: extensionConfig,
contextFiles: [],
name: 'prefix-ext',
version: '1.0.0',
isActive: true,
path: extensionDir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('prefix-ext:mycommand');
});
it('should load commands from multiple extensions in alphabetical order', async () => {
// Setup two extensions
const ext1Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-b');
const ext2Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-a');
await fs.promises.mkdir(path.join(ext1Dir, 'commands'), {
recursive: true,
});
await fs.promises.mkdir(path.join(ext2Dir, 'commands'), {
recursive: true,
});
// Extension B
await fs.promises.writeFile(
path.join(ext1Dir, 'qwen-extension.json'),
JSON.stringify({ name: 'ext-b', version: '1.0.0' }),
);
await fs.promises.writeFile(
path.join(ext1Dir, 'commands', 'cmd.md'),
'---\n---\nCommand B',
);
// Extension A
await fs.promises.writeFile(
path.join(ext2Dir, 'qwen-extension.json'),
JSON.stringify({ name: 'ext-a', version: '1.0.0' }),
);
await fs.promises.writeFile(
path.join(ext2Dir, 'commands', 'cmd.md'),
'---\n---\nCommand A',
);
mockConfig.getExtensions = () => [
{
id: 'ext-b',
config: { name: 'ext-b', version: '1.0.0' },
contextFiles: [],
name: 'ext-b',
version: '1.0.0',
isActive: true,
path: ext1Dir,
},
{
id: 'ext-a',
config: { name: 'ext-a', version: '1.0.0' },
contextFiles: [],
name: 'ext-a',
version: '1.0.0',
isActive: true,
path: ext2Dir,
},
];
const loader = new FileCommandLoader(mockConfig as Config);
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(2);
// Extensions are sorted alphabetically, so ext-a comes before ext-b
expect(commands[0].name).toBe('ext-a:cmd');
expect(commands[1].name).toBe('ext-b:cmd');
});
});
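The scenarios above all hinge on how an extension declares where its command files live. A hypothetical layout and config (names invented for illustration; the resolution rules are the ones the loader applies) looks like this:

```typescript
// Hypothetical extension on disk (not a real extension from the repo):
//
//   ~/.qwen/extensions/test-ext/
//   ├── qwen-extension.json
//   ├── custom-cmds/
//   │   └── test.md        -> exposed as the "/test-ext:test" command
//   └── more-cmds/
//       └── deploy.toml    -> exposed as the "/test-ext:deploy" command
//
// The "commands" field may be a single path or an array of paths. Relative
// paths are resolved against the extension directory, absolute paths are kept
// as-is, non-existent paths are silently skipped, and when the field is absent
// the loader falls back to the conventional "commands" directory.
const exampleExtensionConfig = {
  name: 'test-ext',
  version: '1.0.0',
  commands: ['custom-cmds', 'more-cmds'],
};
```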

View File

@@ -0,0 +1,117 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import { FileCommandLoader } from './FileCommandLoader.js';
describe('FileCommandLoader - Markdown support', () => {
let tempDir: string;
beforeAll(async () => {
// Create a temporary directory for test commands
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-md-test-'));
});
afterAll(async () => {
// Clean up
await fs.rm(tempDir, { recursive: true, force: true });
});
it('should load markdown commands with frontmatter', async () => {
// Create a test markdown command file
const mdContent = `---
description: Test markdown command
---
This is a test prompt from markdown.`;
const commandPath = path.join(tempDir, 'test-command.md');
await fs.writeFile(commandPath, mdContent, 'utf-8');
// Create loader with temp dir as command source
const loader = new FileCommandLoader(null);
// Mock the getCommandDirectories to return our temp dir
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('test-command');
expect(commands[0].description).toBe('Test markdown command');
} finally {
// Restore original method
loader['getCommandDirectories'] = originalMethod;
}
});
it('should load markdown commands without frontmatter', async () => {
// Create a test markdown command file without frontmatter
const mdContent = 'This is a simple prompt without frontmatter.';
const commandPath = path.join(tempDir, 'simple-command.md');
await fs.writeFile(commandPath, mdContent, 'utf-8');
const loader = new FileCommandLoader(null);
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
const simpleCommand = commands.find(
(cmd) => cmd.name === 'simple-command',
);
expect(simpleCommand).toBeDefined();
expect(simpleCommand?.description).toContain('Custom command from');
} finally {
loader['getCommandDirectories'] = originalMethod;
}
});
it('should load both toml and markdown commands', async () => {
// Create both TOML and Markdown files
const tomlContent = `prompt = "TOML prompt"
description = "TOML command"`;
const mdContent = `---
description: Markdown command
---
Markdown prompt`;
await fs.writeFile(
path.join(tempDir, 'toml-cmd.toml'),
tomlContent,
'utf-8',
);
await fs.writeFile(path.join(tempDir, 'md-cmd.md'), mdContent, 'utf-8');
const loader = new FileCommandLoader(null);
const originalMethod = loader['getCommandDirectories'];
loader['getCommandDirectories'] = () => [{ path: tempDir }];
try {
const commands = await loader.loadCommands(new AbortController().signal);
const tomlCommand = commands.find((cmd) => cmd.name === 'toml-cmd');
const mdCommand = commands.find((cmd) => cmd.name === 'md-cmd');
expect(tomlCommand).toBeDefined();
expect(tomlCommand?.description).toBe('TOML command');
expect(mdCommand).toBeDefined();
expect(mdCommand?.description).toBe('Markdown command');
} finally {
loader['getCommandDirectories'] = originalMethod;
}
});
});
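The command files exercised above are plain Markdown with optional YAML-style frontmatter. The repository ships its own `parseMarkdownCommand`; the following is only an illustrative approximation of the shape the loader expects, namely a `prompt` body plus an optional `description` in the frontmatter:

```typescript
// Illustrative sketch only — not the project's actual parser.
function sketchParseMarkdownCommand(fileContent: string): {
  prompt: string;
  frontmatter?: { description?: string };
} {
  const match = /^---\n([\s\S]*?)\n---\n?/.exec(fileContent);
  if (!match) {
    // No frontmatter: the whole file is the prompt.
    return { prompt: fileContent.trim() };
  }
  const descriptionLine = match[1]
    .split('\n')
    .find((line) => line.startsWith('description:'));
  return {
    prompt: fileContent.slice(match[0].length).trim(),
    frontmatter: descriptionLine
      ? { description: descriptionLine.slice('description:'.length).trim() }
      : {},
  };
}

// '---\ndescription: Test markdown command\n---\nThis is a test prompt.'
// -> { prompt: 'This is a test prompt.',
//      frontmatter: { description: 'Test markdown command' } }
```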

View File

@@ -568,9 +568,9 @@ describe('FileCommandLoader', () => {
expect(commands).toHaveLength(3);
const commandNames = commands.map((cmd) => cmd.name);
expect(commandNames).toEqual(['user', 'project', 'ext']);
expect(commandNames).toEqual(['user', 'project', 'test-ext:ext']);
const extCommand = commands.find((cmd) => cmd.name === 'ext');
const extCommand = commands.find((cmd) => cmd.name === 'test-ext:ext');
expect(extCommand?.extensionName).toBe('test-ext');
expect(extCommand?.description).toMatch(/^\[test-ext\]/);
});
@@ -656,14 +656,14 @@ describe('FileCommandLoader', () => {
expect(result1.content).toEqual([{ text: 'Project deploy command' }]);
}
expect(commands[2].name).toBe('deploy');
expect(commands[2].name).toBe('test-ext:deploy');
expect(commands[2].extensionName).toBe('test-ext');
expect(commands[2].description).toMatch(/^\[test-ext\]/);
const result2 = await commands[2].action?.(
createMockCommandContext({
invocation: {
raw: '/deploy',
name: 'deploy',
raw: '/test-ext:deploy',
name: 'test-ext:deploy',
args: '',
},
}),
@@ -729,7 +729,7 @@ describe('FileCommandLoader', () => {
const commands = await loader.loadCommands(signal);
expect(commands).toHaveLength(1);
expect(commands[0].name).toBe('active');
expect(commands[0].name).toBe('active-ext:active');
expect(commands[0].extensionName).toBe('active-ext');
expect(commands[0].description).toMatch(/^\[active-ext\]/);
});
@@ -803,17 +803,17 @@ describe('FileCommandLoader', () => {
expect(commands).toHaveLength(3);
const commandNames = commands.map((cmd) => cmd.name).sort();
expect(commandNames).toEqual(['b:c', 'b:d:e', 'simple']);
expect(commandNames).toEqual(['a:b:c', 'a:b:d:e', 'a:simple']);
const nestedCmd = commands.find((cmd) => cmd.name === 'b:c');
const nestedCmd = commands.find((cmd) => cmd.name === 'a:b:c');
expect(nestedCmd?.extensionName).toBe('a');
expect(nestedCmd?.description).toMatch(/^\[a\]/);
expect(nestedCmd).toBeDefined();
const result = await nestedCmd!.action?.(
createMockCommandContext({
invocation: {
raw: '/b:c',
name: 'b:c',
raw: '/a:b:c',
name: 'a:b:c',
args: '',
},
}),

View File

@@ -5,34 +5,23 @@
*/
import { promises as fs } from 'node:fs';
import * as fsSync from 'node:fs';
import path from 'node:path';
import toml from '@iarna/toml';
import { glob } from 'glob';
import { z } from 'zod';
import type { Config } from '@qwen-code/qwen-code-core';
import { Storage } from '@qwen-code/qwen-code-core';
import { EXTENSIONS_CONFIG_FILENAME, Storage } from '@qwen-code/qwen-code-core';
import type { ICommandLoader } from './types.js';
import type {
CommandContext,
SlashCommand,
SlashCommandActionReturn,
} from '../ui/commands/types.js';
import { CommandKind } from '../ui/commands/types.js';
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
import type {
IPromptProcessor,
PromptPipelineContent,
} from './prompt-processors/types.js';
import {
SHORTHAND_ARGS_PLACEHOLDER,
SHELL_INJECTION_TRIGGER,
AT_FILE_INJECTION_TRIGGER,
} from './prompt-processors/types.js';
parseMarkdownCommand,
MarkdownCommandDefSchema,
} from './markdown-command-parser.js';
import {
ConfirmationRequiredError,
ShellProcessor,
} from './prompt-processors/shellProcessor.js';
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
createSlashCommandFromDefinition,
type CommandDefinition,
} from './command-factory.js';
import type { SlashCommand } from '../ui/commands/types.js';
interface CommandDirectory {
path: string;
@@ -96,7 +85,12 @@ export class FileCommandLoader implements ICommandLoader {
const commandDirs = this.getCommandDirectories();
for (const dirInfo of commandDirs) {
try {
const files = await glob('**/*.toml', {
// Scan both .toml and .md files
const tomlFiles = await glob('**/*.toml', {
...globOptions,
cwd: dirInfo.path,
});
const mdFiles = await glob('**/*.md', {
...globOptions,
cwd: dirInfo.path,
});
@@ -105,18 +99,28 @@ export class FileCommandLoader implements ICommandLoader {
return [];
}
const commandPromises = files.map((file) =>
this.parseAndAdaptFile(
// Process TOML files
const tomlCommandPromises = tomlFiles.map((file) =>
this.parseAndAdaptTomlFile(
path.join(dirInfo.path, file),
dirInfo.path,
dirInfo.extensionName,
),
);
const commands = (await Promise.all(commandPromises)).filter(
(cmd): cmd is SlashCommand => cmd !== null,
// Process Markdown files
const mdCommandPromises = mdFiles.map((file) =>
this.parseAndAdaptMarkdownFile(
path.join(dirInfo.path, file),
dirInfo.path,
dirInfo.extensionName,
),
);
const commands = (
await Promise.all([...tomlCommandPromises, ...mdCommandPromises])
).filter((cmd): cmd is SlashCommand => cmd !== null);
// Add all commands without deduplication
allCommands.push(...commands);
} catch (error) {
@@ -159,17 +163,73 @@ export class FileCommandLoader implements ICommandLoader {
.filter((ext) => ext.isActive)
.sort((a, b) => a.name.localeCompare(b.name)); // Sort alphabetically for deterministic loading
const extensionCommandDirs = activeExtensions.map((ext) => ({
path: path.join(ext.path, 'commands'),
extensionName: ext.name,
}));
// Collect command directories from each extension
for (const ext of activeExtensions) {
// Get commands paths from extension config
const commandsPaths = this.getExtensionCommandsPaths(ext);
dirs.push(...extensionCommandDirs);
for (const cmdPath of commandsPaths) {
dirs.push({
path: cmdPath,
extensionName: ext.name,
});
}
}
}
return dirs;
}
/**
* Get commands paths from an extension.
* Returns paths from config.commands if specified, otherwise defaults to 'commands' directory.
*/
private getExtensionCommandsPaths(ext: {
path: string;
name: string;
}): string[] {
// Try to get extension config
try {
const configPath = path.join(ext.path, EXTENSIONS_CONFIG_FILENAME);
if (fsSync.existsSync(configPath)) {
const configContent = fsSync.readFileSync(configPath, 'utf-8');
const config = JSON.parse(configContent);
if (config.commands) {
const commandsArray = Array.isArray(config.commands)
? config.commands
: [config.commands];
return commandsArray
.map((cmdPath: string) =>
path.isAbsolute(cmdPath) ? cmdPath : path.join(ext.path, cmdPath),
)
.filter((cmdPath: string) => {
try {
return fsSync.existsSync(cmdPath);
} catch {
return false;
}
});
}
}
} catch (error) {
console.warn(`Failed to read extension config for ${ext.name}:`, error);
}
// Default fallback: use 'commands' directory
const defaultPath = path.join(ext.path, 'commands');
try {
if (fsSync.existsSync(defaultPath)) {
return [defaultPath];
}
} catch {
// Ignore
}
return [];
}
/**
* Parses a single .toml file and transforms it into a SlashCommand object.
* @param filePath The absolute path to the .toml file.
@@ -177,7 +237,7 @@ export class FileCommandLoader implements ICommandLoader {
* @param extensionName Optional extension name to prefix commands with.
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
*/
private async parseAndAdaptFile(
private async parseAndAdaptTomlFile(
filePath: string,
baseDir: string,
extensionName?: string,
@@ -216,104 +276,79 @@ export class FileCommandLoader implements ICommandLoader {
const validDef = validationResult.data;
const relativePathWithExt = path.relative(baseDir, filePath);
const relativePath = relativePathWithExt.substring(
0,
relativePathWithExt.length - 5, // length of '.toml'
);
const baseCommandName = relativePath
.split(path.sep)
// Sanitize each path segment to prevent ambiguity. Since ':' is our
// namespace separator, we replace any literal colons in filenames
// with underscores to avoid naming conflicts.
.map((segment) => segment.replaceAll(':', '_'))
.join(':');
// Add extension name tag for extension commands
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
let description = validDef.description || defaultDescription;
if (extensionName) {
description = `[${extensionName}] ${description}`;
}
const processors: IPromptProcessor[] = [];
const usesArgs = validDef.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
const usesShellInjection = validDef.prompt.includes(
SHELL_INJECTION_TRIGGER,
);
const usesAtFileInjection = validDef.prompt.includes(
AT_FILE_INJECTION_TRIGGER,
);
// 1. @-File Injection (Security First).
// This runs first to ensure we're not executing shell commands that
// could dynamically generate malicious @-paths.
if (usesAtFileInjection) {
processors.push(new AtFileProcessor(baseCommandName));
}
// 2. Argument and Shell Injection.
// This runs after file content has been safely injected.
if (usesShellInjection || usesArgs) {
processors.push(new ShellProcessor(baseCommandName));
}
// 3. Default Argument Handling.
// Appends the raw invocation if no explicit {{args}} are used.
if (!usesArgs) {
processors.push(new DefaultArgumentProcessor());
}
return {
name: baseCommandName,
description,
kind: CommandKind.FILE,
// Use factory to create command
return createSlashCommandFromDefinition(
filePath,
baseDir,
validDef,
extensionName,
action: async (
context: CommandContext,
_args: string,
): Promise<SlashCommandActionReturn> => {
if (!context.invocation) {
console.error(
`[FileCommandLoader] Critical error: Command '${baseCommandName}' was executed without invocation context.`,
);
return {
type: 'submit_prompt',
content: [{ text: validDef.prompt }], // Fallback to unprocessed prompt
};
}
'.toml',
);
}
try {
let processedContent: PromptPipelineContent = [
{ text: validDef.prompt },
];
for (const processor of processors) {
processedContent = await processor.process(
processedContent,
context,
);
}
/**
* Parses a single .md file and transforms it into a SlashCommand object.
* @param filePath The absolute path to the .md file.
* @param baseDir The root command directory for name calculation.
* @param extensionName Optional extension name to prefix commands with.
* @returns A promise resolving to a SlashCommand, or null if the file is invalid.
*/
private async parseAndAdaptMarkdownFile(
filePath: string,
baseDir: string,
extensionName?: string,
): Promise<SlashCommand | null> {
let fileContent: string;
try {
fileContent = await fs.readFile(filePath, 'utf-8');
} catch (error: unknown) {
console.error(
`[FileCommandLoader] Failed to read file ${filePath}:`,
error instanceof Error ? error.message : String(error),
);
return null;
}
return {
type: 'submit_prompt',
content: processedContent,
};
} catch (e) {
// Check if it's our specific error type
if (e instanceof ConfirmationRequiredError) {
// Halt and request confirmation from the UI layer.
return {
type: 'confirm_shell_commands',
commandsToConfirm: e.commandsToConfirm,
originalInvocation: {
raw: context.invocation.raw,
},
};
}
// Re-throw other errors to be handled by the global error handler.
throw e;
}
},
let parsed: ReturnType<typeof parseMarkdownCommand>;
try {
parsed = parseMarkdownCommand(fileContent);
} catch (error: unknown) {
console.error(
`[FileCommandLoader] Failed to parse Markdown file ${filePath}:`,
error instanceof Error ? error.message : String(error),
);
return null;
}
const validationResult = MarkdownCommandDefSchema.safeParse(parsed);
if (!validationResult.success) {
console.error(
`[FileCommandLoader] Skipping invalid command file: ${filePath}. Validation errors:`,
validationResult.error.flatten(),
);
return null;
}
const validDef = validationResult.data;
// Convert to CommandDefinition format
const definition: CommandDefinition = {
prompt: validDef.prompt,
description:
validDef.frontmatter?.description &&
typeof validDef.frontmatter.description === 'string'
? validDef.frontmatter.description
: undefined,
};
// Use factory to create command
return createSlashCommandFromDefinition(
filePath,
baseDir,
definition,
extensionName,
'.md',
);
}
}

View File

@@ -0,0 +1,159 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* This file contains helper functions for FileCommandLoader to create SlashCommand
* objects from parsed command definitions (TOML or Markdown).
*/
import path from 'node:path';
import type {
CommandContext,
SlashCommand,
SlashCommandActionReturn,
} from '../ui/commands/types.js';
import { CommandKind } from '../ui/commands/types.js';
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
import type {
IPromptProcessor,
PromptPipelineContent,
} from './prompt-processors/types.js';
import {
SHORTHAND_ARGS_PLACEHOLDER,
SHELL_INJECTION_TRIGGER,
AT_FILE_INJECTION_TRIGGER,
} from './prompt-processors/types.js';
import {
ConfirmationRequiredError,
ShellProcessor,
} from './prompt-processors/shellProcessor.js';
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
export interface CommandDefinition {
prompt: string;
description?: string;
}
/**
* Creates a SlashCommand from a parsed command definition.
* This function is used by both TOML and Markdown command loaders.
*
* @param filePath The absolute path to the command file
* @param baseDir The root command directory for name calculation
* @param definition The parsed command definition (prompt and optional description)
* @param extensionName Optional extension name to prefix commands with
* @param fileExtension The file extension (e.g., '.toml' or '.md')
* @returns A SlashCommand object
*/
export function createSlashCommandFromDefinition(
filePath: string,
baseDir: string,
definition: CommandDefinition,
extensionName: string | undefined,
fileExtension: string,
): SlashCommand {
const relativePathWithExt = path.relative(baseDir, filePath);
const relativePath = relativePathWithExt.substring(
0,
relativePathWithExt.length - fileExtension.length,
);
const baseCommandName = relativePath
.split(path.sep)
// Sanitize each path segment to prevent ambiguity. Since ':' is our
// namespace separator, we replace any literal colons in filenames
// with underscores to avoid naming conflicts.
.map((segment) => segment.replaceAll(':', '_'))
.join(':');
// Prefix command name with extension name if provided
const commandName = extensionName
? `${extensionName}:${baseCommandName}`
: baseCommandName;
// Add extension name tag for extension commands
const defaultDescription = `Custom command from ${path.basename(filePath)}`;
let description = definition.description || defaultDescription;
if (extensionName) {
description = `[${extensionName}] ${description}`;
}
const processors: IPromptProcessor[] = [];
const usesArgs = definition.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
const usesShellInjection = definition.prompt.includes(
SHELL_INJECTION_TRIGGER,
);
const usesAtFileInjection = definition.prompt.includes(
AT_FILE_INJECTION_TRIGGER,
);
// 1. @-File Injection (Security First).
// This runs first to ensure we're not executing shell commands that
// could dynamically generate malicious @-paths.
if (usesAtFileInjection) {
processors.push(new AtFileProcessor(baseCommandName));
}
// 2. Argument and Shell Injection.
// This runs after file content has been safely injected.
if (usesShellInjection || usesArgs) {
processors.push(new ShellProcessor(baseCommandName));
}
// 3. Default Argument Handling.
// Appends the raw invocation if no explicit {{args}} are used.
if (!usesArgs) {
processors.push(new DefaultArgumentProcessor());
}
return {
name: commandName,
description,
kind: CommandKind.FILE,
extensionName,
action: async (
context: CommandContext,
_args: string,
): Promise<SlashCommandActionReturn> => {
if (!context.invocation) {
console.error(
`[FileCommandLoader] Critical error: Command '${commandName}' was executed without invocation context.`,
);
return {
type: 'submit_prompt',
content: [{ text: definition.prompt }], // Fallback to unprocessed prompt
};
}
try {
let processedContent: PromptPipelineContent = [
{ text: definition.prompt },
];
for (const processor of processors) {
processedContent = await processor.process(processedContent, context);
}
return {
type: 'submit_prompt',
content: processedContent,
};
} catch (e) {
// Check if it's our specific error type
if (e instanceof ConfirmationRequiredError) {
// Halt and request confirmation from the UI layer.
return {
type: 'confirm_shell_commands',
commandsToConfirm: e.commandsToConfirm,
originalInvocation: {
raw: context.invocation.raw,
},
};
}
// Re-throw other errors to be handled by the global error handler.
throw e;
}
},
};
}
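As an aside, here is a minimal sketch of how a file-based command loader might call this helper once it has parsed a command file. The directory, file name, and definition below are illustrative assumptions, not values taken from the diff:

```typescript
import path from 'node:path';
// Hypothetical import path for the helper shown above.
import { createSlashCommandFromDefinition } from './commands.js';

// Assume the loader found `<baseDir>/git/review.md` and parsed its body into a prompt.
const baseDir = '/home/user/.qwen/commands';
const filePath = path.join(baseDir, 'git', 'review.md');

const command = createSlashCommandFromDefinition(
  filePath,
  baseDir,
  {
    prompt: 'Review the staged changes and summarize potential issues.',
    description: 'Review staged changes',
  },
  undefined, // no extension prefix, so the name stays "git:review"
  '.md',
);

console.log(command.name); // "git:review"
console.log(command.description); // "Review staged changes"
```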

View File

@@ -0,0 +1,253 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import {
detectTomlCommands,
migrateTomlCommands,
generateMigrationPrompt,
} from './command-migration-tool.js';
describe('command-migration-tool', () => {
let tempDir: string;
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-migration-test-'));
});
afterEach(async () => {
await fs.rm(tempDir, { recursive: true, force: true });
});
describe('detectTomlCommands', () => {
it('should detect TOML files in directory', async () => {
// Create some TOML files
await fs.writeFile(
path.join(tempDir, 'cmd1.toml'),
'prompt = "test"',
'utf-8',
);
await fs.writeFile(
path.join(tempDir, 'cmd2.toml'),
'prompt = "test"',
'utf-8',
);
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toHaveLength(2);
expect(tomlFiles).toContain('cmd1.toml');
expect(tomlFiles).toContain('cmd2.toml');
});
it('should detect TOML files in subdirectories', async () => {
const subdir = path.join(tempDir, 'subdir');
await fs.mkdir(subdir);
await fs.writeFile(
path.join(subdir, 'nested.toml'),
'prompt = "test"',
'utf-8',
);
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toContain('subdir/nested.toml');
});
it('should return empty array for non-existent directory', async () => {
const nonExistent = path.join(tempDir, 'does-not-exist');
const tomlFiles = await detectTomlCommands(nonExistent);
expect(tomlFiles).toEqual([]);
});
it('should not detect non-TOML files', async () => {
await fs.writeFile(path.join(tempDir, 'file.txt'), 'text', 'utf-8');
await fs.writeFile(path.join(tempDir, 'file.md'), 'markdown', 'utf-8');
const tomlFiles = await detectTomlCommands(tempDir);
expect(tomlFiles).toHaveLength(0);
});
});
describe('migrateTomlCommands', () => {
it('should migrate TOML file to Markdown', async () => {
const tomlContent = `prompt = "Test prompt"
description = "Test description"`;
await fs.writeFile(path.join(tempDir, 'test.toml'), tomlContent, 'utf-8');
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: true,
deleteOriginal: false,
});
expect(result.success).toBe(true);
expect(result.convertedFiles).toContain('test.toml');
expect(result.failedFiles).toHaveLength(0);
// Check Markdown file was created
const mdPath = path.join(tempDir, 'test.md');
const mdContent = await fs.readFile(mdPath, 'utf-8');
expect(mdContent).toContain('description: Test description');
expect(mdContent).toContain('Test prompt');
// Check backup was created (original renamed to .toml.backup)
const backupPath = path.join(tempDir, 'test.toml.backup');
const backupExists = await fs
.access(backupPath)
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(true);
// Original .toml file should not exist (renamed to .backup)
const tomlExists = await fs
.access(path.join(tempDir, 'test.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(false);
});
it('should delete original TOML when deleteOriginal is true', async () => {
await fs.writeFile(
path.join(tempDir, 'delete-me.toml'),
'prompt = "Test"',
'utf-8',
);
await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
deleteOriginal: true,
});
// Original should be deleted
const tomlExists = await fs
.access(path.join(tempDir, 'delete-me.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(false);
// Markdown should exist
const mdExists = await fs
.access(path.join(tempDir, 'delete-me.md'))
.then(() => true)
.catch(() => false);
expect(mdExists).toBe(true);
// Backup should not exist (createBackup was false)
const backupExists = await fs
.access(path.join(tempDir, 'delete-me.toml.backup'))
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(false);
});
it('should fail if Markdown file already exists', async () => {
await fs.writeFile(
path.join(tempDir, 'existing.toml'),
'prompt = "Test"',
'utf-8',
);
await fs.writeFile(
path.join(tempDir, 'existing.md'),
'Already exists',
'utf-8',
);
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
});
expect(result.success).toBe(false);
expect(result.failedFiles).toHaveLength(1);
expect(result.failedFiles[0].file).toBe('existing.toml');
expect(result.failedFiles[0].error).toContain('already exists');
});
it('should handle migration without backup', async () => {
await fs.writeFile(
path.join(tempDir, 'no-backup.toml'),
'prompt = "Test"',
'utf-8',
);
const result = await migrateTomlCommands({
commandDir: tempDir,
createBackup: false,
deleteOriginal: false,
});
expect(result.success).toBe(true);
// Original TOML file should still exist (no backup, no delete)
const tomlExists = await fs
.access(path.join(tempDir, 'no-backup.toml'))
.then(() => true)
.catch(() => false);
expect(tomlExists).toBe(true);
// Backup should not exist
const backupExists = await fs
.access(path.join(tempDir, 'no-backup.toml.backup'))
.then(() => true)
.catch(() => false);
expect(backupExists).toBe(false);
});
it('should return success with empty results for no TOML files', async () => {
const result = await migrateTomlCommands({
commandDir: tempDir,
});
expect(result.success).toBe(true);
expect(result.convertedFiles).toHaveLength(0);
expect(result.failedFiles).toHaveLength(0);
});
});
describe('generateMigrationPrompt', () => {
it('should generate prompt for few files', () => {
const files = ['cmd1.toml', 'cmd2.toml'];
const prompt = generateMigrationPrompt(files);
expect(prompt).toContain('Found 2 command files');
expect(prompt).toContain('cmd1.toml');
expect(prompt).toContain('cmd2.toml');
expect(prompt).toContain('qwen-code migrate-commands');
});
it('should truncate file list for many files', () => {
const files = Array.from({ length: 10 }, (_, i) => `cmd${i}.toml`);
const prompt = generateMigrationPrompt(files);
expect(prompt).toContain('Found 10 command files');
expect(prompt).toContain('... and 7 more');
});
it('should return empty string for no files', () => {
const prompt = generateMigrationPrompt([]);
expect(prompt).toBe('');
});
it('should use singular form for single file', () => {
const prompt = generateMigrationPrompt(['single.toml']);
expect(prompt).toContain('Found 1 command file');
// Don't check for plural since "files" appears in other parts of the message
});
});
});

View File

@@ -0,0 +1,169 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Tool for migrating TOML commands to Markdown format.
*/
import { promises as fs } from 'node:fs';
import path from 'node:path';
import { glob } from 'glob';
import { convertTomlToMarkdown } from '@qwen-code/qwen-code-core';
export interface MigrationResult {
success: boolean;
convertedFiles: string[];
failedFiles: Array<{ file: string; error: string }>;
}
export interface MigrationOptions {
/** Directory containing command files */
commandDir: string;
/** Whether to create backups (default: true) */
createBackup?: boolean;
/** Whether to delete original TOML files after migration (default: false) */
deleteOriginal?: boolean;
}
/**
* Scans a directory for TOML command files.
* @param commandDir Directory to scan
* @returns Array of TOML file paths (relative to commandDir)
*/
export async function detectTomlCommands(
commandDir: string,
): Promise<string[]> {
try {
await fs.access(commandDir);
} catch {
// Directory doesn't exist
return [];
}
const tomlFiles = await glob('**/*.toml', {
cwd: commandDir,
nodir: true,
dot: false,
});
return tomlFiles;
}
/**
* Migrates TOML command files to Markdown format.
* @param options Migration options
* @returns Migration result with details
*/
export async function migrateTomlCommands(
options: MigrationOptions,
): Promise<MigrationResult> {
const { commandDir, createBackup = true, deleteOriginal = false } = options;
const result: MigrationResult = {
success: true,
convertedFiles: [],
failedFiles: [],
};
// Detect TOML files
const tomlFiles = await detectTomlCommands(commandDir);
if (tomlFiles.length === 0) {
return result;
}
// Process each TOML file
for (const relativeFile of tomlFiles) {
const tomlPath = path.join(commandDir, relativeFile);
try {
// Read TOML file
const tomlContent = await fs.readFile(tomlPath, 'utf-8');
// Convert to Markdown
const markdownContent = convertTomlToMarkdown(tomlContent);
// Generate Markdown file path (same location, .md extension)
const markdownPath = tomlPath.replace(/\.toml$/, '.md');
// Check if Markdown file already exists
try {
await fs.access(markdownPath);
throw new Error(
`Markdown file already exists: ${path.basename(markdownPath)}`,
);
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
throw error;
}
// File doesn't exist, continue
}
// Write Markdown file
await fs.writeFile(markdownPath, markdownContent, 'utf-8');
// Backup original if requested (rename to .toml.backup)
if (createBackup) {
const backupPath = `${tomlPath}.backup`;
await fs.rename(tomlPath, backupPath);
} else if (deleteOriginal) {
// Delete original if requested and no backup
await fs.unlink(tomlPath);
}
result.convertedFiles.push(relativeFile);
} catch (error) {
result.success = false;
result.failedFiles.push({
file: relativeFile,
error: error instanceof Error ? error.message : String(error),
});
}
}
return result;
}
/**
* Generates a migration report message.
* @param tomlFiles List of TOML files found
* @returns Human-readable migration prompt message
*/
export function generateMigrationPrompt(tomlFiles: string[]): string {
if (tomlFiles.length === 0) {
return '';
}
const count = tomlFiles.length;
const fileList =
tomlFiles.length <= 5
? tomlFiles.map((f) => ` - ${f}`).join('\n')
: ` - ${tomlFiles.slice(0, 3).join('\n - ')}\n - ... and ${tomlFiles.length - 3} more`;
return `
⚠️ TOML Command Format Deprecation Notice
Found ${count} command file${count > 1 ? 's' : ''} in TOML format:
${fileList}
The TOML format for commands is being deprecated in favor of Markdown format.
Markdown format is more readable and easier to edit.
You can migrate these files automatically using:
qwen-code migrate-commands
Or manually convert each file:
- TOML: prompt = "..." / description = "..."
- Markdown: YAML frontmatter + content
The migration tool will:
✓ Convert TOML files to Markdown
✓ Create backups of original files
✓ Preserve all command functionality
TOML format will continue to work for now, but migration is recommended.
`.trim();
}
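To show how these three exports fit together, here is a hedged sketch of a one-off migration script; the commands directory is an assumed path (in the CLI it comes from `Storage`):

```typescript
import {
  detectTomlCommands,
  generateMigrationPrompt,
  migrateTomlCommands,
} from './command-migration-tool.js';

async function runMigration(commandDir: string): Promise<void> {
  const tomlFiles = await detectTomlCommands(commandDir);
  if (tomlFiles.length === 0) {
    return; // nothing to migrate
  }

  // Print the deprecation notice before touching any files.
  console.log(generateMigrationPrompt(tomlFiles));

  const result = await migrateTomlCommands({
    commandDir,
    createBackup: true, // originals are renamed to *.toml.backup
    deleteOriginal: false,
  });

  console.log(
    `Converted ${result.convertedFiles.length} file(s), ${result.failedFiles.length} failed.`,
  );
}

// Assumed location of user-level commands.
await runMigration('/home/user/.qwen/commands');
```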

View File

@@ -0,0 +1,144 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import {
parseMarkdownCommand,
MarkdownCommandDefSchema,
} from './markdown-command-parser.js';
describe('parseMarkdownCommand', () => {
it('should parse markdown with YAML frontmatter', () => {
const content = `---
description: Test command
---
This is the prompt content.`;
const result = parseMarkdownCommand(content);
expect(result).toEqual({
frontmatter: {
description: 'Test command',
},
prompt: 'This is the prompt content.',
});
});
it('should parse markdown without frontmatter', () => {
const content = 'This is just a prompt without frontmatter.';
const result = parseMarkdownCommand(content);
expect(result).toEqual({
prompt: 'This is just a prompt without frontmatter.',
});
});
it('should handle multi-line prompts', () => {
const content = `---
description: Multi-line test
---
First line of prompt.
Second line of prompt.
Third line of prompt.`;
const result = parseMarkdownCommand(content);
expect(result.prompt).toBe(
'First line of prompt.\nSecond line of prompt.\nThird line of prompt.',
);
});
it('should trim whitespace from prompt', () => {
const content = `---
description: Whitespace test
---
Prompt with leading and trailing spaces
`;
const result = parseMarkdownCommand(content);
expect(result.prompt).toBe('Prompt with leading and trailing spaces');
});
it('should handle empty frontmatter', () => {
const content = `---
---
Prompt content after empty frontmatter.`;
const result = parseMarkdownCommand(content);
// Empty YAML frontmatter returns undefined, not {}
expect(result.frontmatter).toBeUndefined();
expect(result.prompt).toBe('Prompt content after empty frontmatter.');
});
it('should handle invalid YAML frontmatter gracefully', () => {
// The YAML parser we use is quite tolerant, so most "invalid" YAML
// actually parses successfully. This test verifies that behavior.
const content = `---
description: test
---
Prompt content.`;
const result = parseMarkdownCommand(content);
expect(result.frontmatter).toBeDefined();
expect(result.prompt).toBe('Prompt content.');
});
});
describe('MarkdownCommandDefSchema', () => {
it('should validate valid markdown command def', () => {
const validDef = {
frontmatter: {
description: 'Test description',
},
prompt: 'Test prompt',
};
const result = MarkdownCommandDefSchema.safeParse(validDef);
expect(result.success).toBe(true);
});
it('should validate markdown command def without frontmatter', () => {
const validDef = {
prompt: 'Test prompt',
};
const result = MarkdownCommandDefSchema.safeParse(validDef);
expect(result.success).toBe(true);
});
it('should reject command def without prompt', () => {
const invalidDef = {
frontmatter: {
description: 'Test description',
},
};
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
expect(result.success).toBe(false);
});
it('should reject command def with non-string prompt', () => {
const invalidDef = {
prompt: 123,
};
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
expect(result.success).toBe(false);
});
});

View File

@@ -0,0 +1,64 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { z } from 'zod';
import { parse as parseYaml } from '@qwen-code/qwen-code-core';
/**
* Defines the Zod schema for a Markdown command definition file.
* The frontmatter contains optional metadata, and the body is the prompt.
*/
export const MarkdownCommandDefSchema = z.object({
frontmatter: z
.object({
description: z.string().optional(),
})
.optional(),
prompt: z.string({
required_error: 'The prompt content is required.',
invalid_type_error: 'The prompt content must be a string.',
}),
});
export type MarkdownCommandDef = z.infer<typeof MarkdownCommandDefSchema>;
/**
* Parses a Markdown command file with optional YAML frontmatter.
* @param content The file content
* @returns Parsed command definition with frontmatter and prompt
*/
export function parseMarkdownCommand(content: string): MarkdownCommandDef {
// Match YAML frontmatter pattern: ---\n...\n---\n
  // Allow empty frontmatter: ---\n---\n
  // The lazy ([\s\S]*?) group lets the frontmatter content be empty.
const frontmatterRegex = /^---\n([\s\S]*?)---\n([\s\S]*)$/;
const match = content.match(frontmatterRegex);
if (!match) {
// No frontmatter, entire content is the prompt
return {
prompt: content.trim(),
};
}
const [, frontmatterYaml, body] = match;
// Parse YAML frontmatter if not empty
let frontmatter: Record<string, unknown> | undefined;
if (frontmatterYaml.trim()) {
try {
frontmatter = parseYaml(frontmatterYaml) as Record<string, unknown>;
} catch (error) {
throw new Error(
`Failed to parse YAML frontmatter: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
return {
frontmatter,
prompt: body.trim(),
};
}
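A short sketch of the parser and schema in use, with an inline string standing in for the contents of a `.md` command file:

```typescript
import {
  MarkdownCommandDefSchema,
  parseMarkdownCommand,
} from './markdown-command-parser.js';

const fileContent = [
  '---',
  'description: Summarize the current diff',
  '---',
  'Summarize the staged changes in this repository.',
].join('\n');

const def = parseMarkdownCommand(fileContent);
// def.frontmatter => { description: 'Summarize the current diff' }
// def.prompt      => 'Summarize the staged changes in this repository.'

const checked = MarkdownCommandDefSchema.safeParse(def);
// checked.success === true
```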

View File

@@ -72,7 +72,6 @@ describe('ShellProcessor', () => {
getApprovalMode: vi.fn().mockReturnValue(ApprovalMode.DEFAULT),
getShouldUseNodePtyShell: vi.fn().mockReturnValue(false),
getShellExecutionConfig: vi.fn().mockReturnValue({}),
getAllowedTools: vi.fn().mockReturnValue([]),
};
context = createMockCommandContext({
@@ -197,35 +196,6 @@ describe('ShellProcessor', () => {
);
});
it('should NOT throw ConfirmationRequiredError when a command matches allowedTools', async () => {
const processor = new ShellProcessor('test-command');
const prompt: PromptPipelineContent = createPromptPipelineContent(
'Do something dangerous: !{rm -rf /}',
);
mockCheckCommandPermissions.mockReturnValue({
allAllowed: false,
disallowedCommands: ['rm -rf /'],
});
(mockConfig.getAllowedTools as Mock).mockReturnValue([
'ShellTool(rm -rf /)',
]);
mockShellExecute.mockReturnValue({
result: Promise.resolve({ ...SUCCESS_RESULT, output: 'deleted' }),
});
const result = await processor.process(prompt, context);
expect(mockShellExecute).toHaveBeenCalledWith(
'rm -rf /',
expect.any(String),
expect.any(Function),
expect.any(Object),
false,
expect.any(Object),
);
expect(result).toEqual([{ text: 'Do something dangerous: deleted' }]);
});
it('should NOT throw ConfirmationRequiredError if a command is not allowed but approval mode is YOLO', async () => {
const processor = new ShellProcessor('test-command');
const prompt: PromptPipelineContent = createPromptPipelineContent(

View File

@@ -7,13 +7,11 @@
import {
ApprovalMode,
checkCommandPermissions,
doesToolInvocationMatch,
escapeShellArg,
getShellConfiguration,
ShellExecutionService,
flatMapTextParts,
} from '@qwen-code/qwen-code-core';
import type { AnyToolInvocation } from '@qwen-code/qwen-code-core';
import type { CommandContext } from '../../ui/commands/types.js';
import type { IPromptProcessor, PromptPipelineContent } from './types.js';
@@ -126,15 +124,6 @@ export class ShellProcessor implements IPromptProcessor {
// Security check on the final, escaped command string.
const { allAllowed, disallowedCommands, blockReason, isHardDenial } =
checkCommandPermissions(command, config, sessionShellAllowlist);
const allowedTools = config.getAllowedTools() || [];
const invocation = {
params: { command },
} as AnyToolInvocation;
const isAllowedBySettings = doesToolInvocationMatch(
'run_shell_command',
invocation,
allowedTools,
);
if (!allAllowed) {
if (isHardDenial) {
@@ -143,17 +132,10 @@ export class ShellProcessor implements IPromptProcessor {
);
}
// If the command is allowed by settings, skip confirmation.
if (isAllowedBySettings) {
continue;
}
// If not a hard denial, respect YOLO mode and auto-approve.
if (config.getApprovalMode() === ApprovalMode.YOLO) {
continue;
if (config.getApprovalMode() !== ApprovalMode.YOLO) {
disallowedCommands.forEach((uc) => commandsToConfirm.add(uc));
}
disallowedCommands.forEach((uc) => commandsToConfirm.add(uc));
}
}

View File

@@ -0,0 +1,5 @@
---
description: Example markdown command
---
This is an example prompt from a markdown file.

View File

@@ -1,49 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import {
EXTENSIONS_CONFIG_FILENAME,
INSTALL_METADATA_FILENAME,
} from '../config/extension.js';
import {
type MCPServerConfig,
type ExtensionInstallMetadata,
} from '@qwen-code/qwen-code-core';
export function createExtension({
extensionsDir = 'extensions-dir',
name = 'my-extension',
version = '1.0.0',
addContextFile = false,
contextFileName = undefined as string | undefined,
mcpServers = {} as Record<string, MCPServerConfig>,
installMetadata = undefined as ExtensionInstallMetadata | undefined,
} = {}): string {
const extDir = path.join(extensionsDir, name);
fs.mkdirSync(extDir, { recursive: true });
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name, version, contextFileName, mcpServers }),
);
if (addContextFile) {
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
}
if (contextFileName) {
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
}
if (installMetadata) {
fs.writeFileSync(
path.join(extDir, INSTALL_METADATA_FILENAME),
JSON.stringify(installMetadata),
);
}
return extDir;
}

View File

@@ -76,7 +76,6 @@ vi.mock('./hooks/useFolderTrust.js');
vi.mock('./hooks/useIdeTrustListener.js');
vi.mock('./hooks/useMessageQueue.js');
vi.mock('./hooks/useAutoAcceptIndicator.js');
vi.mock('./hooks/useWorkspaceMigration.js');
vi.mock('./hooks/useGitBranchName.js');
vi.mock('./contexts/VimModeContext.js');
vi.mock('./contexts/SessionContext.js');
@@ -103,7 +102,6 @@ import { useFolderTrust } from './hooks/useFolderTrust.js';
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
import { useMessageQueue } from './hooks/useMessageQueue.js';
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
import { useGitBranchName } from './hooks/useGitBranchName.js';
import { useVimMode } from './contexts/VimModeContext.js';
import { useSessionStats } from './contexts/SessionContext.js';
@@ -134,7 +132,6 @@ describe('AppContainer State Management', () => {
const mockedUseIdeTrustListener = useIdeTrustListener as Mock;
const mockedUseMessageQueue = useMessageQueue as Mock;
const mockedUseAutoAcceptIndicator = useAutoAcceptIndicator as Mock;
const mockedUseWorkspaceMigration = useWorkspaceMigration as Mock;
const mockedUseGitBranchName = useGitBranchName as Mock;
const mockedUseVimMode = useVimMode as Mock;
const mockedUseSessionStats = useSessionStats as Mock;
@@ -239,12 +236,6 @@ describe('AppContainer State Management', () => {
getQueuedMessagesText: vi.fn().mockReturnValue(''),
});
mockedUseAutoAcceptIndicator.mockReturnValue(false);
mockedUseWorkspaceMigration.mockReturnValue({
showWorkspaceMigrationDialog: false,
workspaceExtensions: [],
onWorkspaceMigrationDialogOpen: vi.fn(),
onWorkspaceMigrationDialogClose: vi.fn(),
});
mockedUseGitBranchName.mockReturnValue('main');
mockedUseVimMode.mockReturnValue({
isVimEnabled: false,

View File

@@ -38,6 +38,7 @@ import {
getErrorMessage,
getAllGeminiMdFilenames,
ShellExecutionService,
Storage,
} from '@qwen-code/qwen-code-core';
import { buildResumedHistoryItems } from './utils/resumeHistoryUtils.js';
import { validateAuthMethod } from '../config/auth.js';
@@ -76,6 +77,9 @@ import { useLoadingIndicator } from './hooks/useLoadingIndicator.js';
import { useFolderTrust } from './hooks/useFolderTrust.js';
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
import { type IdeIntegrationNudgeResult } from './IdeIntegrationNudge.js';
import { type CommandMigrationNudgeResult } from './CommandFormatMigrationNudge.js';
import { useCommandMigration } from './hooks/useCommandMigration.js';
import { migrateTomlCommands } from '../services/command-migration-tool.js';
import { appEvents, AppEvent } from '../utils/events.js';
import { type UpdateObject } from './utils/updateCheck.js';
import { setUpdateHandler } from '../utils/handleAutoUpdate.js';
@@ -83,10 +87,12 @@ import { ConsolePatcher } from './utils/ConsolePatcher.js';
import { registerCleanup, runExitCleanup } from '../utils/cleanup.js';
import { useMessageQueue } from './hooks/useMessageQueue.js';
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
import { useSessionStats } from './contexts/SessionContext.js';
import { useGitBranchName } from './hooks/useGitBranchName.js';
import { useExtensionUpdates } from './hooks/useExtensionUpdates.js';
import {
useExtensionUpdates,
useConfirmUpdateRequests,
} from './hooks/useExtensionUpdates.js';
import { ShellFocusContext } from './contexts/ShellFocusContext.js';
import { t } from '../i18n/index.js';
import { useWelcomeBack } from './hooks/useWelcomeBack.js';
@@ -97,6 +103,7 @@ import { processVisionSwitchOutcome } from './hooks/useVisionAutoSwitch.js';
import { useSubagentCreateDialog } from './hooks/useSubagentCreateDialog.js';
import { useAgentsManagerDialog } from './hooks/useAgentsManagerDialog.js';
import { useAttentionNotifications } from './hooks/useAttentionNotifications.js';
import { requestConsentInteractive } from '../commands/extensions/consent.js';
const CTRL_EXIT_PROMPT_DURATION_MS = 1000;
@@ -157,15 +164,21 @@ export const AppContainer = (props: AppContainerProps) => {
config.isTrustedFolder(),
);
const extensions = config.getExtensions();
const extensionManager = config.getExtensionManager();
extensionManager.setRequestConsent((description) =>
requestConsentInteractive(description, addConfirmUpdateExtensionRequest),
);
const { addConfirmUpdateExtensionRequest, confirmUpdateExtensionRequests } =
useConfirmUpdateRequests();
const {
extensionsUpdateState,
extensionsUpdateStateInternal,
dispatchExtensionStateUpdate,
confirmUpdateExtensionRequests,
addConfirmUpdateExtensionRequest,
} = useExtensionUpdates(
extensions,
extensionManager,
historyManager.addItem,
config.getWorkingDir(),
);
@@ -436,13 +449,6 @@ export const AppContainer = (props: AppContainerProps) => {
remount: refreshStatic,
});
const {
showWorkspaceMigrationDialog,
workspaceExtensions,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
} = useWorkspaceMigration(settings);
const { toggleVimEnabled } = useVimMode();
const {
@@ -578,11 +584,11 @@ export const AppContainer = (props: AppContainerProps) => {
: [],
config.getDebugMode(),
config.getFileService(),
settings.merged,
config.getExtensionContextFilePaths(),
config.isTrustedFolder(),
settings.merged.context?.importFormat || 'tree', // Use setting or default to 'tree'
config.getFileFilteringOptions(),
config.getDiscoveryMaxDirs(),
);
config.setUserMemory(memoryContent);
@@ -845,6 +851,13 @@ export const AppContainer = (props: AppContainerProps) => {
!idePromptAnswered,
);
// Command migration nudge
const {
showMigrationNudge: shouldShowCommandMigrationNudge,
tomlFiles: commandMigrationTomlFiles,
setShowMigrationNudge: setShowCommandMigrationNudge,
} = useCommandMigration(settings, config.storage);
const [showErrorDetails, setShowErrorDetails] = useState<boolean>(false);
const [showToolDescriptions, setShowToolDescriptions] =
useState<boolean>(false);
@@ -925,12 +938,7 @@ export const AppContainer = (props: AppContainerProps) => {
const handleIdePromptComplete = useCallback(
(result: IdeIntegrationNudgeResult) => {
if (result.userSelection === 'yes') {
// Check whether the extension has been pre-installed
if (result.isExtensionPreInstalled) {
handleSlashCommand('/ide enable');
} else {
handleSlashCommand('/ide install');
}
handleSlashCommand('/ide install');
settings.setValue(SettingScope.User, 'ide.hasSeenNudge', true);
} else if (result.userSelection === 'dismiss') {
settings.setValue(SettingScope.User, 'ide.hasSeenNudge', true);
@@ -940,6 +948,92 @@ export const AppContainer = (props: AppContainerProps) => {
[handleSlashCommand, settings],
);
const handleCommandMigrationComplete = useCallback(
async (result: CommandMigrationNudgeResult) => {
setShowCommandMigrationNudge(false);
if (result.userSelection === 'yes') {
// Perform migration for both workspace and user levels
try {
const results = [];
// Migrate workspace commands
const workspaceCommandsDir = config.storage.getProjectCommandsDir();
const workspaceResult = await migrateTomlCommands({
commandDir: workspaceCommandsDir,
createBackup: true,
deleteOriginal: false,
});
if (
workspaceResult.convertedFiles.length > 0 ||
workspaceResult.failedFiles.length > 0
) {
results.push({ level: 'workspace', result: workspaceResult });
}
// Migrate user commands
const userCommandsDir = Storage.getUserCommandsDir();
const userResult = await migrateTomlCommands({
commandDir: userCommandsDir,
createBackup: true,
deleteOriginal: false,
});
if (
userResult.convertedFiles.length > 0 ||
userResult.failedFiles.length > 0
) {
results.push({ level: 'user', result: userResult });
}
// Report results
for (const { level, result: migrationResult } of results) {
if (
migrationResult.success &&
migrationResult.convertedFiles.length > 0
) {
historyManager.addItem(
{
type: MessageType.INFO,
text: `[${level}] Successfully migrated ${migrationResult.convertedFiles.length} command file${migrationResult.convertedFiles.length > 1 ? 's' : ''} to Markdown format. Original files backed up as .toml.backup`,
},
Date.now(),
);
}
if (migrationResult.failedFiles.length > 0) {
historyManager.addItem(
{
type: MessageType.ERROR,
text: `[${level}] Failed to migrate ${migrationResult.failedFiles.length} file${migrationResult.failedFiles.length > 1 ? 's' : ''}:\n${migrationResult.failedFiles.map((f) => `${f.file}: ${f.error}`).join('\n')}`,
},
Date.now(),
);
}
}
if (results.length === 0) {
historyManager.addItem(
{
type: MessageType.INFO,
text: 'No TOML files found to migrate.',
},
Date.now(),
);
}
} catch (error) {
historyManager.addItem(
{
type: MessageType.ERROR,
text: `❌ Migration failed: ${getErrorMessage(error)}`,
},
Date.now(),
);
}
}
},
[historyManager, setShowCommandMigrationNudge, config.storage],
);
const { elapsedTime, currentLoadingPhrase } = useLoadingIndicator(
streamingState,
settings.merged.ui?.customWittyPhrases,
@@ -1182,8 +1276,8 @@ export const AppContainer = (props: AppContainerProps) => {
const dialogsVisible =
showWelcomeBackDialog ||
showWorkspaceMigrationDialog ||
shouldShowIdePrompt ||
shouldShowCommandMigrationNudge ||
isFolderTrustDialogOpen ||
!!shellConfirmationRequest ||
!!confirmationRequest ||
@@ -1249,6 +1343,8 @@ export const AppContainer = (props: AppContainerProps) => {
suggestionsWidth,
isInputActive,
shouldShowIdePrompt,
shouldShowCommandMigrationNudge,
commandMigrationTomlFiles,
isFolderTrustDialogOpen: isFolderTrustDialogOpen ?? false,
isTrustedFolder,
constrainHeight,
@@ -1265,8 +1361,6 @@ export const AppContainer = (props: AppContainerProps) => {
historyRemountKey,
messageQueue,
showAutoAcceptIndicator,
showWorkspaceMigrationDialog,
workspaceExtensions,
currentModel,
contextFileNames,
errorCount,
@@ -1338,6 +1432,8 @@ export const AppContainer = (props: AppContainerProps) => {
suggestionsWidth,
isInputActive,
shouldShowIdePrompt,
shouldShowCommandMigrationNudge,
commandMigrationTomlFiles,
isFolderTrustDialogOpen,
isTrustedFolder,
constrainHeight,
@@ -1354,8 +1450,6 @@ export const AppContainer = (props: AppContainerProps) => {
historyRemountKey,
messageQueue,
showAutoAcceptIndicator,
showWorkspaceMigrationDialog,
workspaceExtensions,
contextFileNames,
errorCount,
availableTerminalHeight,
@@ -1409,14 +1503,13 @@ export const AppContainer = (props: AppContainerProps) => {
setShellModeActive,
vimHandleInput,
handleIdePromptComplete,
handleCommandMigrationComplete,
handleFolderTrustSelect,
setConstrainHeight,
onEscapePromptChange: handleEscapePromptChange,
refreshStatic,
handleFinalSubmit,
handleClearScreen,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
// Vision switch dialog
handleVisionSwitchSelect,
// Welcome back dialog
@@ -1446,14 +1539,13 @@ export const AppContainer = (props: AppContainerProps) => {
setShellModeActive,
vimHandleInput,
handleIdePromptComplete,
handleCommandMigrationComplete,
handleFolderTrustSelect,
setConstrainHeight,
handleEscapePromptChange,
refreshStatic,
handleFinalSubmit,
handleClearScreen,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
handleVisionSwitchSelect,
handleWelcomeBackSelection,
handleWelcomeBackClose,

View File

@@ -0,0 +1,90 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { Box, Text } from 'ink';
import type { RadioSelectItem } from './components/shared/RadioButtonSelect.js';
import { RadioButtonSelect } from './components/shared/RadioButtonSelect.js';
import { useKeypress } from './hooks/useKeypress.js';
import { theme } from './semantic-colors.js';
export type CommandMigrationNudgeResult = {
userSelection: 'yes' | 'no';
};
interface CommandFormatMigrationNudgeProps {
tomlFiles: string[];
onComplete: (result: CommandMigrationNudgeResult) => void;
}
export function CommandFormatMigrationNudge({
tomlFiles,
onComplete,
}: CommandFormatMigrationNudgeProps) {
useKeypress(
(key) => {
if (key.name === 'escape') {
onComplete({
userSelection: 'no',
});
}
},
{ isActive: true },
);
const OPTIONS: Array<RadioSelectItem<CommandMigrationNudgeResult>> = [
{
label: 'Yes',
value: {
userSelection: 'yes',
},
key: 'Yes',
},
{
label: 'No (esc)',
value: {
userSelection: 'no',
},
key: 'No (esc)',
},
];
const count = tomlFiles.length;
const fileList =
count <= 3
? tomlFiles.map((f) => `${f}`).join('\n')
: `${tomlFiles.slice(0, 2).join('\n • ')}\n • ... and ${count - 2} more`;
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.status.warning}
padding={1}
width="100%"
marginLeft={1}
>
<Box marginBottom={1} flexDirection="column">
<Text>
<Text color={theme.status.warning}>{'⚠️ '}</Text>
<Text bold>Command Format Migration</Text>
</Text>
<Text color={theme.text.secondary}>
{`Found ${count} TOML command file${count > 1 ? 's' : ''}:`}
</Text>
<Text color={theme.text.secondary}>{fileList}</Text>
<Text>{''}</Text>
<Text color={theme.text.secondary}>
The TOML format is deprecated. Would you like to migrate them to
Markdown format?
</Text>
<Text color={theme.text.secondary}>
(Backups will be created and original files will be preserved)
</Text>
</Box>
<RadioButtonSelect items={OPTIONS} onSelect={onComplete} />
</Box>
);
}

View File

@@ -38,7 +38,6 @@ export function IdeIntegrationNudge({
);
const { displayName: ideName } = ide;
const isInSandbox = !!process.env['SANDBOX'];
// Assume extension is already installed if the env variables are set.
const isExtensionPreInstalled =
!!process.env['QWEN_CODE_IDE_SERVER_PORT'] &&
@@ -71,15 +70,13 @@ export function IdeIntegrationNudge({
},
];
const installText = isInSandbox
? `Note: In sandbox environments, IDE integration requires manual setup on the host system. If you select Yes, you'll receive instructions on how to set this up.`
: isExtensionPreInstalled
? `If you select Yes, the CLI will connect to your ${
ideName ?? 'editor'
} and have access to your open files and display diffs directly.`
: `If you select Yes, we'll install an extension that allows the CLI to access your open files and display diffs directly in ${
ideName ?? 'your editor'
}.`;
const installText = isExtensionPreInstalled
? `If you select Yes, the CLI will have access to your open files and display diffs directly in ${
ideName ?? 'your editor'
}.`
: `If you select Yes, we'll install an extension that allows the CLI to access your open files and display diffs directly in ${
ideName ?? 'your editor'
}.`;
return (
<Box

View File

@@ -4,28 +4,31 @@
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { describe, it, expect } from 'vitest';
import { approvalModeCommand } from './approvalModeCommand.js';
import {
type CommandContext,
CommandKind,
type OpenDialogActionReturn,
type MessageActionReturn,
} from './types.js';
import { createMockCommandContext } from '../../test-utils/mockCommandContext.js';
import type { LoadedSettings } from '../../config/settings.js';
describe('approvalModeCommand', () => {
let mockContext: CommandContext;
let mockSetApprovalMode: ReturnType<typeof vi.fn>;
beforeEach(() => {
mockSetApprovalMode = vi.fn();
mockContext = createMockCommandContext({
services: {
config: {
getApprovalMode: () => 'default',
setApprovalMode: mockSetApprovalMode,
setApprovalMode: () => {},
},
settings: {
merged: {},
setValue: () => {},
forScope: () => ({}),
} as unknown as LoadedSettings,
},
});
});
@@ -38,7 +41,7 @@ describe('approvalModeCommand', () => {
expect(approvalModeCommand.kind).toBe(CommandKind.BUILT_IN);
});
it('should open approval mode dialog when invoked without arguments', async () => {
it('should open approval mode dialog when invoked', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
'',
@@ -48,123 +51,16 @@ describe('approvalModeCommand', () => {
expect(result.dialog).toBe('approval-mode');
});
it('should open approval mode dialog when invoked with whitespace only', async () => {
it('should open approval mode dialog with arguments (ignored)', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
' ',
'some arguments',
)) as OpenDialogActionReturn;
expect(result.type).toBe('dialog');
expect(result.dialog).toBe('approval-mode');
});
describe('direct mode setting (session-only)', () => {
it('should set approval mode to "plan" when argument is "plan"', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
'plan',
)) as MessageActionReturn;
expect(result.type).toBe('message');
expect(result.messageType).toBe('info');
expect(result.content).toContain('plan');
expect(mockSetApprovalMode).toHaveBeenCalledWith('plan');
});
it('should set approval mode to "yolo" when argument is "yolo"', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
'yolo',
)) as MessageActionReturn;
expect(result.type).toBe('message');
expect(result.messageType).toBe('info');
expect(result.content).toContain('yolo');
expect(mockSetApprovalMode).toHaveBeenCalledWith('yolo');
});
it('should set approval mode to "auto-edit" when argument is "auto-edit"', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
'auto-edit',
)) as MessageActionReturn;
expect(result.type).toBe('message');
expect(result.messageType).toBe('info');
expect(result.content).toContain('auto-edit');
expect(mockSetApprovalMode).toHaveBeenCalledWith('auto-edit');
});
it('should set approval mode to "default" when argument is "default"', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
'default',
)) as MessageActionReturn;
expect(result.type).toBe('message');
expect(result.messageType).toBe('info');
expect(result.content).toContain('default');
expect(mockSetApprovalMode).toHaveBeenCalledWith('default');
});
it('should be case-insensitive for mode argument', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
'YOLO',
)) as MessageActionReturn;
expect(result.type).toBe('message');
expect(result.messageType).toBe('info');
expect(mockSetApprovalMode).toHaveBeenCalledWith('yolo');
});
it('should handle argument with leading/trailing whitespace', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
' plan ',
)) as MessageActionReturn;
expect(result.type).toBe('message');
expect(result.messageType).toBe('info');
expect(mockSetApprovalMode).toHaveBeenCalledWith('plan');
});
});
describe('invalid mode argument', () => {
it('should return error for invalid mode', async () => {
const result = (await approvalModeCommand.action?.(
mockContext,
'invalid-mode',
)) as MessageActionReturn;
expect(result.type).toBe('message');
expect(result.messageType).toBe('error');
expect(result.content).toContain('invalid-mode');
expect(result.content).toContain('plan');
expect(result.content).toContain('yolo');
expect(mockSetApprovalMode).not.toHaveBeenCalled();
});
});
describe('untrusted folder handling', () => {
it('should return error when setApprovalMode throws (e.g., untrusted folder)', async () => {
const errorMessage =
'Cannot enable privileged approval modes in an untrusted folder.';
mockSetApprovalMode.mockImplementation(() => {
throw new Error(errorMessage);
});
const result = (await approvalModeCommand.action?.(
mockContext,
'yolo',
)) as MessageActionReturn;
expect(result.type).toBe('message');
expect(result.messageType).toBe('error');
expect(result.content).toBe(errorMessage);
});
});
it('should not have subcommands', () => {
expect(approvalModeCommand.subCommands).toBeUndefined();
});

View File

@@ -8,25 +8,9 @@ import type {
SlashCommand,
CommandContext,
OpenDialogActionReturn,
MessageActionReturn,
} from './types.js';
import { CommandKind } from './types.js';
import { t } from '../../i18n/index.js';
import type { ApprovalMode } from '@qwen-code/qwen-code-core';
import { APPROVAL_MODES } from '@qwen-code/qwen-code-core';
/**
* Parses the argument string and returns the corresponding ApprovalMode if valid.
* Returns undefined if the argument is empty or not a valid mode.
*/
function parseApprovalModeArg(arg: string): ApprovalMode | undefined {
const trimmed = arg.trim().toLowerCase();
if (!trimmed) {
return undefined;
}
// Match against valid approval modes (case-insensitive)
return APPROVAL_MODES.find((mode) => mode.toLowerCase() === trimmed);
}
export const approvalModeCommand: SlashCommand = {
name: 'approval-mode',
@@ -35,49 +19,10 @@ export const approvalModeCommand: SlashCommand = {
},
kind: CommandKind.BUILT_IN,
action: async (
context: CommandContext,
args: string,
): Promise<OpenDialogActionReturn | MessageActionReturn> => {
const mode = parseApprovalModeArg(args);
// If no argument provided, open the dialog
if (!args.trim()) {
return {
type: 'dialog',
dialog: 'approval-mode',
};
}
// If invalid argument, return error message with valid options
if (!mode) {
return {
type: 'message',
messageType: 'error',
content: t('Invalid approval mode "{{arg}}". Valid modes: {{modes}}', {
arg: args.trim(),
modes: APPROVAL_MODES.join(', '),
}),
};
}
// Set the mode for current session only (not persisted)
const { config } = context.services;
if (config) {
try {
config.setApprovalMode(mode);
} catch (e) {
return {
type: 'message',
messageType: 'error',
content: (e as Error).message,
};
}
}
return {
type: 'message',
messageType: 'info',
content: t('Approval mode set to "{{mode}}"', { mode }),
};
},
_context: CommandContext,
_args: string,
): Promise<OpenDialogActionReturn> => ({
type: 'dialog',
dialog: 'approval-mode',
}),
};
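A tiny sketch of the simplified behavior: whatever arguments are passed, the action now just returns a dialog request (the context argument below is a stand-in, not a real `CommandContext`):

```typescript
import { approvalModeCommand } from './approvalModeCommand.js';
import type { CommandContext } from './types.js';

// Arguments are ignored; mode selection happens in the dialog.
const result = await approvalModeCommand.action?.(
  {} as CommandContext, // stand-in; the CLI supplies a real context
  'yolo',
);
// result => { type: 'dialog', dialog: 'approval-mode' }
```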

View File

@@ -4,13 +4,6 @@
* SPDX-License-Identifier: Apache-2.0
*/
import { requestConsentInteractive } from '../../config/extension.js';
import {
updateAllUpdatableExtensions,
type ExtensionUpdateInfo,
updateExtension,
checkForAllExtensionUpdates,
} from '../../config/extensions/update.js';
import { getErrorMessage } from '../../utils/errors.js';
import { ExtensionUpdateState } from '../state/extensions.js';
import { MessageType } from '../types.js';
@@ -20,8 +13,34 @@ import {
CommandKind,
} from './types.js';
import { t } from '../../i18n/index.js';
import type { ExtensionUpdateInfo } from '@qwen-code/qwen-code-core';
function showMessageIfNoExtensions(
context: CommandContext,
extensions: unknown[],
): boolean {
if (extensions.length === 0) {
context.ui.addItem(
{
type: MessageType.INFO,
text: 'No extensions installed. Run `/extensions explore` to check out the gallery.',
},
Date.now(),
);
return true;
}
return false;
}
async function listAction(context: CommandContext) {
const extensions = context.services.config
? context.services.config.getExtensions()
: [];
if (showMessageIfNoExtensions(context, extensions)) {
return;
}
context.ui.addItem(
{
type: MessageType.EXTENSIONS_LIST,
@@ -34,7 +53,6 @@ async function updateAction(context: CommandContext, args: string) {
const updateArgs = args.split(' ').filter((value) => value.length > 0);
const all = updateArgs.length === 1 && updateArgs[0] === '--all';
const names = all ? undefined : updateArgs;
let updateInfos: ExtensionUpdateInfo[] = [];
if (!all && names?.length === 0) {
context.ui.addItem(
@@ -47,29 +65,40 @@ async function updateAction(context: CommandContext, args: string) {
return;
}
let updateInfos: ExtensionUpdateInfo[] = [];
const extensionManager = context.services.config!.getExtensionManager();
const extensions = context.services.config
? context.services.config.getExtensions()
: [];
if (showMessageIfNoExtensions(context, extensions)) {
return Promise.resolve();
}
try {
await checkForAllExtensionUpdates(
context.services.config!.getExtensions(),
context.ui.dispatchExtensionStateUpdate,
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
await extensionManager.checkForAllExtensionUpdates((extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
context.ui.setPendingItem({
type: MessageType.EXTENSIONS_LIST,
});
if (all) {
updateInfos = await updateAllUpdatableExtensions(
context.services.config!.getWorkingDir(),
// We don't have the ability to prompt for consent yet in this flow.
(description) =>
requestConsentInteractive(
description,
context.ui.addConfirmUpdateExtensionRequest,
),
context.services.config!.getExtensions(),
updateInfos = await extensionManager.updateAllUpdatableExtensions(
context.ui.extensionsUpdateState,
context.ui.dispatchExtensionStateUpdate,
(extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
} else if (names?.length) {
const workingDir = context.services.config!.getWorkingDir();
const extensions = context.services.config!.getExtensions();
for (const name of names) {
const extension = extensions.find(
@@ -85,17 +114,15 @@ async function updateAction(context: CommandContext, args: string) {
);
continue;
}
const updateInfo = await updateExtension(
const updateInfo = await extensionManager.updateExtension(
extension,
workingDir,
(description) =>
requestConsentInteractive(
description,
context.ui.addConfirmUpdateExtensionRequest,
),
context.ui.extensionsUpdateState.get(extension.name)?.status ??
ExtensionUpdateState.UNKNOWN,
context.ui.dispatchExtensionStateUpdate,
(extensionName, state) =>
context.ui.dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
}),
);
if (updateInfo) updateInfos.push(updateInfo);
}

View File

@@ -191,23 +191,11 @@ export const ideCommand = async (): Promise<SlashCommand> => {
kind: CommandKind.BUILT_IN,
action: async (context) => {
const installer = getIdeInstaller(currentIDE);
const isSandBox = !!process.env['SANDBOX'];
if (isSandBox) {
context.ui.addItem(
{
type: 'info',
text: `IDE integration needs to be installed on the host. If you have already installed it, you can directly connect the ide`,
},
Date.now(),
);
return;
}
if (!installer) {
const ideName = ideClient.getDetectedIdeDisplayName();
context.ui.addItem(
{
type: 'error',
text: `Automatic installation is not supported for ${ideName}. Please install the '${QWEN_CODE_COMPANION_EXTENSION_NAME}' extension manually from the marketplace.`,
text: `No installer is available for ${ideClient.getDetectedIdeDisplayName()}. Please install the '${QWEN_CODE_COMPANION_EXTENSION_NAME}' extension manually from the marketplace.`,
},
Date.now(),
);

View File

@@ -6,6 +6,7 @@
import { Box, Text } from 'ink';
import { IdeIntegrationNudge } from '../IdeIntegrationNudge.js';
import { CommandFormatMigrationNudge } from '../CommandFormatMigrationNudge.js';
import { LoopDetectionConfirmation } from './LoopDetectionConfirmation.js';
import { FolderTrustDialog } from './FolderTrustDialog.js';
import { ShellConfirmationDialog } from './ShellConfirmationDialog.js';
@@ -16,7 +17,6 @@ import { QwenOAuthProgress } from './QwenOAuthProgress.js';
import { AuthDialog } from '../auth/AuthDialog.js';
import { OpenAIKeyPrompt } from './OpenAIKeyPrompt.js';
import { EditorSettingsDialog } from './EditorSettingsDialog.js';
import { WorkspaceMigrationDialog } from './WorkspaceMigrationDialog.js';
import { PermissionsModifyTrustDialog } from './PermissionsModifyTrustDialog.js';
import { ModelDialog } from './ModelDialog.js';
import { ApprovalModeDialog } from './ApprovalModeDialog.js';
@@ -77,15 +77,6 @@ export const DialogManager = ({
if (uiState.showIdeRestartPrompt) {
return <IdeTrustChangeDialog reason={uiState.ideTrustRestartReason} />;
}
if (uiState.showWorkspaceMigrationDialog) {
return (
<WorkspaceMigrationDialog
workspaceExtensions={uiState.workspaceExtensions}
onOpen={uiActions.onWorkspaceMigrationDialogOpen}
onClose={uiActions.onWorkspaceMigrationDialogClose}
/>
);
}
if (uiState.shouldShowIdePrompt) {
return (
<IdeIntegrationNudge
@@ -94,6 +85,14 @@ export const DialogManager = ({
/>
);
}
if (uiState.shouldShowCommandMigrationNudge) {
return (
<CommandFormatMigrationNudge
tomlFiles={uiState.commandMigrationTomlFiles}
onComplete={uiActions.handleCommandMigrationComplete}
/>
);
}
if (uiState.isFolderTrustDialogOpen) {
return (
<FolderTrustDialog

View File

@@ -87,13 +87,7 @@ export async function showResumeSessionPicker(
let selectedId: string | undefined;
const { unmount, waitUntilExit } = render(
<KeypressProvider
kittyProtocolEnabled={false}
pasteWorkaround={
process.platform === 'win32' ||
parseInt(process.versions.node.split('.')[0], 10) < 20
}
>
<KeypressProvider kittyProtocolEnabled={false}>
<StandalonePickerScreen
sessionService={sessionService}
onSelect={(id) => {

View File

@@ -1,119 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { Box, Text } from 'ink';
import {
type Extension,
performWorkspaceExtensionMigration,
} from '../../config/extension.js';
import { RadioButtonSelect } from './shared/RadioButtonSelect.js';
import { theme } from '../semantic-colors.js';
import { useState } from 'react';
import { useKeypress } from '../hooks/useKeypress.js';
export function WorkspaceMigrationDialog(props: {
workspaceExtensions: Extension[];
onOpen: () => void;
onClose: () => void;
}) {
const { workspaceExtensions, onOpen, onClose } = props;
const [migrationComplete, setMigrationComplete] = useState(false);
const [failedExtensions, setFailedExtensions] = useState<string[]>([]);
onOpen();
const onMigrate = async () => {
const failed = await performWorkspaceExtensionMigration(
workspaceExtensions,
// We aren't updating extensions, just moving them around, don't need to ask for consent.
async (_) => true,
);
setFailedExtensions(failed);
setMigrationComplete(true);
};
useKeypress(
(key) => {
if (migrationComplete && key.sequence === 'q') {
process.exit(0);
}
},
{ isActive: true },
);
if (migrationComplete) {
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.border.default}
padding={1}
>
{failedExtensions.length > 0 ? (
<>
<Text color={theme.text.primary}>
The following extensions failed to migrate. Please try installing
them manually. To see other changes, Qwen Code must be restarted.
Press &apos;q&apos; to quit.
</Text>
<Box flexDirection="column" marginTop={1} marginLeft={2}>
{failedExtensions.map((failed) => (
<Text key={failed}>- {failed}</Text>
))}
</Box>
</>
) : (
<Text color={theme.text.primary}>
Migration complete. To see changes, Qwen Code must be restarted.
Press &apos;q&apos; to quit.
</Text>
)}
</Box>
);
}
return (
<Box
flexDirection="column"
borderStyle="round"
borderColor={theme.border.default}
padding={1}
>
<Text bold color={theme.text.primary}>
Workspace-level extensions are deprecated{'\n'}
</Text>
<Text color={theme.text.primary}>
Would you like to install them at the user level?
</Text>
<Text color={theme.text.primary}>
The extension definition will remain in your workspace directory.
</Text>
<Text color={theme.text.primary}>
If you opt to skip, you can install them manually using the extensions
install command.
</Text>
<Box flexDirection="column" marginTop={1} marginLeft={2}>
{workspaceExtensions.map((extension) => (
<Text key={extension.config.name}>- {extension.config.name}</Text>
))}
</Box>
<Box marginTop={1}>
<RadioButtonSelect
items={[
{ label: 'Install all', value: 'migrate', key: 'migrate' },
{ label: 'Skip', value: 'skip', key: 'skip' },
]}
onSelect={(value: string) => {
if (value === 'migrate') {
onMigrate();
} else {
onClose();
}
}}
/>
</Box>
</Box>
);
}

View File

@@ -218,7 +218,7 @@ export const AgentSelectionStep = ({
const renderAgentItem = (
agent: {
name: string;
level: 'project' | 'user' | 'builtin' | 'session';
level: 'project' | 'user' | 'builtin' | 'session' | 'extension';
isBuiltin?: boolean;
},
index: number,

View File

@@ -18,7 +18,6 @@ const mockUseUIState = vi.mocked(useUIState);
const mockExtensions = [
{ name: 'ext-one', version: '1.0.0', isActive: true },
{ name: 'ext-two', version: '2.1.0', isActive: true },
{ name: 'ext-disabled', version: '3.0.0', isActive: false },
];
describe('<ExtensionsList />', () => {
@@ -29,7 +28,6 @@ describe('<ExtensionsList />', () => {
const mockUIState = (
extensions: unknown[],
extensionsUpdateState: Map<string, ExtensionUpdateState>,
disabledExtensions: string[] = [],
) => {
mockUseUIState.mockReturnValue({
commandContext: createMockCommandContext({
@@ -37,13 +35,6 @@ describe('<ExtensionsList />', () => {
config: {
getExtensions: () => extensions,
},
settings: {
merged: {
extensions: {
disabled: disabledExtensions,
},
},
},
},
}),
extensionsUpdateState,
@@ -58,12 +49,11 @@ describe('<ExtensionsList />', () => {
});
it('should render a list of extensions with their version and status', () => {
mockUIState(mockExtensions, new Map(), ['ext-disabled']);
mockUIState(mockExtensions, new Map());
const { lastFrame } = render(<ExtensionsList />);
const output = lastFrame();
expect(output).toContain('ext-one (v1.0.0) - active');
expect(output).toContain('ext-two (v2.1.0) - active');
expect(output).toContain('ext-disabled (v3.0.0) - disabled');
});
it('should display "unknown state" if an extension has no update state', () => {

View File

@@ -9,12 +9,10 @@ import { useUIState } from '../../contexts/UIStateContext.js';
import { ExtensionUpdateState } from '../../state/extensions.js';
export const ExtensionsList = () => {
const { commandContext, extensionsUpdateState } = useUIState();
const allExtensions = commandContext.services.config!.getExtensions();
const settings = commandContext.services.settings;
const disabledExtensions = settings.merged.extensions?.disabled ?? [];
const { extensionsUpdateState, commandContext } = useUIState();
const extensions = commandContext.services.config?.getExtensions() || [];
if (allExtensions.length === 0) {
if (extensions.length === 0) {
return <Text>No extensions installed.</Text>;
}
@@ -22,10 +20,11 @@ export const ExtensionsList = () => {
<Box flexDirection="column" marginTop={1} marginBottom={1}>
<Text>Installed extensions:</Text>
<Box flexDirection="column" paddingLeft={2}>
{allExtensions.map((ext) => {
{extensions.map((ext) => {
const state = extensionsUpdateState.get(ext.name);
const isActive = !disabledExtensions.includes(ext.name);
const isActive = ext.isActive;
const activeString = isActive ? 'active' : 'disabled';
const activeColor = isActive ? 'green' : 'grey';
let stateColor = 'gray';
const stateText = state || 'unknown state';
@@ -44,6 +43,7 @@ export const ExtensionsList = () => {
break;
case ExtensionUpdateState.UP_TO_DATE:
case ExtensionUpdateState.NOT_UPDATABLE:
case ExtensionUpdateState.UPDATED:
stateColor = 'green';
break;
default:
@@ -52,12 +52,22 @@ export const ExtensionsList = () => {
}
return (
<Box key={ext.name}>
<Box key={ext.name} flexDirection="column" marginBottom={1}>
<Text>
<Text color="cyan">{`${ext.name} (v${ext.version})`}</Text>
{` - ${activeString}`}
<Text color={activeColor}>{` - ${activeString}`}</Text>
{<Text color={stateColor}>{` (${stateText})`}</Text>}
</Text>
{ext.resolvedSettings && ext.resolvedSettings.length > 0 && (
<Box flexDirection="column" paddingLeft={2}>
<Text>settings:</Text>
{ext.resolvedSettings.map((setting) => (
<Text key={setting.name}>
- {setting.name}: {setting.value}
</Text>
))}
</Box>
)}
</Box>
);
})}

View File

@@ -7,6 +7,7 @@
import { createContext, useContext } from 'react';
import { type Key } from '../hooks/useKeypress.js';
import { type IdeIntegrationNudgeResult } from '../IdeIntegrationNudge.js';
import { type CommandMigrationNudgeResult } from '../CommandFormatMigrationNudge.js';
import { type FolderTrustChoice } from '../components/FolderTrustDialog.js';
import {
type AuthType,
@@ -47,14 +48,13 @@ export interface UIActions {
setShellModeActive: (value: boolean) => void;
vimHandleInput: (key: Key) => boolean;
handleIdePromptComplete: (result: IdeIntegrationNudgeResult) => void;
handleCommandMigrationComplete: (result: CommandMigrationNudgeResult) => void;
handleFolderTrustSelect: (choice: FolderTrustChoice) => void;
setConstrainHeight: (value: boolean) => void;
onEscapePromptChange: (show: boolean) => void;
refreshStatic: () => void;
handleFinalSubmit: (value: string) => void;
handleClearScreen: () => void;
onWorkspaceMigrationDialogOpen: () => void;
onWorkspaceMigrationDialogClose: () => void;
// Vision switch dialog
handleVisionSwitchSelect: (outcome: VisionSwitchOutcome) => void;
// Welcome back dialog

View File

@@ -72,6 +72,8 @@ export interface UIState {
suggestionsWidth: number;
isInputActive: boolean;
shouldShowIdePrompt: boolean;
shouldShowCommandMigrationNudge: boolean;
commandMigrationTomlFiles: string[];
isFolderTrustDialogOpen: boolean;
isTrustedFolder: boolean | undefined;
constrainHeight: boolean;
@@ -87,9 +89,6 @@ export interface UIState {
historyRemountKey: number;
messageQueue: string[];
showAutoAcceptIndicator: ApprovalMode;
showWorkspaceMigrationDialog: boolean;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
workspaceExtensions: any[]; // Extension[]
// Quota-related state
currentModel: string;
contextFileNames: string[];

View File

@@ -0,0 +1,51 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { useEffect, useState } from 'react';
import { Storage } from '@qwen-code/qwen-code-core';
import { detectTomlCommands } from '../../services/command-migration-tool.js';
import type { LoadedSettings } from '../../config/settings.js';
/**
* Hook to detect TOML command files and manage migration nudge visibility.
 * Checks the workspace (.qwen/commands) and user (~/.qwen/commands) command directories.
*/
export function useCommandMigration(
settings: LoadedSettings,
storage: Storage,
) {
const [showMigrationNudge, setShowMigrationNudge] = useState(false);
const [tomlFiles, setTomlFiles] = useState<string[]>([]);
useEffect(() => {
const checkTomlCommands = async () => {
const allFiles: string[] = [];
// Check workspace commands directory (.qwen/commands)
const workspaceCommandsDir = storage.getProjectCommandsDir();
const workspaceFiles = await detectTomlCommands(workspaceCommandsDir);
allFiles.push(...workspaceFiles.map((f) => `workspace: ${f}`));
// Check user commands directory (~/.qwen/commands)
const userCommandsDir = Storage.getUserCommandsDir();
const userFiles = await detectTomlCommands(userCommandsDir);
allFiles.push(...userFiles.map((f) => `user: ${f}`));
if (allFiles.length > 0) {
setTomlFiles(allFiles);
setShowMigrationNudge(true);
}
};
checkTomlCommands();
}, [storage]);
return {
showMigrationNudge,
tomlFiles,
setShowMigrationNudge,
};
}
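
The hook above only surfaces state; a consumer still has to render the nudge. Below is a minimal sketch of such a consumer, assuming an ink-based component and that `settings`/`storage` are passed in by the caller. The component name, its props, the import paths, and the rendered text are assumptions for illustration and are not part of this change.

```tsx
// Illustrative consumer of useCommandMigration (names and paths are assumed).
import { Text } from 'ink';
import { Storage } from '@qwen-code/qwen-code-core';
import type { LoadedSettings } from '../../config/settings.js';
import { useCommandMigration } from './useCommandMigration.js';

export const TomlMigrationBanner = (props: {
  settings: LoadedSettings;
  storage: Storage;
}) => {
  const { showMigrationNudge, tomlFiles } = useCommandMigration(
    props.settings,
    props.storage,
  );

  if (!showMigrationNudge) {
    return null;
  }
  // A real implementation would render CommandFormatMigrationNudge and call
  // setShowMigrationNudge(false) once the user dismisses it.
  return <Text>{tomlFiles.length} TOML command file(s) can be migrated.</Text>;
};
```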

View File

@@ -4,26 +4,21 @@
* SPDX-License-Identifier: Apache-2.0
*/
import { vi } from 'vitest';
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
ExtensionStorage,
annotateActiveExtensions,
loadExtension,
} from '../../config/extension.js';
import { createExtension } from '../../test-utils/createExtension.js';
import { useExtensionUpdates } from './useExtensionUpdates.js';
import { QWEN_DIR, type GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import {
QWEN_DIR,
type ExtensionManager,
type Extension,
type ExtensionUpdateInfo,
ExtensionUpdateState,
} from '@qwen-code/qwen-code-core';
import { renderHook, waitFor } from '@testing-library/react';
import { MessageType } from '../types.js';
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
import {
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { ExtensionUpdateState } from '../state/extensions.js';
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
@@ -33,63 +28,85 @@ vi.mock('os', async (importOriginal) => {
};
});
vi.mock('../../config/extensions/update.js', () => ({
checkForAllExtensionUpdates: vi.fn(),
updateExtension: vi.fn(),
}));
function createMockExtension(overrides: Partial<Extension> = {}): Extension {
return {
id: 'test-extension-id',
name: 'test-extension',
version: '1.0.0',
path: '/some/path',
isActive: true,
config: {
name: 'test-extension',
version: '1.0.0',
},
contextFiles: [],
installMetadata: {
type: 'git',
source: 'https://some/repo',
autoUpdate: false,
},
...overrides,
};
}
function createMockExtensionManager(
extensions: Extension[],
checkCallback?: (
callback: (extensionName: string, state: ExtensionUpdateState) => void,
) => Promise<void>,
updateResult?: ExtensionUpdateInfo | undefined,
): ExtensionManager {
return {
getLoadedExtensions: vi.fn(() => extensions),
checkForAllExtensionUpdates: vi.fn(
async (
callback: (extensionName: string, state: ExtensionUpdateState) => void,
) => {
if (checkCallback) {
await checkCallback(callback);
}
},
),
updateExtension: vi.fn(async () => updateResult),
} as unknown as ExtensionManager;
}
describe('useExtensionUpdates', () => {
let tempHomeDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
);
tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'qwen-cli-test-home-'));
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
fs.mkdirSync(userExtensionsDir, { recursive: true });
vi.mocked(checkForAllExtensionUpdates).mockReset();
vi.mocked(updateExtension).mockReset();
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.clearAllMocks();
});
it('should check for updates and log a message if an update is available', async () => {
const extensions = [
{
name: 'test-extension',
const extension = createMockExtension({
name: 'test-extension',
installMetadata: {
type: 'git',
version: '1.0.0',
path: '/some/path',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo',
autoUpdate: false,
},
source: 'https://some/repo',
autoUpdate: false,
},
];
});
const addItem = vi.fn();
const cwd = '/test/cwd';
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension],
async (callback) => {
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
},
);
renderHook(() =>
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
);
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
await waitFor(() => {
expect(addItem).toHaveBeenCalledWith(
@@ -103,43 +120,32 @@ describe('useExtensionUpdates', () => {
});
it('should check for updates and automatically update if autoUpdate is true', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
const extension = createMockExtension({
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
source: 'https://some.git/repo',
autoUpdate: true,
},
});
const extension = annotateActiveExtensions(
[loadExtension({ extensionDir, workspaceDir: tempHomeDir })!],
tempHomeDir,
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const addItem = vi.fn();
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension],
async (callback) => {
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
},
{
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: 'test-extension',
},
);
vi.mocked(updateExtension).mockResolvedValue({
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: '',
});
renderHook(() => useExtensionUpdates([extension], addItem, tempHomeDir));
renderHook(() =>
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
);
await waitFor(
() => {
@@ -156,77 +162,64 @@ describe('useExtensionUpdates', () => {
});
it('should batch update notifications for multiple extensions', async () => {
const extensionDir1 = createExtension({
extensionsDir: userExtensionsDir,
const extension1 = createMockExtension({
id: 'test-extension-1-id',
name: 'test-extension-1',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo1',
type: 'git',
source: 'https://some.git/repo1',
autoUpdate: true,
},
});
const extensionDir2 = createExtension({
extensionsDir: userExtensionsDir,
const extension2 = createMockExtension({
id: 'test-extension-2-id',
name: 'test-extension-2',
version: '2.0.0',
installMetadata: {
source: 'https://some.git/repo2',
type: 'git',
source: 'https://some.git/repo2',
autoUpdate: true,
},
});
const extensions = annotateActiveExtensions(
[
loadExtension({
extensionDir: extensionDir1,
workspaceDir: tempHomeDir,
})!,
loadExtension({
extensionDir: extensionDir2,
workspaceDir: tempHomeDir,
})!,
],
tempHomeDir,
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
);
const addItem = vi.fn();
let updateCallCount = 0;
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({
type: 'SET_STATE',
payload: {
const extensionManager = {
getLoadedExtensions: vi.fn(() => [extension1, extension2]),
checkForAllExtensionUpdates: vi.fn(
async (
callback: (
extensionName: string,
state: ExtensionUpdateState,
) => void,
) => {
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
},
),
updateExtension: vi.fn(async () => {
updateCallCount++;
if (updateCallCount === 1) {
return {
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: 'test-extension-1',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-2',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
},
};
}
return {
originalVersion: '2.0.0',
updatedVersion: '2.1.0',
name: 'test-extension-2',
};
}),
} as unknown as ExtensionManager;
renderHook(() =>
useExtensionUpdates(extensionManager, addItem, tempHomeDir),
);
vi.mocked(updateExtension)
.mockResolvedValueOnce({
originalVersion: '1.0.0',
updatedVersion: '1.1.0',
name: '',
})
.mockResolvedValueOnce({
originalVersion: '2.0.0',
updatedVersion: '2.1.0',
name: '',
});
renderHook(() => useExtensionUpdates(extensions, addItem, tempHomeDir));
await waitFor(
() => {
expect(addItem).toHaveBeenCalledTimes(2);
@@ -250,60 +243,40 @@ describe('useExtensionUpdates', () => {
});
it('should batch update notifications for multiple extensions with autoUpdate: false', async () => {
const extensions = [
{
name: 'test-extension-1',
const extension1 = createMockExtension({
id: 'test-extension-1-id',
name: 'test-extension-1',
version: '1.0.0',
installMetadata: {
type: 'git',
version: '1.0.0',
path: '/some/path1',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo1',
autoUpdate: false,
},
source: 'https://some/repo1',
autoUpdate: false,
},
{
name: 'test-extension-2',
});
const extension2 = createMockExtension({
id: 'test-extension-2-id',
name: 'test-extension-2',
version: '2.0.0',
installMetadata: {
type: 'git',
version: '2.0.0',
path: '/some/path2',
isActive: true,
installMetadata: {
type: 'git',
source: 'https://some/repo2',
autoUpdate: false,
},
source: 'https://some/repo2',
autoUpdate: false,
},
];
});
const addItem = vi.fn();
const cwd = '/test/cwd';
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
async (extensions, dispatch) => {
dispatch({ type: 'BATCH_CHECK_START' });
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-1',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
const extensionManager = createMockExtensionManager(
[extension1, extension2],
async (callback) => {
callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
await new Promise((r) => setTimeout(r, 50));
dispatch({
type: 'SET_STATE',
payload: {
name: 'test-extension-2',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
dispatch({ type: 'BATCH_CHECK_END' });
callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
},
);
renderHook(() =>
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
);
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
await waitFor(() => {
expect(addItem).toHaveBeenCalledTimes(1);

View File

@@ -4,7 +4,7 @@
* SPDX-License-Identifier: Apache-2.0
*/
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import type { ExtensionManager } from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import {
ExtensionUpdateState,
@@ -14,11 +14,6 @@ import {
import { useCallback, useEffect, useMemo, useReducer } from 'react';
import type { UseHistoryManagerReturn } from './useHistoryManager.js';
import { MessageType, type ConfirmationRequest } from '../types.js';
import {
checkForAllExtensionUpdates,
updateExtension,
} from '../../config/extensions/update.js';
import { requestConsentInteractive } from '../../config/extension.js';
import { checkExhaustive } from '../../utils/checks.js';
type ConfirmationRequestWrapper = {
@@ -45,15 +40,7 @@ function confirmationRequestsReducer(
}
}
export const useExtensionUpdates = (
extensions: GeminiCLIExtension[],
addItem: UseHistoryManagerReturn['addItem'],
cwd: string,
) => {
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
extensionUpdatesReducer,
initialExtensionUpdatesState,
);
export const useConfirmUpdateRequests = () => {
const [
confirmUpdateExtensionRequests,
dispatchConfirmUpdateExtensionRequests,
@@ -78,15 +65,52 @@ export const useExtensionUpdates = (
},
[dispatchConfirmUpdateExtensionRequests],
);
return {
addConfirmUpdateExtensionRequest,
confirmUpdateExtensionRequests,
dispatchConfirmUpdateExtensionRequests,
};
};
export const useExtensionUpdates = (
extensionManager: ExtensionManager,
addItem: UseHistoryManagerReturn['addItem'],
cwd: string,
) => {
const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
extensionUpdatesReducer,
initialExtensionUpdatesState,
);
const extensions = extensionManager.getLoadedExtensions();
useEffect(() => {
(async () => {
await checkForAllExtensionUpdates(
extensions,
dispatchExtensionStateUpdate,
const extensionsToCheck = extensions.filter((extension) => {
const currentStatus = extensionsUpdateState.extensionStatuses.get(
extension.name,
);
if (!currentStatus) return true;
const currentState = currentStatus.status;
return !currentState || currentState === ExtensionUpdateState.UNKNOWN;
});
if (extensionsToCheck.length === 0) return;
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
await extensionManager.checkForAllExtensionUpdates(
(extensionName: string, state: ExtensionUpdateState) => {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
});
},
);
dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
})();
}, [extensions, extensions.length, dispatchExtensionStateUpdate]);
}, [
extensions,
extensionManager,
extensionsUpdateState.extensionStatuses,
dispatchExtensionStateUpdate,
]);
useEffect(() => {
if (extensionsUpdateState.batchChecksInProgress > 0) {
@@ -113,17 +137,17 @@ export const useExtensionUpdates = (
});
if (extension.installMetadata?.autoUpdate) {
updateExtension(
extension,
cwd,
(description) =>
requestConsentInteractive(
description,
addConfirmUpdateExtensionRequest,
),
currentState.status,
dispatchExtensionStateUpdate,
)
extensionManager
.updateExtension(
extension,
currentState.status,
(extensionName, state) => {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extensionName, state },
});
},
)
.then((result) => {
if (!result) return;
addItem(
@@ -157,13 +181,7 @@ export const useExtensionUpdates = (
Date.now(),
);
}
}, [
extensions,
extensionsUpdateState,
addConfirmUpdateExtensionRequest,
addItem,
cwd,
]);
}, [extensions, extensionManager, extensionsUpdateState, addItem, cwd]);
const extensionsUpdateStateComputed = useMemo(() => {
const result = new Map<string, ExtensionUpdateState>();
@@ -180,7 +198,5 @@ export const useExtensionUpdates = (
extensionsUpdateState: extensionsUpdateStateComputed,
extensionsUpdateStateInternal: extensionsUpdateState.extensionStatuses,
dispatchExtensionStateUpdate,
confirmUpdateExtensionRequests,
addConfirmUpdateExtensionRequest,
};
};
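
To make the new contract concrete, here is a rough sketch of how a caller might wire the refactored hooks inside a component. How `config` and `addItem` are obtained, the import path, and the extension name are assumptions for illustration.

```typescript
// Inside a React component (sketch): the hook now receives the ExtensionManager
// instead of a pre-annotated extensions array.
import {
  useExtensionUpdates,
  useConfirmUpdateRequests,
} from './useExtensionUpdates.js'; // path assumed

const extensionManager = config.getExtensionManager();

const { extensionsUpdateState } = useExtensionUpdates(
  extensionManager,
  addItem,
  process.cwd(),
);

// Confirmation requests now live in their own hook.
const { confirmUpdateExtensionRequests, addConfirmUpdateExtensionRequest } =
  useConfirmUpdateRequests();

// The computed state is a Map<string, ExtensionUpdateState> keyed by extension name.
const status = extensionsUpdateState.get('my-extension'); // e.g. 'update available'
```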

View File

@@ -1,70 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { useState, useEffect } from 'react';
import {
type Extension,
getWorkspaceExtensions,
} from '../../config/extension.js';
import { type LoadedSettings, SettingScope } from '../../config/settings.js';
import process from 'node:process';
export function useWorkspaceMigration(settings: LoadedSettings) {
const [showWorkspaceMigrationDialog, setShowWorkspaceMigrationDialog] =
useState(false);
const [workspaceExtensions, setWorkspaceExtensions] = useState<Extension[]>(
[],
);
useEffect(() => {
// Default to true if not set.
if (!(settings.merged.experimental?.extensionManagement ?? true)) {
return;
}
const cwd = process.cwd();
const extensions = getWorkspaceExtensions(cwd);
if (
extensions.length > 0 &&
!settings.merged.extensions?.workspacesWithMigrationNudge?.includes(cwd)
) {
setWorkspaceExtensions(extensions);
setShowWorkspaceMigrationDialog(true);
console.log(settings.merged.extensions);
}
}, [
settings.merged.extensions,
settings.merged.experimental?.extensionManagement,
]);
const onWorkspaceMigrationDialogOpen = () => {
const userSettings = settings.forScope(SettingScope.User);
const extensionSettings = userSettings.settings.extensions || {
disabled: [],
};
const workspacesWithMigrationNudge =
extensionSettings.workspacesWithMigrationNudge || [];
const cwd = process.cwd();
if (!workspacesWithMigrationNudge.includes(cwd)) {
workspacesWithMigrationNudge.push(cwd);
}
extensionSettings.workspacesWithMigrationNudge =
workspacesWithMigrationNudge;
settings.setValue(SettingScope.User, 'extensions', extensionSettings);
};
const onWorkspaceMigrationDialogClose = () => {
setShowWorkspaceMigrationDialog(false);
};
return {
showWorkspaceMigrationDialog,
workspaceExtensions,
onWorkspaceMigrationDialogOpen,
onWorkspaceMigrationDialogClose,
};
}

View File

@@ -10,6 +10,7 @@ export enum ExtensionUpdateState {
CHECKING_FOR_UPDATES = 'checking for updates',
UPDATED_NEEDS_RESTART = 'updated, needs restart',
UPDATING = 'updating',
UPDATED = 'updated',
UPDATE_AVAILABLE = 'update available',
UP_TO_DATE = 'up to date',
ERROR = 'error',

View File

@@ -17,10 +17,16 @@
* resolveEnvVarsInString("URL: ${BASE_URL}/api") // Returns "URL: https://api.example.com/api"
* resolveEnvVarsInString("Missing: $UNDEFINED_VAR") // Returns "Missing: $UNDEFINED_VAR"
*/
export function resolveEnvVarsInString(value: string): string {
export function resolveEnvVarsInString(
value: string,
customEnv?: Record<string, string>,
): string {
const envVarRegex = /\$(?:(\w+)|{([^}]+)})/g; // Find $VAR_NAME or ${VAR_NAME}
return value.replace(envVarRegex, (match, varName1, varName2) => {
const varName = varName1 || varName2;
if (customEnv && typeof customEnv[varName] === 'string') {
return customEnv[varName];
}
if (process && process.env && typeof process.env[varName] === 'string') {
return process.env[varName]!;
}
@@ -47,8 +53,11 @@ export function resolveEnvVarsInString(value: string): string {
* };
* const resolved = resolveEnvVarsInObject(config);
*/
export function resolveEnvVarsInObject<T>(obj: T): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet());
export function resolveEnvVarsInObject<T>(
obj: T,
customEnv?: Record<string, string>,
): T {
return resolveEnvVarsInObjectInternal(obj, new WeakSet(), customEnv);
}
/**
@@ -61,6 +70,7 @@ export function resolveEnvVarsInObject<T>(obj: T): T {
function resolveEnvVarsInObjectInternal<T>(
obj: T,
visited: WeakSet<object>,
customEnv?: Record<string, string>,
): T {
if (
obj === null ||
@@ -72,7 +82,7 @@ function resolveEnvVarsInObjectInternal<T>(
}
if (typeof obj === 'string') {
return resolveEnvVarsInString(obj) as unknown as T;
return resolveEnvVarsInString(obj, customEnv) as unknown as T;
}
if (Array.isArray(obj)) {
@@ -84,7 +94,7 @@ function resolveEnvVarsInObjectInternal<T>(
visited.add(obj);
const result = obj.map((item) =>
resolveEnvVarsInObjectInternal(item, visited),
resolveEnvVarsInObjectInternal(item, visited, customEnv),
) as unknown as T;
visited.delete(obj);
return result;
@@ -101,7 +111,11 @@ function resolveEnvVarsInObjectInternal<T>(
const newObj = { ...obj } as T;
for (const key in newObj) {
if (Object.prototype.hasOwnProperty.call(newObj, key)) {
newObj[key] = resolveEnvVarsInObjectInternal(newObj[key], visited);
newObj[key] = resolveEnvVarsInObjectInternal(
newObj[key],
visited,
customEnv,
);
}
}
visited.delete(obj as object);
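
For reference, a small usage sketch of the new optional `customEnv` parameter. The variable names, values, and import path are invented, but the lookup order (customEnv first, then process.env) follows the hunk above.

```typescript
import {
  resolveEnvVarsInString,
  resolveEnvVarsInObject,
} from './envVarResolver.js'; // path assumed

process.env['BASE_URL'] = 'https://api.example.com';

// Falls back to process.env when no customEnv entry matches.
resolveEnvVarsInString('URL: ${BASE_URL}/api');
// -> 'URL: https://api.example.com/api'

// customEnv entries win over process.env.
resolveEnvVarsInString('URL: ${BASE_URL}/api', {
  BASE_URL: 'https://staging.example.com',
});
// -> 'URL: https://staging.example.com/api'

// Objects are resolved recursively with the same lookup order.
resolveEnvVarsInObject(
  { endpoint: '$API_HOST', headers: { auth: 'Bearer ${TOKEN}' } },
  { API_HOST: 'localhost:8080', TOKEN: 'abc123' },
);
// -> { endpoint: 'localhost:8080', headers: { auth: 'Bearer abc123' } }
```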

View File

@@ -6,11 +6,7 @@
import { vi, type Mock, type MockInstance } from 'vitest';
import type { Config } from '@qwen-code/qwen-code-core';
import {
OutputFormat,
FatalInputError,
ToolErrorType,
} from '@qwen-code/qwen-code-core';
import { OutputFormat, FatalInputError } from '@qwen-code/qwen-code-core';
import {
getErrorMessage,
handleError,
@@ -69,7 +65,6 @@ vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
describe('errors', () => {
let mockConfig: Config;
let processExitSpy: MockInstance;
let processStderrWriteSpy: MockInstance;
let consoleErrorSpy: MockInstance;
beforeEach(() => {
@@ -79,11 +74,6 @@ describe('errors', () => {
// Mock console.error
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
// Mock process.stderr.write
processStderrWriteSpy = vi
.spyOn(process.stderr, 'write')
.mockImplementation(() => true);
// Mock process.exit to throw instead of actually exiting
processExitSpy = vi.spyOn(process, 'exit').mockImplementation((code) => {
throw new Error(`process.exit called with code: ${code}`);
@@ -94,13 +84,11 @@ describe('errors', () => {
getOutputFormat: vi.fn().mockReturnValue(OutputFormat.TEXT),
getContentGeneratorConfig: vi.fn().mockReturnValue({ authType: 'test' }),
getDebugMode: vi.fn().mockReturnValue(true),
isInteractive: vi.fn().mockReturnValue(false),
} as unknown as Config;
});
afterEach(() => {
consoleErrorSpy.mockRestore();
processStderrWriteSpy.mockRestore();
processExitSpy.mockRestore();
});
@@ -444,87 +432,6 @@ describe('errors', () => {
expect(processExitSpy).not.toHaveBeenCalled();
});
});
describe('permission denied warnings', () => {
it('should show warning when EXECUTION_DENIED in non-interactive text mode', () => {
(mockConfig.getDebugMode as Mock).mockReturnValue(false);
(mockConfig.isInteractive as Mock).mockReturnValue(false);
(
mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
).mockReturnValue(OutputFormat.TEXT);
handleToolError(
toolName,
toolError,
mockConfig,
ToolErrorType.EXECUTION_DENIED,
);
expect(processStderrWriteSpy).toHaveBeenCalledWith(
expect.stringContaining(
'Warning: Tool "test-tool" requires user approval',
),
);
expect(processStderrWriteSpy).toHaveBeenCalledWith(
expect.stringContaining('use the -y flag (YOLO mode)'),
);
expect(processExitSpy).not.toHaveBeenCalled();
});
it('should not show warning when EXECUTION_DENIED in interactive mode', () => {
(mockConfig.getDebugMode as Mock).mockReturnValue(false);
(mockConfig.isInteractive as Mock).mockReturnValue(true);
(
mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
).mockReturnValue(OutputFormat.TEXT);
handleToolError(
toolName,
toolError,
mockConfig,
ToolErrorType.EXECUTION_DENIED,
);
expect(processStderrWriteSpy).not.toHaveBeenCalled();
expect(processExitSpy).not.toHaveBeenCalled();
});
it('should not show warning when EXECUTION_DENIED in JSON mode', () => {
(mockConfig.getDebugMode as Mock).mockReturnValue(false);
(mockConfig.isInteractive as Mock).mockReturnValue(false);
(
mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
).mockReturnValue(OutputFormat.JSON);
handleToolError(
toolName,
toolError,
mockConfig,
ToolErrorType.EXECUTION_DENIED,
);
expect(processStderrWriteSpy).not.toHaveBeenCalled();
expect(processExitSpy).not.toHaveBeenCalled();
});
it('should not show warning for non-EXECUTION_DENIED errors', () => {
(mockConfig.getDebugMode as Mock).mockReturnValue(false);
(mockConfig.isInteractive as Mock).mockReturnValue(false);
(
mockConfig.getOutputFormat as ReturnType<typeof vi.fn>
).mockReturnValue(OutputFormat.TEXT);
handleToolError(
toolName,
toolError,
mockConfig,
ToolErrorType.FILE_NOT_FOUND,
);
expect(processStderrWriteSpy).not.toHaveBeenCalled();
expect(processExitSpy).not.toHaveBeenCalled();
});
});
});
describe('handleCancellationError', () => {

View File

@@ -11,7 +11,6 @@ import {
parseAndFormatApiError,
FatalTurnLimitedError,
FatalCancellationError,
ToolErrorType,
} from '@qwen-code/qwen-code-core';
export function getErrorMessage(error: unknown): string {
@@ -103,24 +102,10 @@ export function handleToolError(
toolName: string,
toolError: Error,
config: Config,
errorCode?: string | number,
_errorCode?: string | number,
resultDisplay?: string,
): void {
// Check if this is a permission denied error in non-interactive mode
const isExecutionDenied = errorCode === ToolErrorType.EXECUTION_DENIED;
const isNonInteractive = !config.isInteractive();
const isTextMode = config.getOutputFormat() === OutputFormat.TEXT;
// Show warning for permission denied errors in non-interactive text mode
if (isExecutionDenied && isNonInteractive && isTextMode) {
const warningMessage =
`Warning: Tool "${toolName}" requires user approval but cannot execute in non-interactive mode.\n` +
`To enable automatic tool execution, use the -y flag (YOLO mode):\n` +
`Example: qwen -p 'your prompt' -y\n\n`;
process.stderr.write(warningMessage);
}
// Always log detailed error in debug mode
// Always just log to stderr; JSON/streaming formatting happens in the tool_result block elsewhere
if (config.getDebugMode()) {
console.error(
`Error executing tool ${toolName}: ${resultDisplay || toolError.message}`,

View File

@@ -11,7 +11,9 @@
"src/**/*.ts",
"src/**/*.tsx",
"src/**/*.json",
"./package.json"
"./package.json",
"../core/src/utils/toml-to-markdown-converter.test.ts",
"../core/src/utils/toml-to-markdown-converter.ts"
],
"exclude": [
"node_modules",

View File

@@ -1,6 +1,6 @@
{
"name": "@qwen-code/qwen-code-core",
"version": "0.7.0",
"version": "0.6.0",
"description": "Qwen Code Core",
"repository": {
"type": "git",

View File

@@ -80,6 +80,10 @@ import {
type TelemetryTarget,
uiTelemetryService,
} from '../telemetry/index.js';
import {
ExtensionManager,
type Extension,
} from '../extension/extensionManager.js';
// Utils
import { shouldAttemptBrowserLaunch } from '../utils/browser.js';
@@ -102,6 +106,7 @@ import {
type ResumedSessionData,
} from '../services/sessionService.js';
import { randomUUID } from 'node:crypto';
import { loadServerHierarchicalMemory } from '../utils/memoryDiscovery.js';
// Re-export types
export type { AnyToolInvocation, FileFilteringOptions, MCPOAuthConfig };
@@ -192,20 +197,17 @@ export interface GitCoAuthorSettings {
email?: string;
}
export interface GeminiCLIExtension {
name: string;
version: string;
isActive: boolean;
path: string;
installMetadata?: ExtensionInstallMetadata;
}
export interface ExtensionInstallMetadata {
source: string;
type: 'git' | 'local' | 'link' | 'github-release';
type: 'git' | 'local' | 'link' | 'github-release' | 'marketplace';
releaseTag?: string; // Only present for github-release installs.
ref?: string;
autoUpdate?: boolean;
allowPreRelease?: boolean;
marketplace?: {
marketplaceSource: string;
pluginName: string;
};
}
export const DEFAULT_TRUNCATE_TOOL_OUTPUT_THRESHOLD = 25_000;
@@ -303,14 +305,15 @@ export interface ConfigParameters {
includeDirectories?: string[];
bugCommand?: BugCommandSettings;
model?: string;
extensionContextFilePaths?: string[];
outputLanguageFilePath?: string;
maxSessionTurns?: number;
sessionTokenLimit?: number;
experimentalSkills?: boolean;
experimentalZedIntegration?: boolean;
listExtensions?: boolean;
extensions?: GeminiCLIExtension[];
blockedMcpServers?: Array<{ name: string; extensionName: string }>;
overrideExtensions?: string[];
allowedMcpServers?: string[];
excludedMcpServers?: string[];
noBrowser?: boolean;
summarizeToolOutput?: Record<string, SummarizeToolOutputSettings>;
folderTrustFeature?: boolean;
@@ -320,6 +323,8 @@ export interface ConfigParameters {
generationConfig?: Partial<ContentGeneratorConfig>;
cliVersion?: string;
loadMemoryFromIncludeDirectories?: boolean;
importFormat?: 'tree' | 'flat';
discoveryMaxDirs?: number;
chatRecording?: boolean;
// Web search providers
webSearch?: {
@@ -338,7 +343,6 @@ export interface ConfigParameters {
shouldUseNodePtyShell?: boolean;
skipNextSpeakerCheck?: boolean;
shellExecutionConfig?: ShellExecutionConfig;
extensionManagement?: boolean;
skipLoopDetection?: boolean;
vlmSwitchMode?: string;
truncateToolOutputThreshold?: number;
@@ -391,6 +395,7 @@ export class Config {
private toolRegistry!: ToolRegistry;
private promptRegistry!: PromptRegistry;
private subagentManager!: SubagentManager;
private extensionManager!: ExtensionManager;
private skillManager!: SkillManager;
private fileSystemService: FileSystemService;
private contentGeneratorConfig!: ContentGeneratorConfig;
@@ -413,6 +418,8 @@ export class Config {
private readonly toolCallCommand: string | undefined;
private readonly mcpServerCommand: string | undefined;
private mcpServers: Record<string, MCPServerConfig> | undefined;
private readonly allowedMcpServers?: string[];
private readonly excludedMcpServers?: string[];
private sessionSubagents: SubagentConfig[];
private userMemory: string;
private sdkMode: boolean;
@@ -435,11 +442,12 @@ export class Config {
private gitService: GitService | undefined = undefined;
private sessionService: SessionService | undefined = undefined;
private chatRecordingService: ChatRecordingService | undefined = undefined;
private extensionContextFilePaths: string[] = [];
private readonly checkpointing: boolean;
private readonly proxy: string | undefined;
private readonly cwd: string;
private readonly bugCommand: BugCommandSettings | undefined;
private readonly extensionContextFilePaths: string[];
private readonly outputLanguageFilePath?: string;
private readonly noBrowser: boolean;
private readonly folderTrustFeature: boolean;
private readonly folderTrust: boolean;
@@ -449,11 +457,8 @@ export class Config {
private readonly maxSessionTurns: number;
private readonly sessionTokenLimit: number;
private readonly listExtensions: boolean;
private readonly _extensions: GeminiCLIExtension[];
private readonly _blockedMcpServers: Array<{
name: string;
extensionName: string;
}>;
private readonly overrideExtensions?: string[];
fallbackModelHandler?: FallbackModelHandler;
private quotaErrorOccurred: boolean = false;
private readonly summarizeToolOutput:
@@ -464,6 +469,8 @@ export class Config {
private readonly experimentalSkills: boolean = false;
private readonly chatRecordingEnabled: boolean;
private readonly loadMemoryFromIncludeDirectories: boolean = false;
private readonly importFormat: 'tree' | 'flat';
private readonly discoveryMaxDirs: number;
private readonly webSearch?: {
provider: Array<{
type: 'tavily' | 'google' | 'dashscope';
@@ -480,7 +487,6 @@ export class Config {
private readonly shouldUseNodePtyShell: boolean;
private readonly skipNextSpeakerCheck: boolean;
private shellExecutionConfig: ShellExecutionConfig;
private readonly extensionManagement: boolean = true;
private readonly skipLoopDetection: boolean;
private readonly skipStartupContext: boolean;
private readonly vlmSwitchMode: string | undefined;
@@ -521,6 +527,8 @@ export class Config {
this.toolCallCommand = params.toolCallCommand;
this.mcpServerCommand = params.mcpServerCommand;
this.mcpServers = params.mcpServers;
this.allowedMcpServers = params.allowedMcpServers;
this.excludedMcpServers = params.excludedMcpServers;
this.sessionSubagents = params.sessionSubagents ?? [];
this.sdkMode = params.sdkMode ?? false;
this.userMemory = params.userMemory ?? '';
@@ -543,6 +551,7 @@ export class Config {
email: 'qwen-coder@alibabacloud.com',
};
this.usageStatisticsEnabled = params.usageStatisticsEnabled ?? true;
this.outputLanguageFilePath = params.outputLanguageFilePath;
this.fileFiltering = {
respectGitIgnore: params.fileFiltering?.respectGitIgnore ?? true,
@@ -556,15 +565,13 @@ export class Config {
this.cwd = params.cwd ?? process.cwd();
this.fileDiscoveryService = params.fileDiscoveryService ?? null;
this.bugCommand = params.bugCommand;
this.extensionContextFilePaths = params.extensionContextFilePaths ?? [];
this.maxSessionTurns = params.maxSessionTurns ?? -1;
this.sessionTokenLimit = params.sessionTokenLimit ?? -1;
this.experimentalZedIntegration =
params.experimentalZedIntegration ?? false;
this.experimentalSkills = params.experimentalSkills ?? false;
this.listExtensions = params.listExtensions ?? false;
this._extensions = params.extensions ?? [];
this._blockedMcpServers = params.blockedMcpServers ?? [];
this.overrideExtensions = params.overrideExtensions;
this.noBrowser = params.noBrowser ?? false;
this.summarizeToolOutput = params.summarizeToolOutput;
this.folderTrustFeature = params.folderTrustFeature ?? false;
@@ -583,6 +590,8 @@ export class Config {
this.loadMemoryFromIncludeDirectories =
params.loadMemoryFromIncludeDirectories ?? false;
this.importFormat = params.importFormat ?? 'tree';
this.discoveryMaxDirs = params.discoveryMaxDirs ?? 200;
this.chatCompression = params.chatCompression;
this.interactive = params.interactive ?? false;
this.trustedFolder = params.trustedFolder;
@@ -608,7 +617,6 @@ export class Config {
params.truncateToolOutputLines ?? DEFAULT_TRUNCATE_TOOL_OUTPUT_LINES;
this.enableToolOutputTruncation = params.enableToolOutputTruncation ?? true;
this.useSmartEdit = params.useSmartEdit ?? false;
this.extensionManagement = params.extensionManagement ?? true;
this.channel = params.channel;
this.storage = new Storage(this.targetDir);
this.vlmSwitchMode = params.vlmSwitchMode;
@@ -656,6 +664,23 @@ export class Config {
this.subagentManager.loadSessionSubagents(this.sessionSubagents);
}
this.extensionManager = new ExtensionManager({
workspaceDir: this.targetDir,
enabledExtensionOverrides: this.overrideExtensions,
config: this,
isWorkspaceTrusted: this.isTrustedFolder(),
});
await this.extensionManager.refreshCache();
const activeExtensions = await this.extensionManager
.getLoadedExtensions()
.filter((e) => e.isActive);
this.extensionContextFilePaths = activeExtensions.flatMap(
(e) => e.contextFiles,
);
await this.refreshHierarchicalMemory();
this.toolRegistry = await this.createToolRegistry(
options?.sendSdkMcpMessage,
);
@@ -665,6 +690,24 @@ export class Config {
logStartSession(this, new StartSessionEvent(this));
}
async refreshHierarchicalMemory(): Promise<void> {
const { memoryContent, fileCount } = await loadServerHierarchicalMemory(
this.getWorkingDir(),
this.shouldLoadMemoryFromIncludeDirectories()
? this.getWorkspaceContext().getDirectories()
: [],
this.getDebugMode(),
this.getFileService(),
this.getExtensionContextFilePaths(),
this.getFolderTrust(),
this.getImportFormat(),
this.getFileFilteringOptions(),
this.getDiscoveryMaxDirs(),
);
this.setUserMemory(memoryContent);
this.setGeminiMdFileCount(fileCount);
}
getContentGenerator(): ContentGenerator {
return this.contentGenerator;
}
@@ -763,6 +806,14 @@ export class Config {
return this.loadMemoryFromIncludeDirectories;
}
getImportFormat(): 'tree' | 'flat' {
return this.importFormat;
}
getDiscoveryMaxDirs(): number {
return this.discoveryMaxDirs;
}
getContentGeneratorConfig(): ContentGeneratorConfig {
return this.contentGeneratorConfig;
}
@@ -870,7 +921,14 @@ export class Config {
}
getExcludeTools(): string[] | undefined {
return this.excludeTools;
const allExcludeTools = new Set(this.excludeTools || []);
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
for (const tool of extension.config.excludeTools || []) {
allExcludeTools.add(tool);
}
}
return [...allExcludeTools];
}
getToolDiscoveryCommand(): string | undefined {
@@ -886,7 +944,37 @@ export class Config {
}
getMcpServers(): Record<string, MCPServerConfig> | undefined {
return this.mcpServers;
let mcpServers = { ...(this.mcpServers || {}) };
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) return;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
if (this.allowedMcpServers) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) =>
this.allowedMcpServers?.includes(key),
),
);
}
if (this.excludedMcpServers) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(
([key]) => !this.excludedMcpServers?.includes(key),
),
);
}
return mcpServers;
}
addMcpServers(servers: Record<string, MCPServerConfig>): void {
@@ -1065,7 +1153,10 @@ export class Config {
}
getExtensionContextFilePaths(): string[] {
return this.extensionContextFilePaths;
return [
...this.extensionContextFilePaths,
...(this.outputLanguageFilePath ? [this.outputLanguageFilePath] : []),
];
}
getExperimentalZedIntegration(): boolean {
@@ -1080,16 +1171,54 @@ export class Config {
return this.listExtensions;
}
getExtensionManagement(): boolean {
return this.extensionManagement;
getExtensionManager(): ExtensionManager {
return this.extensionManager;
}
getExtensions(): GeminiCLIExtension[] {
return this._extensions;
getExtensions(): Extension[] {
const extensions = this.extensionManager.getLoadedExtensions();
if (this.overrideExtensions) {
return extensions.filter((e) =>
this.overrideExtensions?.includes(e.name),
);
} else {
return extensions;
}
}
getActiveExtensions(): Extension[] {
return this.getExtensions().filter((e) => e.isActive);
}
getBlockedMcpServers(): Array<{ name: string; extensionName: string }> {
return this._blockedMcpServers;
const mcpServers = { ...(this.mcpServers || {}) };
const extensions = this.getActiveExtensions();
for (const extension of extensions) {
Object.entries(extension.config.mcpServers || {}).forEach(
([key, server]) => {
if (mcpServers[key]) return;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
const blockedMcpServers: Array<{ name: string; extensionName: string }> =
[];
if (this.allowedMcpServers) {
Object.entries(mcpServers).forEach(([key, server]) => {
const isAllowed = this.allowedMcpServers?.includes(key);
if (!isAllowed) {
blockedMcpServers.push({
name: key,
extensionName: server.extensionName || '',
});
}
});
}
return blockedMcpServers;
}
getNoBrowser(): boolean {
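
To illustrate the merge-and-filter behaviour that `getMcpServers()` and `getBlockedMcpServers()` now implement, here is a standalone sketch with invented server names and an invented extension name; it mirrors the logic in the hunk above but is not the actual implementation.

```typescript
// Invented example data: servers from settings plus one contributed by an
// active extension, then filtered by an allow-list.
const settingsServers: Record<
  string,
  { command: string; extensionName?: string }
> = {
  'local-tools': { command: 'node tools.js' },
};
const extensionServers = { 'ext-search': { command: 'node search.js' } };
const allowedMcpServers = ['local-tools'];

// 1. Extension servers are merged in only when the key is not already taken,
//    and are tagged with the contributing extension's name.
let merged = { ...settingsServers };
for (const [key, server] of Object.entries(extensionServers)) {
  if (!merged[key]) {
    merged[key] = { ...server, extensionName: 'search-extension' };
  }
}

// 2. With an allow-list configured, everything not on it is dropped from
//    getMcpServers() and reported by getBlockedMcpServers() instead.
merged = Object.fromEntries(
  Object.entries(merged).filter(([key]) => allowedMcpServers.includes(key)),
);

console.log(merged);
// -> { 'local-tools': { command: 'node tools.js' } }
// getBlockedMcpServers() would report:
// [{ name: 'ext-search', extensionName: 'search-extension' }]
```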

View File

@@ -824,6 +824,7 @@ export class CoreToolScheduler {
*/
const shouldAutoDeny =
!this.config.isInteractive() &&
!this.config.getIdeMode() &&
!this.config.getExperimentalZedIntegration() &&
this.config.getInputFormat() !== InputFormat.STREAM_JSON;

View File

@@ -752,8 +752,6 @@ export class OpenAIContentConverter {
usage.prompt_tokens_details?.cached_tokens ??
extendedUsage.cached_tokens ??
0;
const thinkingTokens =
usage.completion_tokens_details?.reasoning_tokens || 0;
// If we only have total tokens but no breakdown, estimate the split
// Typically input is ~70% and output is ~30% for most conversations
@@ -771,7 +769,6 @@ export class OpenAIContentConverter {
candidatesTokenCount: finalCompletionTokens,
totalTokenCount: totalTokens,
cachedContentTokenCount: cachedTokens,
thoughtsTokenCount: thinkingTokens,
};
}
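
The 70/30 estimate mentioned in the comment only applies when the provider reports a bare total with no prompt/completion breakdown. A rough worked example with invented numbers (the exact rounding used by the converter is not shown in this hunk):

```typescript
// Illustrative only: estimating the prompt/completion split from a bare total.
const totalTokens = 1000;
const estimatedPromptTokens = Math.round(totalTokens * 0.7); // 700
const estimatedCompletionTokens = totalTokens - estimatedPromptTokens; // 300
```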

View File

@@ -317,22 +317,15 @@ export class ContentGenerationPipeline {
}
private buildReasoningConfig(): Record<string, unknown> {
// Reasoning configuration for OpenAI-compatible endpoints is highly fragmented.
// For example, across common providers and models:
//
// - deepseek-reasoner — thinking is enabled by default and cannot be disabled
// - glm-4.7 — thinking is enabled by default; can be disabled via `extra_body.thinking.enabled`
// - kimi-k2-thinking — thinking is enabled by default and cannot be disabled
// - gpt-5.x series — thinking is enabled by default; can be disabled via `reasoning.effort`
// - qwen3 series — model-dependent; can be manually disabled via `extra_body.enable_thinking`
//
// Given this inconsistency, we choose not to set any reasoning config here and
// instead rely on each model's default behavior.
const reasoning = this.contentGeneratorConfig.reasoning;
// We plan to introduce provider- and model-specific settings to enable more
// fine-grained control over reasoning configuration.
if (reasoning === false) {
return {};
}
return {};
return {
reasoning_effort: reasoning?.effort ?? 'medium',
};
}
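
Read as plain behaviour, the new branch means: an explicit `reasoning: false` still sends nothing, otherwise a `reasoning_effort` field defaulting to `'medium'` is emitted. A small self-contained sketch of that behaviour; the `ReasoningSetting` type and the set of effort values are assumptions for illustration.

```typescript
type ReasoningSetting =
  | false
  | { effort?: 'low' | 'medium' | 'high' }
  | undefined;

// Mirrors the branch above: disabled -> empty config, otherwise reasoning_effort.
function buildReasoningConfigSketch(
  reasoning: ReasoningSetting,
): Record<string, unknown> {
  if (reasoning === false) {
    return {};
  }
  return { reasoning_effort: reasoning?.effort ?? 'medium' };
}

buildReasoningConfigSketch(false);              // {}
buildReasoningConfigSketch(undefined);          // { reasoning_effort: 'medium' }
buildReasoningConfigSketch({ effort: 'high' }); // { reasoning_effort: 'high' }
```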
/**

View File

@@ -58,6 +58,8 @@ export class DefaultOpenAICompatibleProvider
}
getDefaultGenerationConfig(): GenerateContentConfig {
return {};
return {
topP: 0.95,
};
}
}

View File

@@ -0,0 +1,121 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import {
convertClaudeToQwenConfig,
mergeClaudeConfigs,
isClaudePluginConfig,
type ClaudePluginConfig,
type ClaudeMarketplacePluginConfig,
} from './claude-converter.js';
describe('convertClaudeToQwenConfig', () => {
it('should convert basic Claude config', () => {
const claudeConfig: ClaudePluginConfig = {
name: 'claude-plugin',
version: '1.0.0',
};
const result = convertClaudeToQwenConfig(claudeConfig);
expect(result.name).toBe('claude-plugin');
expect(result.version).toBe('1.0.0');
});
it('should convert config with basic fields only', () => {
const claudeConfig: ClaudePluginConfig = {
name: 'full-plugin',
version: '1.0.0',
commands: 'commands',
agents: ['agents/agent1.md'],
skills: ['skills/skill1'],
};
const result = convertClaudeToQwenConfig(claudeConfig);
// Commands, skills, agents are collected as directories, not in config
expect(result.name).toBe('full-plugin');
expect(result.version).toBe('1.0.0');
expect(result.mcpServers).toBeUndefined();
});
it('should throw error for missing name', () => {
const invalidConfig = {
version: '1.0.0',
} as ClaudePluginConfig;
expect(() => convertClaudeToQwenConfig(invalidConfig)).toThrow();
});
});
describe('mergeClaudeConfigs', () => {
it('should merge marketplace and plugin configs', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'marketplace-name',
version: '2.0.0',
source: 'github:org/repo',
description: 'From marketplace',
};
const pluginConfig: ClaudePluginConfig = {
name: 'plugin-name',
version: '1.0.0',
commands: 'commands',
};
const merged = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
// Marketplace takes precedence
expect(merged.name).toBe('marketplace-name');
expect(merged.version).toBe('2.0.0');
expect(merged.description).toBe('From marketplace');
// Plugin fields preserved
expect(merged.commands).toBe('commands');
});
it('should work with strict=false and no plugin config', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'standalone',
version: '1.0.0',
source: 'local',
strict: false,
commands: 'commands',
};
const merged = mergeClaudeConfigs(marketplacePlugin);
expect(merged.name).toBe('standalone');
expect(merged.commands).toBe('commands');
});
it('should throw error for strict mode without plugin config', () => {
const marketplacePlugin: ClaudeMarketplacePluginConfig = {
name: 'strict-plugin',
version: '1.0.0',
source: 'github:org/repo',
strict: true,
};
expect(() => mergeClaudeConfigs(marketplacePlugin)).toThrow();
});
});
describe('isClaudePluginConfig', () => {
it('should identify Claude plugin directory', () => {
const extensionDir = '/tmp/test-extension';
const marketplace = {
marketplaceSource: 'https://test.com',
pluginName: 'test-plugin',
};
// This will check if marketplace.json exists and contains the plugin
// Note: In real usage, this requires actual file system setup
expect(typeof isClaudePluginConfig(extensionDir, marketplace)).toBe(
'boolean',
);
});
});

View File

@@ -0,0 +1,500 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Converter for Claude Code plugins to Qwen Code format.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { glob } from 'glob';
import type { ExtensionConfig } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import type { MCPServerConfig } from '../config/config.js';
export interface ClaudePluginConfig {
name: string;
version: string;
description?: string;
author?: { name?: string; email?: string; url?: string };
homepage?: string;
repository?: string;
license?: string;
keywords?: string[];
commands?: string | string[];
agents?: string | string[];
skills?: string | string[];
hooks?: string;
mcpServers?: string | Record<string, MCPServerConfig>;
outputStyles?: string | string[];
lspServers?: string;
}
export type ClaudePluginSource =
| { source: 'github'; repo: string }
| { source: 'url'; url: string };
export interface ClaudeMarketplacePluginConfig extends ClaudePluginConfig {
source: string | ClaudePluginSource;
category?: string;
strict?: boolean;
tags?: string[];
}
export interface ClaudeMarketplaceConfig {
name: string;
owner: { name: string; email: string };
plugins: ClaudeMarketplacePluginConfig[];
metadata?: { description?: string; version?: string; pluginRoot?: string };
}
/**
* Converts a Claude plugin config to Qwen Code format.
* @param claudeConfig Claude plugin configuration
* @returns Qwen ExtensionConfig
*/
export function convertClaudeToQwenConfig(
claudeConfig: ClaudePluginConfig,
): ExtensionConfig {
// Validate required fields
if (!claudeConfig.name || !claudeConfig.version) {
throw new Error('Claude plugin config must have name and version fields');
}
// Parse MCP servers
let mcpServers: Record<string, MCPServerConfig> | undefined;
if (claudeConfig.mcpServers) {
if (typeof claudeConfig.mcpServers === 'string') {
// TODO: Load from file path
console.warn(
`[Claude Converter] MCP servers path not yet supported: ${claudeConfig.mcpServers}`,
);
} else {
mcpServers = claudeConfig.mcpServers;
}
}
// Warn about unsupported fields
if (claudeConfig.hooks) {
console.warn(
`[Claude Converter] Hooks are not yet supported in ${claudeConfig.name}`,
);
}
if (claudeConfig.outputStyles) {
console.warn(
`[Claude Converter] Output styles are not yet supported in ${claudeConfig.name}`,
);
}
if (claudeConfig.lspServers) {
console.warn(
`[Claude Converter] LSP servers are not yet supported in ${claudeConfig.name}`,
);
}
// Direct field mapping - commands, skills, agents will be collected as folders
return {
name: claudeConfig.name,
version: claudeConfig.version,
mcpServers,
};
}
/**
* Converts a complete Claude plugin package to Qwen Code format.
* Creates a new temporary directory with:
* 1. Converted qwen-extension.json
* 2. Commands, skills, and agents collected to respective folders
* 3. MCP servers resolved from JSON files if needed
* 4. All other files preserved
*
* @param extensionDir Path to the Claude plugin directory
* @param marketplace Marketplace information for loading marketplace.json
* @returns Object containing converted config and the temporary directory path
*/
export async function convertClaudePluginPackage(
extensionDir: string,
marketplace: { marketplaceSource: string; pluginName: string },
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
// Step 1: Load marketplace.json
const marketplaceJsonPath = path.join(
extensionDir,
'.claude-plugin',
'marketplace.json',
);
if (!fs.existsSync(marketplaceJsonPath)) {
throw new Error(
`Marketplace configuration not found at ${marketplaceJsonPath}`,
);
}
const marketplaceContent = fs.readFileSync(marketplaceJsonPath, 'utf-8');
const marketplaceConfig: ClaudeMarketplaceConfig =
JSON.parse(marketplaceContent);
// Find the target plugin in marketplace
const marketplacePlugin = marketplaceConfig.plugins.find(
(p) => p.name === marketplace.pluginName,
);
if (!marketplacePlugin) {
throw new Error(
`Plugin ${marketplace.pluginName} not found in marketplace.json`,
);
}
// Step 2: Resolve plugin source directory based on source field
const source = marketplacePlugin.source;
let pluginSourceDir: string;
if (typeof source === 'string') {
// Check if it's a URL (online path)
if (source.startsWith('http://') || source.startsWith('https://')) {
throw new Error(
`Online plugin sources are not supported in convertClaudePluginPackage. ` +
`Plugin ${marketplace.pluginName} has source: ${source}. ` +
`This should be downloaded and resolved before calling this function.`,
);
}
// Relative path within marketplace directory
const marketplaceDir = marketplaceConfig.metadata?.pluginRoot
? path.join(extensionDir, marketplaceConfig.metadata.pluginRoot)
: extensionDir;
pluginSourceDir = path.join(marketplaceDir, source);
} else if (source.source === 'github' || source.source === 'url') {
throw new Error(
`Online plugin sources (github/url) are not supported in convertClaudePluginPackage. ` +
`Plugin ${marketplace.pluginName} has source type: ${source.source}. ` +
`This should be downloaded and resolved before calling this function.`,
);
} else {
throw new Error(
`Unsupported plugin source type for ${marketplace.pluginName}: ${JSON.stringify(source)}`,
);
}
if (!fs.existsSync(pluginSourceDir)) {
throw new Error(`Plugin source directory not found: ${pluginSourceDir}`);
}
// Step 3: Load and merge plugin.json if exists (based on strict mode)
const strict = marketplacePlugin.strict ?? true;
let mergedConfig: ClaudePluginConfig;
if (strict) {
const pluginJsonPath = path.join(
pluginSourceDir,
'.claude-plugin',
'plugin.json',
);
if (!fs.existsSync(pluginJsonPath)) {
throw new Error(`Strict mode requires plugin.json at ${pluginJsonPath}`);
}
const pluginContent = fs.readFileSync(pluginJsonPath, 'utf-8');
const pluginConfig: ClaudePluginConfig = JSON.parse(pluginContent);
mergedConfig = mergeClaudeConfigs(marketplacePlugin, pluginConfig);
} else {
mergedConfig = marketplacePlugin as ClaudePluginConfig;
}
// Step 4: Resolve MCP servers from JSON files if needed
if (mergedConfig.mcpServers && typeof mergedConfig.mcpServers === 'string') {
const mcpServersPath = path.isAbsolute(mergedConfig.mcpServers)
? mergedConfig.mcpServers
: path.join(pluginSourceDir, mergedConfig.mcpServers);
if (fs.existsSync(mcpServersPath)) {
try {
const mcpContent = fs.readFileSync(mcpServersPath, 'utf-8');
mergedConfig.mcpServers = JSON.parse(mcpContent) as Record<
string,
MCPServerConfig
>;
} catch (error) {
console.warn(
`Failed to parse MCP servers file ${mcpServersPath}: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
}
// Step 5: Create temporary directory for converted extension
const tmpDir = await ExtensionStorage.createTmpDir();
try {
// Step 6: Copy plugin files to temporary directory
await copyDirectory(pluginSourceDir, tmpDir);
// Step 7: Collect commands to commands folder
if (mergedConfig.commands) {
const commandsDestDir = path.join(tmpDir, 'commands');
await collectResources(
mergedConfig.commands,
pluginSourceDir,
commandsDestDir,
);
}
// Step 8: Collect skills to skills folder
if (mergedConfig.skills) {
const skillsDestDir = path.join(tmpDir, 'skills');
await collectResources(
mergedConfig.skills,
pluginSourceDir,
skillsDestDir,
);
}
// Step 9: Collect agents to agents folder
if (mergedConfig.agents) {
const agentsDestDir = path.join(tmpDir, 'agents');
await collectResources(
mergedConfig.agents,
pluginSourceDir,
agentsDestDir,
);
}
// Step 10: Convert to Qwen format config
const qwenConfig = convertClaudeToQwenConfig(mergedConfig);
// Step 11: Write qwen-extension.json
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
fs.writeFileSync(
qwenConfigPath,
JSON.stringify(qwenConfig, null, 2),
'utf-8',
);
return {
config: qwenConfig,
convertedDir: tmpDir,
};
} catch (error) {
// Clean up temporary directory on error
try {
fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
throw error;
}
}
/**
* Recursively copies a directory and its contents.
* @param source Source directory path
* @param destination Destination directory path
*/
async function copyDirectory(
source: string,
destination: string,
): Promise<void> {
// Create destination directory if it doesn't exist
if (!fs.existsSync(destination)) {
fs.mkdirSync(destination, { recursive: true });
}
const entries = fs.readdirSync(source, { withFileTypes: true });
for (const entry of entries) {
const sourcePath = path.join(source, entry.name);
const destPath = path.join(destination, entry.name);
if (entry.isDirectory()) {
await copyDirectory(sourcePath, destPath);
} else {
fs.copyFileSync(sourcePath, destPath);
}
}
}
/**
* Collects resources (commands, skills, agents) to a destination folder.
* If a resource is already in the destination folder, it will be skipped.
* @param resourcePaths String or array of resource paths
* @param pluginRoot Root directory of the plugin
* @param destDir Destination directory for collected resources
*/
async function collectResources(
resourcePaths: string | string[],
pluginRoot: string,
destDir: string,
): Promise<void> {
const paths = Array.isArray(resourcePaths) ? resourcePaths : [resourcePaths];
// Create destination directory
if (!fs.existsSync(destDir)) {
fs.mkdirSync(destDir, { recursive: true });
}
// Get the destination folder name (e.g., 'commands', 'skills', 'agents')
const destFolderName = path.basename(destDir);
for (const resourcePath of paths) {
const resolvedPath = path.isAbsolute(resourcePath)
? resourcePath
: path.join(pluginRoot, resourcePath);
if (!fs.existsSync(resolvedPath)) {
console.warn(`Resource path not found: ${resolvedPath}`);
continue;
}
const stat = fs.statSync(resolvedPath);
if (stat.isDirectory()) {
// If it's a directory, check if it's already the destination folder
const dirName = path.basename(resolvedPath);
const parentDir = path.dirname(resolvedPath);
// If the directory is already named as the destination folder (e.g., 'commands')
// and it's at the plugin root level, skip it
if (dirName === destFolderName && parentDir === pluginRoot) {
console.log(
`Skipping ${resolvedPath} as it's already in the correct location`,
);
continue;
}
// Copy all files from the directory
const files = await glob('**/*', {
cwd: resolvedPath,
nodir: true,
dot: false,
});
for (const file of files) {
const srcFile = path.join(resolvedPath, file);
const destFile = path.join(destDir, file);
// Ensure parent directory exists
const destFileDir = path.dirname(destFile);
if (!fs.existsSync(destFileDir)) {
fs.mkdirSync(destFileDir, { recursive: true });
}
fs.copyFileSync(srcFile, destFile);
}
} else {
// If it's a file, check if it's already in the destination folder
const relativePath = path.relative(pluginRoot, resolvedPath);
// Check if the file path starts with the destination folder name
// e.g., 'commands/test1.md' or 'commands/me/test.md' should be skipped
const segments = relativePath.split(path.sep);
if (segments.length > 0 && segments[0] === destFolderName) {
console.log(
`Skipping ${resolvedPath} as it's already in ${destFolderName}/`,
);
continue;
}
// Copy the file to destination
const fileName = path.basename(resolvedPath);
const destFile = path.join(destDir, fileName);
fs.copyFileSync(resolvedPath, destFile);
}
}
}
/**
* Merges marketplace plugin config with the actual plugin.json config.
* Marketplace config takes precedence for conflicting fields.
* @param marketplacePlugin Marketplace plugin definition
* @param pluginConfig Actual plugin.json config (optional if strict=false)
* @returns Merged Claude plugin config
*/
export function mergeClaudeConfigs(
marketplacePlugin: ClaudeMarketplacePluginConfig,
pluginConfig?: ClaudePluginConfig,
): ClaudePluginConfig {
if (!pluginConfig && marketplacePlugin.strict !== false) {
throw new Error(
`Plugin ${marketplacePlugin.name} requires plugin.json (strict mode)`,
);
}
// Start with plugin.json config (if exists)
const merged: ClaudePluginConfig = pluginConfig
? { ...pluginConfig }
: {
name: marketplacePlugin.name,
version: '1.0.0', // Default version if not in marketplace
};
// Overlay marketplace config (takes precedence)
if (marketplacePlugin.name) merged.name = marketplacePlugin.name;
if (marketplacePlugin.version) merged.version = marketplacePlugin.version;
if (marketplacePlugin.description)
merged.description = marketplacePlugin.description;
if (marketplacePlugin.author) merged.author = marketplacePlugin.author;
if (marketplacePlugin.homepage) merged.homepage = marketplacePlugin.homepage;
if (marketplacePlugin.repository)
merged.repository = marketplacePlugin.repository;
if (marketplacePlugin.license) merged.license = marketplacePlugin.license;
if (marketplacePlugin.keywords) merged.keywords = marketplacePlugin.keywords;
if (marketplacePlugin.commands) merged.commands = marketplacePlugin.commands;
if (marketplacePlugin.agents) merged.agents = marketplacePlugin.agents;
if (marketplacePlugin.skills) merged.skills = marketplacePlugin.skills;
if (marketplacePlugin.hooks) merged.hooks = marketplacePlugin.hooks;
if (marketplacePlugin.mcpServers)
merged.mcpServers = marketplacePlugin.mcpServers;
if (marketplacePlugin.outputStyles)
merged.outputStyles = marketplacePlugin.outputStyles;
if (marketplacePlugin.lspServers)
merged.lspServers = marketplacePlugin.lspServers;
return merged;
}
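// Illustrative usage (not part of the original source; plugin names and versions are hypothetical):
//
//   const merged = mergeClaudeConfigs(
//     { name: 'formatter', version: '2.0.0' },                              // marketplace entry
//     { name: 'formatter', version: '1.3.0', description: 'Formats code' }, // plugin.json
//   );
//   // merged.version === '2.0.0'        (marketplace value wins)
//   // merged.description === 'Formats code'  (kept from plugin.json)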
/**
 * Checks whether a directory contains a Claude plugin marketplace config
 * (`.claude-plugin/marketplace.json`) that lists the given plugin.
 * @param extensionDir Path to the directory to check
 * @param marketplace Marketplace source and plugin name to look up
 * @returns true if the marketplace config exists and includes the named plugin
 */
export function isClaudePluginConfig(
extensionDir: string,
marketplace: { marketplaceSource: string; pluginName: string },
) {
const marketplaceConfigFilePath = path.join(
extensionDir,
'.claude-plugin/marketplace.json',
);
if (!fs.existsSync(marketplaceConfigFilePath)) {
return false;
}
const marketplaceConfigContent = fs.readFileSync(
marketplaceConfigFilePath,
'utf-8',
);
const marketplaceConfig = JSON.parse(marketplaceConfigContent);
if (typeof marketplaceConfig !== 'object' || marketplaceConfig === null) {
return false;
}
const marketplaceConfigObj = marketplaceConfig as Record<string, unknown>;
// Must have name and owner
if (
typeof marketplaceConfigObj['name'] !== 'string' ||
typeof marketplaceConfigObj['owner'] !== 'object'
) {
return false;
}
if (!Array.isArray(marketplaceConfigObj['plugins'])) {
return false;
}
const marketplacePluginObj = marketplaceConfigObj['plugins'].find(
(plugin: ClaudeMarketplacePluginConfig) =>
plugin.name === marketplace.pluginName,
);
if (!marketplacePluginObj) return false;
return true;
}
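// Illustrative usage (hypothetical paths and names):
//
//   isClaudePluginConfig('/path/to/cloned-marketplace', {
//     marketplaceSource: 'https://example.com/marketplace',
//     pluginName: 'formatter',
//   });
//   // true only if .claude-plugin/marketplace.json exists, has a string `name`,
//   // an object `owner`, and a `plugins` array containing an entry named 'formatter'.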

View File

@@ -0,0 +1,832 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
INSTALL_METADATA_FILENAME,
EXTENSIONS_CONFIG_FILENAME,
} from './variables.js';
import { QWEN_DIR } from '../config/storage.js';
import {
ExtensionManager,
SettingScope,
type ExtensionManagerOptions,
validateName,
getExtensionId,
hashValue,
extensionConsentString,
maybeRequestConsentOrFail,
parseInstallSource,
type ExtensionConfig,
} from './extensionManager.js';
import type { MCPServerConfig, ExtensionInstallMetadata } from '../index.js';
const mockGit = {
clone: vi.fn(),
getRemotes: vi.fn(),
fetch: vi.fn(),
checkout: vi.fn(),
listRemote: vi.fn(),
revparse: vi.fn(),
path: vi.fn(),
};
vi.mock('simple-git', () => ({
simpleGit: vi.fn((path: string) => {
mockGit.path.mockReturnValue(path);
return mockGit;
}),
}));
vi.mock('./github.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('./github.js')>();
return {
...actual,
downloadFromGitHubRelease: vi
.fn()
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
};
});
const mockHomedir = vi.hoisted(() => vi.fn());
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: mockHomedir,
};
});
const mockLogExtensionEnable = vi.hoisted(() => vi.fn());
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
const mockLogExtensionDisable = vi.hoisted(() => vi.fn());
const mockLogExtensionUpdateEvent = vi.hoisted(() => vi.fn());
vi.mock('../telemetry/loggers.js', () => ({
logExtensionEnable: mockLogExtensionEnable,
logExtensionUpdateEvent: mockLogExtensionUpdateEvent,
}));
vi.mock('../index.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../index.js')>();
return {
...actual,
logExtensionEnable: mockLogExtensionEnable,
logExtensionInstallEvent: mockLogExtensionInstallEvent,
logExtensionUninstall: mockLogExtensionUninstall,
logExtensionDisable: mockLogExtensionDisable,
};
});
const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
function createExtension({
extensionsDir = 'extensions-dir',
name = 'my-extension',
version = '1.0.0',
addContextFile = false,
contextFileName = undefined as string | undefined,
mcpServers = {} as Record<string, MCPServerConfig>,
installMetadata = undefined as ExtensionInstallMetadata | undefined,
} = {}): string {
const extDir = path.join(extensionsDir, name);
fs.mkdirSync(extDir, { recursive: true });
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify({ name, version, contextFileName, mcpServers }),
);
if (addContextFile) {
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
}
if (contextFileName) {
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
}
if (installMetadata) {
fs.writeFileSync(
path.join(extDir, INSTALL_METADATA_FILENAME),
JSON.stringify(installMetadata),
);
}
return extDir;
}
describe('extension tests', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let userExtensionsDir: string;
beforeEach(() => {
tempHomeDir = fs.mkdtempSync(
path.join(os.tmpdir(), 'qwen-code-test-home-'),
);
tempWorkspaceDir = fs.mkdtempSync(
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
);
userExtensionsDir = path.join(tempHomeDir, EXTENSIONS_DIRECTORY_NAME);
fs.mkdirSync(userExtensionsDir, { recursive: true });
mockHomedir.mockReturnValue(tempHomeDir);
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
Object.values(mockGit).forEach((fn) => fn.mockReset());
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
vi.restoreAllMocks();
});
function createExtensionManager(
options: Partial<ExtensionManagerOptions> = {},
): ExtensionManager {
return new ExtensionManager({
workspaceDir: tempWorkspaceDir,
isWorkspaceTrusted: true,
...options,
});
}
describe('loadExtension', () => {
it('should include extension path in loaded extension', async () => {
const extensionDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extensionDir, { recursive: true });
createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].path).toBe(extensionDir);
expect(extensions[0].config.name).toBe('test-extension');
});
it('should load context file path when QWEN.md is present', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
addContextFile: true,
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '2.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(2);
const ext1 = extensions.find((e) => e.config.name === 'ext1');
const ext2 = extensions.find((e) => e.config.name === 'ext2');
expect(ext1?.contextFiles).toEqual([
path.join(userExtensionsDir, 'ext1', 'QWEN.md'),
]);
expect(ext2?.contextFiles).toEqual([]);
});
it('should load context file path from the extension config', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
addContextFile: false,
contextFileName: 'my-context-file.md',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const ext1 = extensions.find((e) => e.config.name === 'ext1');
expect(ext1?.contextFiles).toEqual([
path.join(userExtensionsDir, 'ext1', 'my-context-file.md'),
]);
});
it('should skip extensions with invalid JSON and log a warning', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
// Good extension
createExtension({
extensionsDir: userExtensionsDir,
name: 'good-ext',
version: '1.0.0',
});
// Bad extension
const badExtDir = path.join(userExtensionsDir, 'bad-ext');
fs.mkdirSync(badExtDir);
const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
fs.writeFileSync(badConfigPath, '{ "name": "bad-ext"'); // Malformed
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].config.name).toBe('good-ext');
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
);
consoleSpy.mockRestore();
});
it('should skip extensions with missing name and log a warning', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
// Good extension
createExtension({
extensionsDir: userExtensionsDir,
name: 'good-ext',
version: '1.0.0',
});
// Bad extension
const badExtDir = path.join(userExtensionsDir, 'bad-ext-no-name');
fs.mkdirSync(badExtDir);
const badConfigPath = path.join(badExtDir, EXTENSIONS_CONFIG_FILENAME);
fs.writeFileSync(badConfigPath, JSON.stringify({ version: '1.0.0' }));
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
expect(extensions[0].config.name).toBe('good-ext');
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining(`Warning: Skipping extension in ${badExtDir}`),
);
consoleSpy.mockRestore();
});
it('should filter trust out of mcp servers', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
trust: true,
} as MCPServerConfig,
},
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
// trust should be filtered from extension.mcpServers (not config.mcpServers)
expect(extensions[0].mcpServers?.['test-server']?.trust).toBeUndefined();
// config.mcpServers should still have trust (original config)
expect(extensions[0].config.mcpServers?.['test-server']?.trust).toBe(
true,
);
});
});
describe('enableExtension / disableExtension', () => {
it('should disable an extension at the user scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('my-extension', SettingScope.User);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should disable an extension at the workspace scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension(
'my-extension',
SettingScope.Workspace,
tempWorkspaceDir,
);
expect(manager.isEnabled('my-extension', tempHomeDir)).toBe(true);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should handle disabling the same extension twice', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('my-extension', SettingScope.User);
manager.disableExtension('my-extension', SettingScope.User);
expect(manager.isEnabled('my-extension', tempWorkspaceDir)).toBe(false);
});
it('should throw an error if you request system scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'my-extension',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
expect(() =>
manager.disableExtension('my-extension', SettingScope.System),
).toThrow('System and SystemDefaults scopes are not supported.');
});
it('should enable an extension at the user scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('ext1', SettingScope.User);
expect(manager.isEnabled('ext1')).toBe(false);
manager.enableExtension('ext1', SettingScope.User);
expect(manager.isEnabled('ext1')).toBe(true);
});
it('should enable an extension at the workspace scope', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
manager.disableExtension('ext1', SettingScope.Workspace);
expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(false);
manager.enableExtension('ext1', SettingScope.Workspace);
expect(manager.isEnabled('ext1', tempWorkspaceDir)).toBe(true);
});
});
describe('validateExtensionOverrides', () => {
it('should mark all extensions as active if no enabled extensions are provided', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(2);
expect(extensions.every((e) => e.isActive)).toBe(true);
});
it('should mark only the enabled extensions as active', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext3',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['ext1', 'ext3'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
expect(extensions.find((e) => e.name === 'ext2')?.isActive).toBe(false);
expect(extensions.find((e) => e.name === 'ext3')?.isActive).toBe(true);
});
it('should mark all extensions as inactive when "none" is provided', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext2',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['none'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.every((e) => !e.isActive)).toBe(true);
});
it('should handle case-insensitivity', async () => {
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['EXT1'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions.find((e) => e.name === 'ext1')?.isActive).toBe(true);
});
it('should log an error for unknown extensions', async () => {
const consoleSpy = vi
.spyOn(console, 'error')
.mockImplementation(() => {});
createExtension({
extensionsDir: userExtensionsDir,
name: 'ext1',
version: '1.0.0',
});
const manager = createExtensionManager({
enabledExtensionOverrides: ['ext4'],
});
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
manager.validateExtensionOverrides(extensions);
expect(consoleSpy).toHaveBeenCalledWith('Extension not found: ext4');
consoleSpy.mockRestore();
});
});
describe('loadExtensionConfig', () => {
it('should resolve environment variables in extension configuration', async () => {
process.env['TEST_API_KEY'] = 'test-api-key-123';
process.env['TEST_DB_URL'] = 'postgresql://localhost:5432/testdb';
try {
const extDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extDir);
const extensionConfig = {
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
env: {
API_KEY: '$TEST_API_KEY',
DATABASE_URL: '${TEST_DB_URL}',
STATIC_VALUE: 'no-substitution',
},
},
},
};
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify(extensionConfig),
);
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const extension = extensions[0];
expect(extension.config.name).toBe('test-extension');
expect(extension.config.mcpServers).toBeDefined();
const serverConfig = extension.config.mcpServers?.['test-server'];
expect(serverConfig).toBeDefined();
expect(serverConfig?.env).toBeDefined();
expect(serverConfig?.env?.['API_KEY']).toBe('test-api-key-123');
expect(serverConfig?.env?.['DATABASE_URL']).toBe(
'postgresql://localhost:5432/testdb',
);
expect(serverConfig?.env?.['STATIC_VALUE']).toBe('no-substitution');
} finally {
delete process.env['TEST_API_KEY'];
delete process.env['TEST_DB_URL'];
}
});
it('should handle missing environment variables gracefully', async () => {
const extDir = path.join(userExtensionsDir, 'test-extension');
fs.mkdirSync(extDir);
const extensionConfig = {
name: 'test-extension',
version: '1.0.0',
mcpServers: {
'test-server': {
command: 'node',
args: ['server.js'],
env: {
MISSING_VAR: '$UNDEFINED_ENV_VAR',
MISSING_VAR_BRACES: '${ALSO_UNDEFINED}',
},
},
},
};
fs.writeFileSync(
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
JSON.stringify(extensionConfig),
);
const manager = createExtensionManager();
await manager.refreshCache();
const extensions = manager.getLoadedExtensions();
expect(extensions).toHaveLength(1);
const extension = extensions[0];
const serverConfig = extension.config.mcpServers!['test-server'];
expect(serverConfig.env).toBeDefined();
expect(serverConfig.env!['MISSING_VAR']).toBe('$UNDEFINED_ENV_VAR');
expect(serverConfig.env!['MISSING_VAR_BRACES']).toBe('${ALSO_UNDEFINED}');
});
});
});
describe('extensionManager utility functions', () => {
describe('validateName', () => {
it('should accept valid extension names', () => {
expect(() => validateName('my-extension')).not.toThrow();
expect(() => validateName('Extension123')).not.toThrow();
expect(() => validateName('test-ext-1')).not.toThrow();
expect(() => validateName('UPPERCASE')).not.toThrow();
});
it('should reject names with invalid characters', () => {
expect(() => validateName('my_extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my.extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my extension')).toThrow(
'Invalid extension name',
);
expect(() => validateName('my@ext')).toThrow('Invalid extension name');
});
it('should reject empty names', () => {
expect(() => validateName('')).toThrow('Invalid extension name');
});
});
describe('hashValue', () => {
it('should generate consistent hash for same input', () => {
const hash1 = hashValue('test-input');
const hash2 = hashValue('test-input');
expect(hash1).toBe(hash2);
});
it('should generate different hashes for different inputs', () => {
const hash1 = hashValue('input-1');
const hash2 = hashValue('input-2');
expect(hash1).not.toBe(hash2);
});
it('should generate a valid SHA256 hash', () => {
const hash = hashValue('test');
expect(hash).toMatch(/^[a-f0-9]{64}$/);
});
});
describe('getExtensionId', () => {
it('should use hashed name when no install metadata', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const id = getExtensionId(config);
expect(id).toBe(hashValue('test-ext'));
});
it('should use hashed source for local install', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const metadata = { type: 'local' as const, source: '/path/to/ext' };
const id = getExtensionId(config, metadata);
expect(id).toBe(hashValue('/path/to/ext'));
});
it('should use GitHub URL for git install', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const metadata = {
type: 'git' as const,
source: 'https://github.com/owner/repo',
};
const id = getExtensionId(config, metadata);
expect(id).toBe(hashValue('https://github.com/owner/repo'));
});
});
describe('extensionConsentString', () => {
it('should generate basic consent string', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const consent = extensionConsentString(config);
expect(consent).toContain('Installing extension "test-ext"');
expect(consent).toContain('Extensions may introduce unexpected behavior');
});
it('should include MCP servers in consent string', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
mcpServers: {
'my-server': { command: 'node', args: ['server.js'] },
},
};
const consent = extensionConsentString(config);
expect(consent).toContain(
'This extension will run the following MCP servers',
);
expect(consent).toContain('my-server');
});
it('should include commands in consent string', () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const consent = extensionConsentString(config, ['cmd1', 'cmd2']);
expect(consent).toContain(
'This extension will add the following commands',
);
expect(consent).toContain('cmd1, cmd2');
});
it('should include context file info', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
contextFileName: 'CONTEXT.md',
};
const consent = extensionConsentString(config);
expect(consent).toContain('CONTEXT.md');
});
it('should include excluded tools', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
excludeTools: ['tool1', 'tool2'],
};
const consent = extensionConsentString(config);
expect(consent).toContain('exclude the following core tools');
});
});
describe('maybeRequestConsentOrFail', () => {
it('should request consent for new installation', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(config, requestConsent, []);
expect(requestConsent).toHaveBeenCalledTimes(1);
});
it('should throw if user declines consent', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const requestConsent = vi.fn().mockResolvedValue(false);
await expect(
maybeRequestConsentOrFail(config, requestConsent, []),
).rejects.toThrow('Installation cancelled');
});
it('should skip consent if config unchanged during update', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '0.9.0',
};
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(
config,
requestConsent,
[],
previousConfig,
);
expect(requestConsent).not.toHaveBeenCalled();
});
it('should request consent if config changed during update', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
mcpServers: { server: { command: 'node' } },
};
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '0.9.0',
};
const requestConsent = vi.fn().mockResolvedValue(true);
await maybeRequestConsentOrFail(
config,
requestConsent,
[],
previousConfig,
);
expect(requestConsent).toHaveBeenCalledTimes(1);
});
});
describe('parseInstallSource', () => {
it('should parse HTTPS URL as git type', async () => {
const result = await parseInstallSource('https://github.com/owner/repo');
expect(result.type).toBe('git');
expect(result.source).toBe('https://github.com/owner/repo');
});
it('should parse HTTP URL as git type', async () => {
const result = await parseInstallSource('http://example.com/repo');
expect(result.type).toBe('git');
});
it('should parse git@ URL as git type', async () => {
const result = await parseInstallSource('git@github.com:owner/repo.git');
expect(result.type).toBe('git');
});
it('should parse sso:// URL as git type', async () => {
const result = await parseInstallSource('sso://some/path');
expect(result.type).toBe('git');
});
it('should parse marketplace URL correctly', async () => {
const result = await parseInstallSource(
'https://example.com/marketplace:plugin-name',
);
expect(result.type).toBe('marketplace');
expect(result.marketplace?.pluginName).toBe('plugin-name');
});
it('should throw for non-existent local path', async () => {
await expect(
parseInstallSource('/nonexistent/path/to/extension'),
).rejects.toThrow('Install source not found');
});
});
});

File diff suppressed because it is too large

View File

@@ -0,0 +1,730 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import * as path from 'node:path';
import * as os from 'node:os';
import {
getEnvContents,
maybePromptForSettings,
promptForSetting,
type ExtensionSetting,
updateSetting,
ExtensionSettingScope,
getScopedEnvContents,
} from './extensionSettings.js';
import type { ExtensionConfig } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import prompts from 'prompts';
import * as fsPromises from 'node:fs/promises';
import * as fs from 'node:fs';
import { KeychainTokenStorage } from '../mcp/token-storage/keychain-token-storage.js';
import { EXTENSION_SETTINGS_FILENAME } from './variables.js';
vi.mock('prompts');
vi.mock('os', async (importOriginal) => {
const mockedOs = await importOriginal<typeof os>();
return {
...mockedOs,
homedir: vi.fn(),
};
});
vi.mock(
'../mcp/token-storage/keychain-token-storage.js',
async (importOriginal) => {
const actual =
await importOriginal<
typeof import('../mcp/token-storage/keychain-token-storage.js')
>();
return {
...actual,
KeychainTokenStorage: vi.fn(),
};
},
);
describe('extensionSettings', () => {
let tempHomeDir: string;
let tempWorkspaceDir: string;
let extensionDir: string;
let mockKeychainData: Record<string, Record<string, string>>;
beforeEach(() => {
vi.clearAllMocks();
mockKeychainData = {};
vi.mocked(KeychainTokenStorage).mockImplementation(
(serviceName: string) => {
if (!mockKeychainData[serviceName]) {
mockKeychainData[serviceName] = {};
}
const keychainData = mockKeychainData[serviceName];
return {
getSecret: vi
.fn()
.mockImplementation(
async (key: string) => keychainData[key] || null,
),
setSecret: vi
.fn()
.mockImplementation(async (key: string, value: string) => {
keychainData[key] = value;
}),
deleteSecret: vi.fn().mockImplementation(async (key: string) => {
delete keychainData[key];
}),
listSecrets: vi
.fn()
.mockImplementation(async () => Object.keys(keychainData)),
isAvailable: vi.fn().mockResolvedValue(true),
} as unknown as KeychainTokenStorage;
},
);
    tempHomeDir = path.join(os.tmpdir(), `gemini-cli-test-home-${Date.now()}`);
tempWorkspaceDir = path.join(
os.tmpdir(),
`gemini-cli-test-workspace-${Date.now()}`,
);
extensionDir = path.join(tempHomeDir, '.gemini', 'extensions', 'test-ext');
// Spy and mock the method, but also create the directory so we can write to it.
vi.spyOn(ExtensionStorage.prototype, 'getExtensionDir').mockReturnValue(
extensionDir,
);
fs.mkdirSync(extensionDir, { recursive: true });
fs.mkdirSync(tempWorkspaceDir, { recursive: true });
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
vi.mocked(prompts).mockClear();
});
afterEach(() => {
fs.rmSync(tempHomeDir, { recursive: true, force: true });
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
vi.restoreAllMocks();
});
describe('maybePromptForSettings', () => {
const mockRequestSetting = vi.fn(
async (setting: ExtensionSetting) => `mock-${setting.envVar}`,
);
beforeEach(() => {
mockRequestSetting.mockClear();
});
it('should do nothing if settings are undefined', async () => {
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
});
it('should do nothing if settings are empty', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
});
it('should prompt for all settings if there is no previous config', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(2);
expect(mockRequestSetting).toHaveBeenCalledWith(config.settings![0]);
expect(mockRequestSetting).toHaveBeenCalledWith(config.settings![1]);
});
it('should only prompt for new settings', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const previousSettings = { VAR1: 'previous-VAR1' };
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(1);
expect(mockRequestSetting).toHaveBeenCalledWith(newConfig.settings![1]);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\nVAR2=mock-VAR2\n';
expect(actualContent).toBe(expectedContent);
});
it('should clear settings if new config has no settings', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{
name: 's2',
description: 'd2',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
const previousSettings = {
VAR1: 'previous-VAR1',
SENSITIVE_VAR: 'secret',
};
const userKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'secret');
const envPath = path.join(extensionDir, '.env');
await fsPromises.writeFile(envPath, 'VAR1=previous-VAR1');
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const actualContent = await fsPromises.readFile(envPath, 'utf-8');
expect(actualContent).toBe('');
expect(await userKeychain.getSecret('SENSITIVE_VAR')).toBeNull();
});
it('should remove sensitive settings from keychain', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{
name: 's1',
description: 'd1',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [],
};
const previousSettings = { SENSITIVE_VAR: 'secret' };
const userKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'secret');
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(await userKeychain.getSecret('SENSITIVE_VAR')).toBeNull();
});
it('should remove settings that are no longer in the config', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
const previousSettings = {
VAR1: 'previous-VAR1',
VAR2: 'previous-VAR2',
};
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\n';
expect(actualContent).toBe(expectedContent);
});
it('should reprompt if a setting changes sensitivity', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1', sensitive: false },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1', sensitive: true },
],
};
const previousSettings = { VAR1: 'previous-VAR1' };
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).toHaveBeenCalledTimes(1);
expect(mockRequestSetting).toHaveBeenCalledWith(newConfig.settings![0]);
// The value should now be in keychain, not the .env file.
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toBe('');
});
it('should not prompt if settings are identical', async () => {
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const newConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2' },
],
};
const previousSettings = {
VAR1: 'previous-VAR1',
VAR2: 'previous-VAR2',
};
await maybePromptForSettings(
newConfig,
'12345',
mockRequestSetting,
previousConfig,
previousSettings,
);
expect(mockRequestSetting).not.toHaveBeenCalled();
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
const expectedContent = 'VAR1=previous-VAR1\nVAR2=previous-VAR2\n';
expect(actualContent).toBe(expectedContent);
});
it('should wrap values with spaces in quotes', async () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
mockRequestSetting.mockResolvedValue('a value with spaces');
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
undefined,
undefined,
);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toBe('VAR1="a value with spaces"\n');
});
it('should not attempt to clear secrets if keychain is unavailable', async () => {
// Arrange
const mockIsAvailable = vi.fn().mockResolvedValue(false);
const mockListSecrets = vi.fn();
vi.mocked(KeychainTokenStorage).mockImplementation(
() =>
({
isAvailable: mockIsAvailable,
listSecrets: mockListSecrets,
deleteSecret: vi.fn(),
getSecret: vi.fn(),
setSecret: vi.fn(),
}) as unknown as KeychainTokenStorage,
);
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [], // Empty settings triggers clearSettings
};
const previousConfig: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [{ name: 's1', description: 'd1', envVar: 'VAR1' }],
};
// Act
await maybePromptForSettings(
config,
'12345',
mockRequestSetting,
previousConfig,
undefined,
);
// Assert
expect(mockIsAvailable).toHaveBeenCalled();
expect(mockListSecrets).not.toHaveBeenCalled();
});
});
describe('promptForSetting', () => {
it.each([
{
description:
'should use prompts with type "password" for sensitive settings',
setting: {
name: 'API Key',
description: 'Your secret key',
envVar: 'API_KEY',
sensitive: true,
},
expectedType: 'password',
promptValue: 'secret-key',
},
{
description:
'should use prompts with type "text" for non-sensitive settings',
setting: {
name: 'Username',
description: 'Your public username',
envVar: 'USERNAME',
sensitive: false,
},
expectedType: 'text',
promptValue: 'test-user',
},
{
description: 'should default to "text" if sensitive is undefined',
setting: {
name: 'Username',
description: 'Your public username',
envVar: 'USERNAME',
},
expectedType: 'text',
promptValue: 'test-user',
},
])('$description', async ({ setting, expectedType, promptValue }) => {
vi.mocked(prompts).mockResolvedValue({ value: promptValue });
const result = await promptForSetting(setting as ExtensionSetting);
expect(prompts).toHaveBeenCalledWith({
type: expectedType,
name: 'value',
message: `${setting.name}\n${setting.description}`,
});
expect(result).toBe(promptValue);
});
it('should return undefined if the user cancels the prompt', async () => {
vi.mocked(prompts).mockResolvedValue({ value: undefined });
const result = await promptForSetting({
name: 'Test',
description: 'Test desc',
envVar: 'TEST_VAR',
});
expect(result).toBeUndefined();
});
});
describe('getScopedEnvContents', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{
name: 's2',
description: 'd2',
envVar: 'SENSITIVE_VAR',
sensitive: true,
},
],
};
const extensionId = '12345';
it('should return combined contents from user .env and keychain for USER scope', async () => {
const userEnvPath = path.join(extensionDir, EXTENSION_SETTINGS_FILENAME);
await fsPromises.writeFile(userEnvPath, 'VAR1=user-value1');
const userKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('SENSITIVE_VAR', 'user-secret');
const contents = await getScopedEnvContents(
config,
extensionId,
ExtensionSettingScope.USER,
);
expect(contents).toEqual({
VAR1: 'user-value1',
SENSITIVE_VAR: 'user-secret',
});
});
it('should return combined contents from workspace .env and keychain for WORKSPACE scope', async () => {
const workspaceEnvPath = path.join(
tempWorkspaceDir,
EXTENSION_SETTINGS_FILENAME,
);
await fsPromises.writeFile(workspaceEnvPath, 'VAR1=workspace-value1');
const workspaceKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext 12345 ${tempWorkspaceDir}`,
);
await workspaceKeychain.setSecret('SENSITIVE_VAR', 'workspace-secret');
const contents = await getScopedEnvContents(
config,
extensionId,
ExtensionSettingScope.WORKSPACE,
);
expect(contents).toEqual({
VAR1: 'workspace-value1',
SENSITIVE_VAR: 'workspace-secret',
});
});
});
describe('getEnvContents (merged)', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2', sensitive: true },
{ name: 's3', description: 'd3', envVar: 'VAR3' },
],
};
const extensionId = '12345';
it('should merge user and workspace settings, with workspace taking precedence', async () => {
// User settings
const userEnvPath = path.join(extensionDir, EXTENSION_SETTINGS_FILENAME);
await fsPromises.writeFile(
userEnvPath,
'VAR1=user-value1\nVAR3=user-value3',
);
const userKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext ${extensionId}`,
);
await userKeychain.setSecret('VAR2', 'user-secret2');
// Workspace settings
const workspaceEnvPath = path.join(
tempWorkspaceDir,
EXTENSION_SETTINGS_FILENAME,
);
await fsPromises.writeFile(workspaceEnvPath, 'VAR1=workspace-value1');
const workspaceKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext ${extensionId} ${tempWorkspaceDir}`,
);
await workspaceKeychain.setSecret('VAR2', 'workspace-secret2');
const contents = await getEnvContents(config, extensionId);
expect(contents).toEqual({
VAR1: 'workspace-value1',
VAR2: 'workspace-secret2',
VAR3: 'user-value3',
});
});
});
describe('updateSetting', () => {
const config: ExtensionConfig = {
name: 'test-ext',
version: '1.0.0',
settings: [
{ name: 's1', description: 'd1', envVar: 'VAR1' },
{ name: 's2', description: 'd2', envVar: 'VAR2', sensitive: true },
],
};
const mockRequestSetting = vi.fn();
beforeEach(async () => {
const userEnvPath = path.join(extensionDir, '.env');
await fsPromises.writeFile(userEnvPath, 'VAR1=value1\n');
const userKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext 12345`,
);
await userKeychain.setSecret('VAR2', 'value2');
mockRequestSetting.mockClear();
});
it('should update a non-sensitive setting in USER scope', async () => {
mockRequestSetting.mockResolvedValue('new-value1');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.USER,
);
const expectedEnvPath = path.join(extensionDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toContain('VAR1=new-value1');
});
it('should update a non-sensitive setting in WORKSPACE scope', async () => {
mockRequestSetting.mockResolvedValue('new-workspace-value');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
const expectedEnvPath = path.join(tempWorkspaceDir, '.env');
const actualContent = await fsPromises.readFile(expectedEnvPath, 'utf-8');
expect(actualContent).toContain('VAR1=new-workspace-value');
});
it('should update a sensitive setting in USER scope', async () => {
mockRequestSetting.mockResolvedValue('new-value2');
await updateSetting(
config,
'12345',
'VAR2',
mockRequestSetting,
ExtensionSettingScope.USER,
);
const userKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext 12345`,
);
expect(await userKeychain.getSecret('VAR2')).toBe('new-value2');
});
it('should update a sensitive setting in WORKSPACE scope', async () => {
mockRequestSetting.mockResolvedValue('new-workspace-secret');
await updateSetting(
config,
'12345',
'VAR2',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
const workspaceKeychain = new KeychainTokenStorage(
        `Qwen Code Extensions test-ext 12345 ${tempWorkspaceDir}`,
);
expect(await workspaceKeychain.getSecret('VAR2')).toBe(
'new-workspace-secret',
);
});
it('should leave existing, unmanaged .env variables intact when updating in WORKSPACE scope', async () => {
// Setup a pre-existing .env file in the workspace with unmanaged variables
const workspaceEnvPath = path.join(tempWorkspaceDir, '.env');
const originalEnvContent =
'PROJECT_VAR_1=value_1\nPROJECT_VAR_2=value_2\nVAR1=original-value'; // VAR1 is managed by extension
await fsPromises.writeFile(workspaceEnvPath, originalEnvContent);
// Simulate updating an extension-managed non-sensitive setting
mockRequestSetting.mockResolvedValue('updated-value');
await updateSetting(
config,
'12345',
'VAR1',
mockRequestSetting,
ExtensionSettingScope.WORKSPACE,
);
// Read the .env file after update
const actualContent = await fsPromises.readFile(
workspaceEnvPath,
'utf-8',
);
// Assert that original variables are intact and extension variable is updated
expect(actualContent).toContain('PROJECT_VAR_1=value_1');
expect(actualContent).toContain('PROJECT_VAR_2=value_2');
expect(actualContent).toContain('VAR1=updated-value');
// Ensure no other unexpected changes or deletions
const lines = actualContent.split('\n').filter((line) => line.length > 0);
expect(lines).toHaveLength(3); // Should only have the three variables
});
});
});

View File

@@ -0,0 +1,298 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs/promises';
import * as fsSync from 'node:fs';
import * as dotenv from 'dotenv';
import * as path from 'node:path';
import { ExtensionStorage } from './storage.js';
import type { ExtensionConfig } from './extensionManager.js';
import prompts from 'prompts';
import { EXTENSION_SETTINGS_FILENAME } from './variables.js';
import { KeychainTokenStorage } from '../mcp/token-storage/keychain-token-storage.js';
export enum ExtensionSettingScope {
USER = 'user',
WORKSPACE = 'workspace',
}
export interface ExtensionSetting {
name: string;
description: string;
envVar: string;
// NOTE: If no value is set, this setting will be considered NOT sensitive.
sensitive?: boolean;
}
const getKeychainStorageName = (
extensionName: string,
extensionId: string,
scope: ExtensionSettingScope,
): string => {
const base = `Qwen Code Extensions ${extensionName} ${extensionId}`;
if (scope === ExtensionSettingScope.WORKSPACE) {
return `${base} ${process.cwd()}`;
}
return base;
};
const getEnvFilePath = (
extensionName: string,
scope: ExtensionSettingScope,
): string => {
if (scope === ExtensionSettingScope.WORKSPACE) {
return path.join(process.cwd(), EXTENSION_SETTINGS_FILENAME);
}
return new ExtensionStorage(extensionName).getEnvFilePath();
};
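// Note (added for clarity; the extension name and id are illustrative): for an
// extension called "my-ext" with id "abc123", the keychain services above resolve to
//   user scope:      "Qwen Code Extensions my-ext abc123"
//   workspace scope: "Qwen Code Extensions my-ext abc123 <cwd>"
// while non-sensitive values are written to the extension's .env file (user scope)
// or to EXTENSION_SETTINGS_FILENAME under process.cwd() (workspace scope).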
export async function maybePromptForSettings(
extensionConfig: ExtensionConfig,
extensionId: string,
requestSetting: (setting: ExtensionSetting) => Promise<string>,
previousExtensionConfig?: ExtensionConfig,
previousSettings?: Record<string, string>,
): Promise<void> {
const { name: extensionName, settings } = extensionConfig;
if (
(!settings || settings.length === 0) &&
(!previousExtensionConfig?.settings ||
previousExtensionConfig.settings.length === 0)
) {
return;
}
// We assume user scope here because we don't have a way to ask the user for scope during the initial setup.
// The user can change the scope later using the `settings set` command.
const scope = ExtensionSettingScope.USER;
const envFilePath = getEnvFilePath(extensionName, scope);
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
if (!settings || settings.length === 0) {
await clearSettings(envFilePath, keychain);
return;
}
const settingsChanges = getSettingsChanges(
settings,
previousExtensionConfig?.settings ?? [],
);
const allSettings: Record<string, string> = { ...previousSettings };
for (const removedEnvSetting of settingsChanges.removeEnv) {
delete allSettings[removedEnvSetting.envVar];
}
for (const removedSensitiveSetting of settingsChanges.removeSensitive) {
await keychain.deleteSecret(removedSensitiveSetting.envVar);
}
for (const setting of settingsChanges.promptForSensitive.concat(
settingsChanges.promptForEnv,
)) {
const answer = await requestSetting(setting);
allSettings[setting.envVar] = answer;
}
const nonSensitiveSettings: Record<string, string> = {};
for (const setting of settings) {
const value = allSettings[setting.envVar];
if (value === undefined) {
continue;
}
if (setting.sensitive) {
await keychain.setSecret(setting.envVar, value);
} else {
nonSensitiveSettings[setting.envVar] = value;
}
}
const envContent = formatEnvContent(nonSensitiveSettings);
await fs.writeFile(envFilePath, envContent);
}
function formatEnvContent(settings: Record<string, string>): string {
let envContent = '';
for (const [key, value] of Object.entries(settings)) {
const formattedValue = value.includes(' ') ? `"${value}"` : value;
envContent += `${key}=${formattedValue}\n`;
}
return envContent;
}
export async function promptForSetting(
setting: ExtensionSetting,
): Promise<string> {
const response = await prompts({
type: setting.sensitive ? 'password' : 'text',
name: 'value',
message: `${setting.name}\n${setting.description}`,
});
return response.value;
}
export async function getScopedEnvContents(
extensionConfig: ExtensionConfig,
extensionId: string,
scope: ExtensionSettingScope,
): Promise<Record<string, string>> {
const { name: extensionName } = extensionConfig;
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
const envFilePath = getEnvFilePath(extensionName, scope);
let customEnv: Record<string, string> = {};
if (fsSync.existsSync(envFilePath)) {
const envFile = fsSync.readFileSync(envFilePath, 'utf-8');
customEnv = dotenv.parse(envFile);
}
if (extensionConfig.settings) {
for (const setting of extensionConfig.settings) {
if (setting.sensitive) {
const secret = await keychain.getSecret(setting.envVar);
if (secret) {
customEnv[setting.envVar] = secret;
}
}
}
}
return customEnv;
}
export async function getEnvContents(
extensionConfig: ExtensionConfig,
extensionId: string,
): Promise<Record<string, string>> {
if (!extensionConfig.settings || extensionConfig.settings.length === 0) {
return Promise.resolve({});
}
const userSettings = await getScopedEnvContents(
extensionConfig,
extensionId,
ExtensionSettingScope.USER,
);
const workspaceSettings = await getScopedEnvContents(
extensionConfig,
extensionId,
ExtensionSettingScope.WORKSPACE,
);
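  // Workspace-scoped values take precedence over user-scoped ones via spread order.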
return { ...userSettings, ...workspaceSettings };
}
export async function updateSetting(
extensionConfig: ExtensionConfig,
extensionId: string,
settingKey: string,
requestSetting: (setting: ExtensionSetting) => Promise<string>,
scope: ExtensionSettingScope,
): Promise<void> {
const { name: extensionName, settings } = extensionConfig;
if (!settings || settings.length === 0) {
console.log('This extension does not have any settings.');
return;
}
const settingToUpdate = settings.find(
(s) => s.name === settingKey || s.envVar === settingKey,
);
if (!settingToUpdate) {
console.log(`Setting ${settingKey} not found.`);
return;
}
const newValue = await requestSetting(settingToUpdate);
const keychain = new KeychainTokenStorage(
getKeychainStorageName(extensionName, extensionId, scope),
);
if (settingToUpdate.sensitive) {
await keychain.setSecret(settingToUpdate.envVar, newValue);
return;
}
// For non-sensitive settings, we need to read the existing .env file,
// update the value, and write it back, preserving any other values.
const envFilePath = getEnvFilePath(extensionName, scope);
let envContent = '';
if (fsSync.existsSync(envFilePath)) {
envContent = await fs.readFile(envFilePath, 'utf-8');
}
const parsedEnv = dotenv.parse(envContent);
parsedEnv[settingToUpdate.envVar] = newValue;
// We only want to write back the variables that are not sensitive.
const nonSensitiveSettings: Record<string, string> = {};
const sensitiveEnvVars = new Set(
settings.filter((s) => s.sensitive).map((s) => s.envVar),
);
for (const [key, value] of Object.entries(parsedEnv)) {
if (!sensitiveEnvVars.has(key)) {
nonSensitiveSettings[key] = value;
}
}
const newEnvContent = formatEnvContent(nonSensitiveSettings);
await fs.writeFile(envFilePath, newEnvContent);
}
interface settingsChanges {
promptForSensitive: ExtensionSetting[];
removeSensitive: ExtensionSetting[];
promptForEnv: ExtensionSetting[];
removeEnv: ExtensionSetting[];
}
function getSettingsChanges(
settings: ExtensionSetting[],
oldSettings: ExtensionSetting[],
): settingsChanges {
const isSameSetting = (a: ExtensionSetting, b: ExtensionSetting) =>
a.envVar === b.envVar && (a.sensitive ?? false) === (b.sensitive ?? false);
const sensitiveOld = oldSettings.filter((s) => s.sensitive ?? false);
const sensitiveNew = settings.filter((s) => s.sensitive ?? false);
const envOld = oldSettings.filter((s) => !(s.sensitive ?? false));
const envNew = settings.filter((s) => !(s.sensitive ?? false));
return {
promptForSensitive: sensitiveNew.filter(
(s) => !sensitiveOld.some((old) => isSameSetting(s, old)),
),
removeSensitive: sensitiveOld.filter(
(s) => !sensitiveNew.some((neu) => isSameSetting(s, neu)),
),
promptForEnv: envNew.filter(
(s) => !envOld.some((old) => isSameSetting(s, old)),
),
removeEnv: envOld.filter(
(s) => !envNew.some((neu) => isSameSetting(s, neu)),
),
};
}
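// Worked example (added for clarity): if VAR1 was previously non-sensitive and
// is now marked sensitive, it fails isSameSetting() against its old entry in
// both directions, so it lands in removeEnv (the old plain value is dropped)
// and in promptForSensitive (the user is re-prompted and the new value goes to
// the keychain). This matches the "reprompt if a setting changes sensitivity"
// test earlier in this diff.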
async function clearSettings(
envFilePath: string,
keychain: KeychainTokenStorage,
) {
if (fsSync.existsSync(envFilePath)) {
await fs.writeFile(envFilePath, '');
}
if (!(await keychain.isAvailable())) {
return;
}
const secrets = await keychain.listSecrets();
for (const secret of secrets) {
await keychain.deleteSecret(secret);
}
return;
}

View File

@@ -0,0 +1,179 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import {
convertGeminiToQwenConfig,
isGeminiExtensionConfig,
type GeminiExtensionConfig,
} from './gemini-converter.js';
// Mock fs module
vi.mock('node:fs', async (importOriginal) => {
const actual = await importOriginal<typeof import('node:fs')>();
return {
...actual,
existsSync: vi.fn(),
readFileSync: vi.fn(),
};
});
describe('convertGeminiToQwenConfig', () => {
beforeEach(() => {
vi.clearAllMocks();
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should convert basic Gemini config from directory', () => {
const mockDir = '/mock/extension/dir';
const geminiConfig: GeminiExtensionConfig = {
name: 'test-extension',
version: '1.0.0',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(geminiConfig));
const result = convertGeminiToQwenConfig(mockDir);
expect(result.name).toBe('test-extension');
expect(result.version).toBe('1.0.0');
expect(fs.readFileSync).toHaveBeenCalledWith(
path.join(mockDir, 'gemini-extension.json'),
'utf-8',
);
});
it('should convert config with all optional fields', () => {
const mockDir = '/mock/extension/dir';
const geminiConfig = {
name: 'full-extension',
version: '2.0.0',
mcpServers: { server1: {} },
contextFileName: 'context.txt',
excludeTools: ['tool1', 'tool2'],
settings: [
{ name: 'Setting1', envVar: 'VAR1', description: 'Test setting' },
],
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(geminiConfig));
const result = convertGeminiToQwenConfig(mockDir);
expect(result.name).toBe('full-extension');
expect(result.version).toBe('2.0.0');
expect(result.mcpServers).toEqual({ server1: {} });
expect(result.contextFileName).toBe('context.txt');
expect(result.excludeTools).toEqual(['tool1', 'tool2']);
expect(result.settings).toHaveLength(1);
expect(result.settings?.[0].name).toBe('Setting1');
});
it('should throw error for missing name', () => {
const mockDir = '/mock/extension/dir';
const invalidConfig = {
version: '1.0.0',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(() => convertGeminiToQwenConfig(mockDir)).toThrow(
'Gemini extension config must have name and version fields',
);
});
it('should throw error for missing version', () => {
const mockDir = '/mock/extension/dir';
const invalidConfig = {
name: 'test-extension',
};
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(() => convertGeminiToQwenConfig(mockDir)).toThrow(
'Gemini extension config must have name and version fields',
);
});
});
describe('isGeminiExtensionConfig', () => {
beforeEach(() => {
vi.clearAllMocks();
});
afterEach(() => {
vi.restoreAllMocks();
});
it('should identify Gemini extension directory with valid config', () => {
const mockDir = '/mock/extension/dir';
const mockConfig = {
name: 'test',
version: '1.0.0',
settings: [{ name: 'Test', envVar: 'TEST', description: 'Test' }],
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(mockConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(true);
expect(fs.existsSync).toHaveBeenCalledWith(
path.join(mockDir, 'gemini-extension.json'),
);
});
it('should return false when gemini-extension.json does not exist', () => {
const mockDir = '/mock/nonexistent/dir';
vi.mocked(fs.existsSync).mockReturnValue(false);
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return false for invalid config content', () => {
const mockDir = '/mock/invalid/dir';
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue('null');
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return false for config missing required fields', () => {
const mockDir = '/mock/invalid/dir';
const invalidConfig = {
name: 'test',
// missing version
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(invalidConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(false);
});
it('should return true for basic config without settings', () => {
const mockDir = '/mock/extension/dir';
const basicConfig = {
name: 'test',
version: '1.0.0',
};
vi.mocked(fs.existsSync).mockReturnValue(true);
vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(basicConfig));
expect(isGeminiExtensionConfig(mockDir)).toBe(true);
});
});
// Note: convertGeminiExtensionPackage() is tested through integration tests
// as it requires real file system operations

View File

@@ -0,0 +1,217 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* Converter for Gemini CLI extensions to Qwen Code format.
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { glob } from 'glob';
import type { ExtensionConfig, ExtensionSetting } from './extensionManager.js';
import { ExtensionStorage } from './storage.js';
import { convertTomlToMarkdown } from '../utils/toml-to-markdown-converter.js';
export interface GeminiExtensionConfig {
name: string;
version: string;
mcpServers?: Record<string, unknown>;
contextFileName?: string | string[];
excludeTools?: string[];
settings?: ExtensionSetting[];
}
/**
* Converts a Gemini CLI extension config to Qwen Code format.
* @param extensionDir Path to the Gemini extension directory
* @returns Qwen ExtensionConfig
*/
export function convertGeminiToQwenConfig(
extensionDir: string,
): ExtensionConfig {
const configFilePath = path.join(extensionDir, 'gemini-extension.json');
const configContent = fs.readFileSync(configFilePath, 'utf-8');
const geminiConfig: GeminiExtensionConfig = JSON.parse(configContent);
// Validate required fields
if (!geminiConfig.name || !geminiConfig.version) {
throw new Error(
'Gemini extension config must have name and version fields',
);
}
const settings: ExtensionSetting[] | undefined = geminiConfig.settings;
// Direct field mapping
return {
name: geminiConfig.name,
version: geminiConfig.version,
mcpServers: geminiConfig.mcpServers as ExtensionConfig['mcpServers'],
contextFileName: geminiConfig.contextFileName,
excludeTools: geminiConfig.excludeTools,
settings,
};
}
/**
* Converts a complete Gemini extension package to Qwen Code format.
* Creates a new temporary directory with:
* 1. Converted qwen-extension.json
* 2. Commands converted from TOML to MD
* 3. All other files/folders preserved
*
* @param extensionDir Path to the Gemini extension directory
* @returns Object containing converted config and the temporary directory path
*/
export async function convertGeminiExtensionPackage(
extensionDir: string,
): Promise<{ config: ExtensionConfig; convertedDir: string }> {
const geminiConfig = convertGeminiToQwenConfig(extensionDir);
// Create temporary directory for converted extension
const tmpDir = await ExtensionStorage.createTmpDir();
try {
// Step 1: Copy all files and directories to temporary directory
await copyDirectory(extensionDir, tmpDir);
// Step 2: Convert TOML commands to Markdown in commands folder
const commandsDir = path.join(tmpDir, 'commands');
if (fs.existsSync(commandsDir)) {
await convertCommandsDirectory(commandsDir);
}
// Step 3: Create qwen-extension.json with converted config
const qwenConfigPath = path.join(tmpDir, 'qwen-extension.json');
fs.writeFileSync(
qwenConfigPath,
JSON.stringify(geminiConfig, null, 2),
'utf-8',
);
return {
config: geminiConfig,
convertedDir: tmpDir,
};
} catch (error) {
// Clean up temporary directory on error
try {
fs.rmSync(tmpDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
throw error;
}
}
/**
* Recursively copies a directory and its contents.
* @param source Source directory path
* @param destination Destination directory path
*/
async function copyDirectory(
source: string,
destination: string,
): Promise<void> {
// Create destination directory if it doesn't exist
if (!fs.existsSync(destination)) {
fs.mkdirSync(destination, { recursive: true });
}
const entries = fs.readdirSync(source, { withFileTypes: true });
for (const entry of entries) {
const sourcePath = path.join(source, entry.name);
const destPath = path.join(destination, entry.name);
if (entry.isDirectory()) {
await copyDirectory(sourcePath, destPath);
} else {
fs.copyFileSync(sourcePath, destPath);
}
}
}
/**
* Converts all TOML command files in a directory to Markdown format.
* @param commandsDir Path to the commands directory
*/
async function convertCommandsDirectory(commandsDir: string): Promise<void> {
// Find all .toml files in the commands directory
const tomlFiles = await glob('**/*.toml', {
cwd: commandsDir,
nodir: true,
dot: false,
});
// Convert each TOML file to Markdown
for (const relativeFile of tomlFiles) {
const tomlPath = path.join(commandsDir, relativeFile);
try {
// Read TOML file
const tomlContent = fs.readFileSync(tomlPath, 'utf-8');
// Convert to Markdown
const markdownContent = convertTomlToMarkdown(tomlContent);
// Generate Markdown file path (same location, .md extension)
const markdownPath = tomlPath.replace(/\.toml$/, '.md');
// Write Markdown file
fs.writeFileSync(markdownPath, markdownContent, 'utf-8');
// Delete original TOML file
fs.unlinkSync(tomlPath);
} catch (error) {
console.warn(
`Warning: Failed to convert command file ${relativeFile}: ${error instanceof Error ? error.message : String(error)}`,
);
// Continue with other files even if one fails
}
}
}
/**
 * Checks whether a directory contains a Gemini-format extension config.
 * This is a heuristic check based on typical Gemini extension patterns.
 * @param extensionDir Path to the extension directory to check
 * @returns true if the directory appears to contain a Gemini extension config
 */
export function isGeminiExtensionConfig(extensionDir: string) {
const configFilePath = path.join(extensionDir, 'gemini-extension.json');
if (!fs.existsSync(configFilePath)) {
return false;
}
const configContent = fs.readFileSync(configFilePath, 'utf-8');
const parsedConfig = JSON.parse(configContent);
if (typeof parsedConfig !== 'object' || parsedConfig === null) {
return false;
}
const obj = parsedConfig as Record<string, unknown>;
// Must have name and version
if (typeof obj['name'] !== 'string' || typeof obj['version'] !== 'string') {
return false;
}
// Check for Gemini-specific settings format
if (obj['settings'] && Array.isArray(obj['settings'])) {
const firstSetting = obj['settings'][0];
if (
firstSetting &&
typeof firstSetting === 'object' &&
'envVar' in firstSetting
) {
return true;
}
}
  // Has name and version in a gemini-extension.json; treat it as Gemini format.
return true;
}
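
For orientation, a minimal usage sketch of the converter exports above (not part of the diff): the `prepareExtensionDir` wrapper and the import path are assumptions for illustration, while `isGeminiExtensionConfig` and `convertGeminiExtensionPackage` are the functions defined in this file.

```typescript
import {
  isGeminiExtensionConfig,
  convertGeminiExtensionPackage,
} from './gemini-converter.js'; // module path assumed from this change

// Hypothetical wrapper: returns a directory that is guaranteed to contain a
// qwen-extension.json, converting from Gemini format when needed.
async function prepareExtensionDir(downloadDir: string): Promise<string> {
  if (!isGeminiExtensionConfig(downloadDir)) {
    // No gemini-extension.json detected; use the directory as-is.
    return downloadDir;
  }
  // Converts gemini-extension.json, rewrites TOML commands to Markdown,
  // and writes qwen-extension.json into a temporary directory.
  const { config, convertedDir } =
    await convertGeminiExtensionPackage(downloadDir);
  console.log(`Converted Gemini extension "${config.name}" v${config.version}`);
  return convertedDir;
}
```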

View File

@@ -13,14 +13,17 @@ import {
parseGitHubRepoForReleases,
} from './github.js';
import { simpleGit, type SimpleGit } from 'simple-git';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import * as os from 'node:os';
import * as fs from 'node:fs/promises';
import * as fsSync from 'node:fs';
import * as path from 'node:path';
import * as tar from 'tar';
import * as archiver from 'archiver';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import {
ExtensionUpdateState,
type Extension,
type ExtensionManager,
} from './extensionManager.js';
const mockPlatform = vi.hoisted(() => vi.fn());
const mockArch = vi.hoisted(() => vi.fn());
@@ -123,119 +126,170 @@ describe('git extension helpers', () => {
revparse: vi.fn(),
};
const mockExtensionManager = {
loadExtensionConfig: vi.fn(),
} as unknown as ExtensionManager;
beforeEach(() => {
vi.mocked(simpleGit).mockReturnValue(mockGit as unknown as SimpleGit);
});
it('should return NOT_UPDATABLE for non-git extensions', async () => {
const extension: GeminiCLIExtension = {
function createExtension(overrides: Partial<Extension> = {}): Extension {
return {
id: 'test-id',
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
config: { name: 'test', version: '1.0.0' },
contextFiles: [],
...overrides,
};
}
it('should return NOT_UPDATABLE for non-git extensions', async () => {
const extension = createExtension({
installMetadata: {
type: 'link',
source: '',
},
};
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
});
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
});
it('should return ERROR if no remotes found', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: '',
},
};
});
mockGit.getRemotes.mockResolvedValue([]);
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.ERROR);
});
it('should return UPDATE_AVAILABLE when remote hash is different', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
mockGit.listRemote.mockResolvedValue('remote-hash\tHEAD');
mockGit.revparse.mockResolvedValue('local-hash');
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
});
it('should return UP_TO_DATE when remote and local hashes are the same', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'http://my-repo.com' } },
]);
mockGit.listRemote.mockResolvedValue('same-hash\tHEAD');
mockGit.revparse.mockResolvedValue('same-hash');
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
});
it('should return ERROR on git error', async () => {
const extension: GeminiCLIExtension = {
name: 'test',
path: '/ext',
version: '1.0.0',
isActive: true,
const extension = createExtension({
installMetadata: {
type: 'git',
source: 'my/ext',
},
};
});
mockGit.getRemotes.mockRejectedValue(new Error('git error'));
let result: ExtensionUpdateState | undefined = undefined;
await checkForExtensionUpdate(
const result = await checkForExtensionUpdate(
extension,
(newState) => (result = newState),
mockExtensionManager,
);
expect(result).toBe(ExtensionUpdateState.ERROR);
});
it('should return UPDATE_AVAILABLE for local extension with different version', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockReturnValue({
name: 'test',
version: '2.0.0',
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.UPDATE_AVAILABLE);
});
it('should return UP_TO_DATE for local extension with same version', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockReturnValue({
name: 'test',
version: '1.0.0',
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.UP_TO_DATE);
});
it('should return NOT_UPDATABLE for local extension when source cannot be loaded', async () => {
const extension = createExtension({
version: '1.0.0',
installMetadata: {
type: 'local',
source: '/path/to/source',
},
});
const mockManager = {
loadExtensionConfig: vi.fn().mockImplementation(() => {
throw new Error('Cannot load config');
}),
} as unknown as ExtensionManager;
const result = await checkForExtensionUpdate(extension, mockManager);
expect(result).toBe(ExtensionUpdateState.NOT_UPDATABLE);
});
});
describe('findReleaseAsset', () => {

View File

@@ -5,19 +5,38 @@
*/
import { simpleGit } from 'simple-git';
import { getErrorMessage } from '../../utils/errors.js';
import type {
ExtensionInstallMetadata,
GeminiCLIExtension,
} from '@qwen-code/qwen-code-core';
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
import { getErrorMessage } from '../utils/errors.js';
import * as os from 'node:os';
import * as https from 'node:https';
import * as fs from 'node:fs';
import * as path from 'node:path';
import { EXTENSIONS_CONFIG_FILENAME, loadExtension } from '../extension.js';
import { EXTENSIONS_CONFIG_FILENAME } from './variables.js';
import * as tar from 'tar';
import extract from 'extract-zip';
import {
ExtensionUpdateState,
type Extension,
type ExtensionConfig,
type ExtensionManager,
} from './extensionManager.js';
import type { ExtensionInstallMetadata } from '../config/config.js';
interface GithubReleaseData {
assets: Asset[];
tag_name: string;
tarball_url?: string;
zipball_url?: string;
}
interface Asset {
name: string;
browser_download_url: string;
}
export interface GitHubDownloadResult {
tagName: string;
type: 'git' | 'github-release';
}
function getGitHubToken(): string | undefined {
return process.env['GITHUB_TOKEN'];
@@ -115,38 +134,40 @@ async function fetchReleaseFromGithub(
}
export async function checkForExtensionUpdate(
extension: GeminiCLIExtension,
setExtensionUpdateState: (updateState: ExtensionUpdateState) => void,
cwd: string = process.cwd(),
): Promise<void> {
setExtensionUpdateState(ExtensionUpdateState.CHECKING_FOR_UPDATES);
extension: Extension,
extensionManager: ExtensionManager,
): Promise<ExtensionUpdateState> {
const installMetadata = extension.installMetadata;
if (installMetadata?.type === 'local') {
const newExtension = loadExtension({
extensionDir: installMetadata.source,
workspaceDir: cwd,
});
if (!newExtension) {
let latestConfig: ExtensionConfig | undefined;
try {
latestConfig = extensionManager.loadExtensionConfig({
extensionDir: installMetadata.source,
});
} catch (e) {
console.error(
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}. Error: ${getErrorMessage(e)}`,
);
return ExtensionUpdateState.NOT_UPDATABLE;
}
if (!latestConfig) {
console.error(
`Failed to check for update for local extension "${extension.name}". Could not load extension from source path: ${installMetadata.source}`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.NOT_UPDATABLE;
}
if (newExtension.config.version !== extension.version) {
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
if (latestConfig.version !== extension.version) {
return ExtensionUpdateState.UPDATE_AVAILABLE;
}
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
if (
!installMetadata ||
(installMetadata.type !== 'git' &&
installMetadata.type !== 'github-release')
) {
setExtensionUpdateState(ExtensionUpdateState.NOT_UPDATABLE);
return;
return ExtensionUpdateState.NOT_UPDATABLE;
}
try {
if (installMetadata.type === 'git') {
@@ -154,14 +175,12 @@ export async function checkForExtensionUpdate(
const remotes = await git.getRemotes(true);
if (remotes.length === 0) {
console.error('No git remotes found.');
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const remoteUrl = remotes[0].refs.fetch;
if (!remoteUrl) {
console.error(`No fetch URL found for git remote ${remotes[0].name}.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
// Determine the ref to check on the remote.
@@ -171,8 +190,7 @@ export async function checkForExtensionUpdate(
if (typeof lsRemoteOutput !== 'string' || lsRemoteOutput.trim() === '') {
console.error(`Git ref ${refToCheck} not found.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const remoteHash = lsRemoteOutput.split('\t')[0];
@@ -182,21 +200,17 @@ export async function checkForExtensionUpdate(
console.error(
`Unable to parse hash from git ls-remote output "${lsRemoteOutput}"`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
if (remoteHash === localHash) {
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
return ExtensionUpdateState.UPDATE_AVAILABLE;
} else {
const { source, releaseTag } = installMetadata;
if (!source) {
console.error(`No "source" provided for extension.`);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
const { owner, repo } = parseGitHubRepoForReleases(source);
@@ -206,30 +220,28 @@ export async function checkForExtensionUpdate(
installMetadata.ref,
);
if (releaseData.tag_name !== releaseTag) {
setExtensionUpdateState(ExtensionUpdateState.UPDATE_AVAILABLE);
return;
return ExtensionUpdateState.UPDATE_AVAILABLE;
}
setExtensionUpdateState(ExtensionUpdateState.UP_TO_DATE);
return;
return ExtensionUpdateState.UP_TO_DATE;
}
} catch (error) {
console.error(
`Failed to check for updates for extension "${installMetadata.source}": ${getErrorMessage(error)}`,
);
setExtensionUpdateState(ExtensionUpdateState.ERROR);
return;
return ExtensionUpdateState.ERROR;
}
}
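
The refactor above replaces the `setExtensionUpdateState` callback with a returned `ExtensionUpdateState`. A minimal sketch of the new call shape, assuming a caller that already holds an `Extension` and an `ExtensionManager` (the `reportUpdateState` helper is hypothetical; import paths follow the ones used in this file):

```typescript
import { checkForExtensionUpdate } from './github.js';
import {
  ExtensionUpdateState,
  type Extension,
  type ExtensionManager,
} from './extensionManager.js';

// Hypothetical helper: report the update state for a single extension using
// the refactored return-value API shown above.
async function reportUpdateState(
  extension: Extension,
  extensionManager: ExtensionManager,
): Promise<void> {
  const state = await checkForExtensionUpdate(extension, extensionManager);
  if (state === ExtensionUpdateState.UPDATE_AVAILABLE) {
    console.log(`Update available for ${extension.name}`);
  } else if (state === ExtensionUpdateState.ERROR) {
    console.warn(`Could not check for updates for ${extension.name}`);
  }
}
```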
export interface GitHubDownloadResult {
tagName: string;
type: 'git' | 'github-release';
}
export async function downloadFromGitHubRelease(
installMetadata: ExtensionInstallMetadata,
destination: string,
): Promise<GitHubDownloadResult> {
const { source, ref } = installMetadata;
const { owner, repo } = parseGitHubRepoForReleases(source);
const { source, ref, marketplace, type } = installMetadata;
const { owner, repo } = parseGitHubRepoForReleases(
type === 'marketplace' && marketplace
? marketplace.marketplaceSource
: source,
);
try {
const releaseData = await fetchReleaseFromGithub(owner, repo, ref);
@@ -276,28 +288,32 @@ export async function downloadFromGitHubRelease(
// For regular github releases, the repository is put inside of a top level
  // directory. In this case we should see exactly two files in the destination
// dir, the archive and the directory. If we see that, validate that the
// dir has a qwen extension configuration file and then move all files
// from the directory up one level into the destination directory.
// dir has a qwen extension configuration file (or gemini-extension.json
// which will be converted later) and then move all files from the directory
// up one level into the destination directory.
const entries = await fs.promises.readdir(destination, {
withFileTypes: true,
});
if (entries.length === 2) {
const lonelyDir = entries.find((entry) => entry.isDirectory());
if (
lonelyDir &&
fs.existsSync(
if (lonelyDir) {
const hasQwenConfig = fs.existsSync(
path.join(destination, lonelyDir.name, EXTENSIONS_CONFIG_FILENAME),
)
) {
const dirPathToExtract = path.join(destination, lonelyDir.name);
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
for (const file of extractedDirFiles) {
await fs.promises.rename(
path.join(dirPathToExtract, file),
path.join(destination, file),
);
);
const hasGeminiConfig = fs.existsSync(
path.join(destination, lonelyDir.name, 'gemini-extension.json'),
);
if (hasQwenConfig || hasGeminiConfig) {
const dirPathToExtract = path.join(destination, lonelyDir.name);
const extractedDirFiles = await fs.promises.readdir(dirPathToExtract);
for (const file of extractedDirFiles) {
await fs.promises.rename(
path.join(dirPathToExtract, file),
path.join(destination, file),
);
}
await fs.promises.rmdir(dirPathToExtract);
}
await fs.promises.rmdir(dirPathToExtract);
}
}
@@ -313,18 +329,6 @@ export async function downloadFromGitHubRelease(
}
}
interface GithubReleaseData {
assets: Asset[];
tag_name: string;
tarball_url?: string;
zipball_url?: string;
}
interface Asset {
name: string;
browser_download_url: string;
}
export function findReleaseAsset(assets: Asset[]): Asset | undefined {
const platform = os.platform();
const arch = os.arch();

View File

@@ -0,0 +1,3 @@
export * from './extensionManager.js';
export * from './variables.js';
export * from './github.js';

View File

@@ -0,0 +1,78 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import { parseMarketplaceSource } from './marketplace.js';
describe('Marketplace Installation', () => {
describe('parseMarketplaceSource', () => {
it('should parse valid marketplace source with http URL', () => {
const result = parseMarketplaceSource(
'http://example.com/marketplace:my-plugin',
);
expect(result).toEqual({
marketplaceSource: 'http://example.com/marketplace',
pluginName: 'my-plugin',
});
});
it('should parse valid marketplace source with https URL', () => {
const result = parseMarketplaceSource(
'https://github.com/example/marketplace:awesome-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://github.com/example/marketplace',
pluginName: 'awesome-plugin',
});
});
it('should handle plugin names with hyphens', () => {
const result = parseMarketplaceSource(
'https://example.com:my-super-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com',
pluginName: 'my-super-plugin',
});
});
it('should handle URLs with ports', () => {
const result = parseMarketplaceSource(
'https://example.com:8080/marketplace:plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com:8080/marketplace',
pluginName: 'plugin',
});
});
it('should return null for source without colon separator', () => {
const result = parseMarketplaceSource('https://example.com/plugin');
expect(result).toBeNull();
});
it('should return null for source without URL', () => {
const result = parseMarketplaceSource('not-a-url:plugin');
expect(result).toBeNull();
});
it('should return null for source with empty plugin name', () => {
const result = parseMarketplaceSource('https://example.com:');
expect(result).toBeNull();
});
it('should use last colon as separator', () => {
// URLs with ports have colons, should use the last one
const result = parseMarketplaceSource(
'https://example.com:8080:my-plugin',
);
expect(result).toEqual({
marketplaceSource: 'https://example.com:8080',
pluginName: 'my-plugin',
});
});
});
});

View File

@@ -0,0 +1,259 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* This module handles installation of extensions from Claude marketplaces.
*
* A marketplace URL format: marketplace-url:plugin-name
* Example: https://github.com/example/marketplace:my-plugin
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import type { ExtensionConfig } from './extensionManager.js';
import {
convertClaudeToQwenConfig,
mergeClaudeConfigs,
type ClaudeMarketplaceConfig,
type ClaudeMarketplacePluginConfig,
type ClaudePluginConfig,
} from './claude-converter.js';
import { cloneFromGit, downloadFromGitHubRelease } from './github.js';
import type { ExtensionInstallMetadata } from '../config/config.js';
export interface MarketplaceInstallOptions {
marketplaceUrl: string;
pluginName: string;
tempDir: string;
requestConsent: (consent: string) => Promise<boolean>;
}
export interface MarketplaceInstallResult {
config: ExtensionConfig;
sourcePath: string;
installMetadata: ExtensionInstallMetadata;
}
/**
* Parse marketplace install source string.
* Format: marketplace-url:plugin-name
*/
export function parseMarketplaceSource(source: string): {
marketplaceSource: string;
pluginName: string;
} | null {
// Check if source contains a colon separator
const lastColonIndex = source.lastIndexOf(':');
if (lastColonIndex === -1) {
return null;
}
// Split at the last colon to separate URL from plugin name
const marketplaceSource = source.substring(0, lastColonIndex);
const pluginName = source.substring(lastColonIndex + 1);
// Validate that marketplace URL looks like a URL
if (
!marketplaceSource.startsWith('http://') &&
!marketplaceSource.startsWith('https://')
) {
return null;
}
if (!pluginName || pluginName.length === 0) {
return null;
}
return { marketplaceSource, pluginName };
}
/**
* Install an extension from a Claude marketplace.
*
* Process:
* 1. Download marketplace repository
* 2. Parse marketplace.json
* 3. Find the specified plugin
* 4. Download/copy plugin source
* 5. Merge configurations (if strict mode)
* 6. Convert to Qwen format
*/
export async function installFromMarketplace(
options: MarketplaceInstallOptions,
): Promise<MarketplaceInstallResult> {
const {
marketplaceUrl,
pluginName,
tempDir,
requestConsent: _requestConsent,
} = options;
// Step 1: Download marketplace repository
const marketplaceDir = path.join(tempDir, 'marketplace');
await fs.promises.mkdir(marketplaceDir, { recursive: true });
console.log(`Downloading marketplace from ${marketplaceUrl}...`);
const installMetadata: ExtensionInstallMetadata = {
source: marketplaceUrl,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, marketplaceDir);
} catch {
await cloneFromGit(installMetadata, marketplaceDir);
}
// Step 2: Parse marketplace.json
const marketplaceConfigPath = path.join(marketplaceDir, 'marketplace.json');
if (!fs.existsSync(marketplaceConfigPath)) {
throw new Error(
`Marketplace configuration not found at ${marketplaceConfigPath}`,
);
}
const marketplaceConfigContent = await fs.promises.readFile(
marketplaceConfigPath,
'utf-8',
);
const marketplaceConfig: ClaudeMarketplaceConfig = JSON.parse(
marketplaceConfigContent,
);
// Step 3: Find the plugin
const pluginConfig = marketplaceConfig.plugins.find(
(p) => p.name.toLowerCase() === pluginName.toLowerCase(),
);
if (!pluginConfig) {
throw new Error(
`Plugin "${pluginName}" not found in marketplace. Available plugins: ${marketplaceConfig.plugins.map((p) => p.name).join(', ')}`,
);
}
// Step 4: Download/copy plugin source
const pluginDir = path.join(tempDir, 'plugin');
await fs.promises.mkdir(pluginDir, { recursive: true });
const pluginSource = await resolvePluginSource(
pluginConfig,
marketplaceDir,
pluginDir,
);
// Step 5: Merge configurations (if strict mode)
let finalPluginConfig: ClaudePluginConfig;
const strict = pluginConfig.strict ?? true;
if (strict) {
// Read plugin.json from plugin source
const pluginJsonPath = path.join(
pluginSource,
'.claude-plugin',
'plugin.json',
);
if (!fs.existsSync(pluginJsonPath)) {
throw new Error(
`Strict mode requires plugin.json at ${pluginJsonPath}, but file not found`,
);
}
const pluginJsonContent = await fs.promises.readFile(
pluginJsonPath,
'utf-8',
);
const basePluginConfig: ClaudePluginConfig = JSON.parse(pluginJsonContent);
// Merge marketplace config with plugin config
finalPluginConfig = mergeClaudeConfigs(pluginConfig, basePluginConfig);
} else {
// Use marketplace config directly
finalPluginConfig = pluginConfig;
}
// Step 6: Convert to Qwen format
const qwenConfig = convertClaudeToQwenConfig(finalPluginConfig);
return {
config: qwenConfig,
sourcePath: pluginSource,
installMetadata: {
source: `${marketplaceUrl}:${pluginName}`,
type: 'git', // Marketplace installs are treated as git installs
},
};
}
/**
* Resolve plugin source from marketplace plugin configuration.
* Returns the absolute path to the plugin source directory.
*/
async function resolvePluginSource(
pluginConfig: ClaudeMarketplacePluginConfig,
marketplaceDir: string,
pluginDir: string,
): Promise<string> {
const source = pluginConfig.source;
// Handle string source (relative path or URL)
if (typeof source === 'string') {
// Check if it's a URL
if (source.startsWith('http://') || source.startsWith('https://')) {
// Download from URL
const installMetadata: ExtensionInstallMetadata = {
source,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
// Relative path within marketplace
const pluginRoot = marketplaceDir;
const sourcePath = path.join(pluginRoot, source);
if (!fs.existsSync(sourcePath)) {
throw new Error(`Plugin source not found at ${sourcePath}`);
}
// Copy to plugin directory
await fs.promises.cp(sourcePath, pluginDir, { recursive: true });
return pluginDir;
}
// Handle object source (github or url)
if (source.source === 'github') {
const installMetadata: ExtensionInstallMetadata = {
source: `https://github.com/${source.repo}`,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
if (source.source === 'url') {
const installMetadata: ExtensionInstallMetadata = {
source: source.url,
type: 'git',
};
try {
await downloadFromGitHubRelease(installMetadata, pluginDir);
} catch {
await cloneFromGit(installMetadata, pluginDir);
}
return pluginDir;
}
throw new Error(`Unsupported plugin source type: ${JSON.stringify(source)}`);
}
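
A hedged end-to-end sketch of how these exports might be driven from an install command (not part of the diff): the `installMarketplacePlugin` wrapper, the temp-directory handling, and the auto-approving consent callback are illustrative assumptions; `parseMarketplaceSource` and `installFromMarketplace` are the functions defined above.

```typescript
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import {
  parseMarketplaceSource,
  installFromMarketplace,
} from './marketplace.js';

// Hypothetical driver for a marketplace install flow.
async function installMarketplacePlugin(source: string): Promise<void> {
  const parsed = parseMarketplaceSource(source);
  if (!parsed) {
    throw new Error(`Not a marketplace source: ${source}`);
  }
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'qwen-marketplace-'));
  try {
    const result = await installFromMarketplace({
      marketplaceUrl: parsed.marketplaceSource,
      pluginName: parsed.pluginName,
      tempDir,
      requestConsent: async () => true, // illustrative: auto-approve
    });
    console.log(
      `Resolved plugin "${result.config.name}" v${result.config.version} at ${result.sourcePath}`,
    );
  } finally {
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
}

// Example: installMarketplacePlugin('https://github.com/example/marketplace:my-plugin');
```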

Some files were not shown because too many files have changed in this diff.