Mirror of https://github.com/QwenLM/qwen-code.git
Synced 2026-01-14 12:59:16 +00:00

Compare commits: fix/vscode ... feat/exten (8 commits)

- b37ede07e8
- 0a88dd7861
- 70991e474f
- 551e546974
- 74013bd8b2
- 18713ef2b0
- 50dac93c80
- 22504b0a5b
.github/CODEOWNERS (vendored, 3 lines changed)

@@ -1,3 +0,0 @@
* @tanzhenxin @DennisYu07 @gwinthis @LaZzyMan @pomelo-nwu @Mingholy
# SDK TypeScript package changes require review from Mingholy
packages/sdk-typescript/** @Mingholy
.github/workflows/release-sdk.yml (vendored, 17 lines changed)

@@ -241,7 +241,7 @@ jobs:
          ${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
        id: 'pr'
        env:
          GITHUB_TOKEN: '${{ secrets.CI_BOT_PAT }}'
          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
          RELEASE_BRANCH: '${{ steps.release_branch.outputs.BRANCH_NAME }}'
          RELEASE_TAG: '${{ steps.version.outputs.RELEASE_TAG }}'
        run: |-
@@ -258,15 +258,26 @@ jobs:

          echo "PR_URL=${pr_url}" >> "${GITHUB_OUTPUT}"

      - name: 'Wait for CI checks to complete'
        if: |-
          ${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
        env:
          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
          PR_URL: '${{ steps.pr.outputs.PR_URL }}'
        run: |-
          set -euo pipefail
          echo "Waiting for CI checks to complete..."
          gh pr checks "${PR_URL}" --watch --interval 30

      - name: 'Enable auto-merge for release PR'
        if: |-
          ${{ steps.vars.outputs.is_dry_run == 'false' && steps.vars.outputs.is_nightly == 'false' && steps.vars.outputs.is_preview == 'false' }}
        env:
          GITHUB_TOKEN: '${{ secrets.CI_BOT_PAT }}'
          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
          PR_URL: '${{ steps.pr.outputs.PR_URL }}'
        run: |-
          set -euo pipefail
          gh pr merge "${PR_URL}" --merge --auto --delete-branch
          gh pr merge "${PR_URL}" --merge --auto

      - name: 'Create Issue on Failure'
        if: |-
@@ -11,7 +11,6 @@ export default {
    type: 'separator',
  },
  'sdk-typescript': 'Typescript SDK',
  'sdk-java': 'Java SDK(alpha)',
  'Dive Into Qwen Code': {
    title: 'Dive Into Qwen Code',
    type: 'separator',
@@ -1,312 +0,0 @@
# Qwen Code Java SDK

The Qwen Code Java SDK is a minimal, experimental SDK for programmatic access to Qwen Code functionality. It provides a Java interface for interacting with the Qwen Code CLI, allowing developers to integrate Qwen Code capabilities into their Java applications.

## Requirements

- Java >= 1.8
- Maven >= 3.6.0 (for building from source)
- qwen-code >= 0.5.0

### Dependencies

- **Logging**: ch.qos.logback:logback-classic
- **Utilities**: org.apache.commons:commons-lang3
- **JSON Processing**: com.alibaba.fastjson2:fastjson2
- **Testing**: JUnit 5 (org.junit.jupiter:junit-jupiter)

## Installation

Add the following dependency to your Maven `pom.xml`:

```xml
<dependency>
  <groupId>com.alibaba</groupId>
  <artifactId>qwencode-sdk</artifactId>
  <version>{$version}</version>
</dependency>
```

Or, if using Gradle, add to your `build.gradle`:

```gradle
implementation 'com.alibaba:qwencode-sdk:{$version}'
```

## Building and Running

### Build Commands

```bash
# Compile the project
mvn compile

# Run tests
mvn test

# Package the JAR
mvn package

# Install to the local repository
mvn install
```

## Quick Start

The simplest way to use the SDK is through the `QwenCodeCli.simpleQuery()` method:

```java
public static void runSimpleExample() {
  List<String> result = QwenCodeCli.simpleQuery("hello world");
  result.forEach(logger::info);
}
```

For more advanced usage with custom transport options:

```java
public static void runTransportOptionsExample() {
  TransportOptions options = new TransportOptions()
      .setModel("qwen3-coder-flash")
      .setPermissionMode(PermissionMode.AUTO_EDIT)
      .setCwd("./")
      .setEnv(new HashMap<String, String>() {{ put("CUSTOM_VAR", "value"); }})
      .setIncludePartialMessages(true)
      .setTurnTimeout(new Timeout(120L, TimeUnit.SECONDS))
      .setMessageTimeout(new Timeout(90L, TimeUnit.SECONDS))
      .setAllowedTools(Arrays.asList("read_file", "write_file", "list_directory"));

  List<String> result = QwenCodeCli.simpleQuery("who are you, what are your capabilities?", options);
  result.forEach(logger::info);
}
```

For streaming content handling with custom content consumers:

```java
public static void runStreamingExample() {
  QwenCodeCli.simpleQuery("who are you, what are your capabilities?",
      new TransportOptions().setMessageTimeout(new Timeout(10L, TimeUnit.SECONDS)), new AssistantContentSimpleConsumers() {

        @Override
        public void onText(Session session, TextAssistantContent textAssistantContent) {
          logger.info("Text content received: {}", textAssistantContent.getText());
        }

        @Override
        public void onThinking(Session session, ThingkingAssistantContent thingkingAssistantContent) {
          logger.info("Thinking content received: {}", thingkingAssistantContent.getThinking());
        }

        @Override
        public void onToolUse(Session session, ToolUseAssistantContent toolUseContent) {
          logger.info("Tool use content received: {} with arguments: {}",
              toolUseContent, toolUseContent.getInput());
        }

        @Override
        public void onToolResult(Session session, ToolResultAssistantContent toolResultContent) {
          logger.info("Tool result content received: {}", toolResultContent.getContent());
        }

        @Override
        public void onOtherContent(Session session, AssistantContent<?> other) {
          logger.info("Other content received: {}", other);
        }

        @Override
        public void onUsage(Session session, AssistantUsage assistantUsage) {
          logger.info("Usage information received: Input tokens: {}, Output tokens: {}",
              assistantUsage.getUsage().getInputTokens(), assistantUsage.getUsage().getOutputTokens());
        }
      }.setDefaultPermissionOperation(Operation.allow));
  logger.info("Streaming example completed.");
}
```

For more examples, see `src/test/java/com/alibaba/qwen/code/cli/example`.

## Architecture

The SDK follows a layered architecture:

- **API Layer**: Provides the main entry points through the `QwenCodeCli` class, with simple static methods for basic usage
- **Session Layer**: Manages communication sessions with the Qwen Code CLI through the `Session` class
- **Transport Layer**: Handles the communication mechanism between the SDK and the CLI process (currently process-based transport via `ProcessTransport`)
- **Protocol Layer**: Defines data structures for communication based on the CLI protocol
- **Utils**: Common utilities for concurrent execution, timeout handling, and error management

## Key Features

### Permission Modes

The SDK supports different permission modes for controlling tool execution:

- **`default`**: Write tools are denied unless approved via the `canUseTool` callback or listed in `allowedTools`. Read-only tools execute without confirmation.
- **`plan`**: Blocks all write tools, instructing the AI to present a plan first.
- **`auto-edit`**: Auto-approves edit tools (edit, write_file), while other tools require confirmation.
- **`yolo`**: All tools execute automatically without confirmation.

### Session Event Consumers and Assistant Content Consumers

The SDK provides two key interfaces for handling events and content from the CLI:

#### SessionEventConsumers Interface

The `SessionEventConsumers` interface provides callbacks for the different types of messages seen during a session (a sketch follows the list below):

- `onSystemMessage`: Handles system messages from the CLI (receives Session and SDKSystemMessage)
- `onResultMessage`: Handles result messages from the CLI (receives Session and SDKResultMessage)
- `onAssistantMessage`: Handles assistant messages (AI responses) (receives Session and SDKAssistantMessage)
- `onPartialAssistantMessage`: Handles partial assistant messages during streaming (receives Session and SDKPartialAssistantMessage)
- `onUserMessage`: Handles user messages (receives Session and SDKUserMessage)
- `onOtherMessage`: Handles other types of messages (receives Session and a String message)
- `onControlResponse`: Handles control responses (receives Session and CLIControlResponse)
- `onControlRequest`: Handles control requests (receives Session and CLIControlRequest, returns CLIControlResponse)
- `onPermissionRequest`: Handles permission requests (receives Session and CLIControlRequest<CLIControlPermissionRequest>, returns Behavior)
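In practice you rarely implement every callback. A minimal sketch, assuming the `SessionEventSimpleConsumers` base class (mentioned under the default timeouts below) provides no-op defaults and can be subclassed anonymously in the same way as `AssistantContentSimpleConsumers` in the streaming example above; how the consumer is attached to a session is not shown here and may differ in the actual API:

```java
// Minimal sketch. Assumptions: SessionEventSimpleConsumers provides no-op
// defaults for the callbacks listed above and can be subclassed anonymously;
// attaching the consumer to a session is omitted because it is not described
// in this README.
SessionEventConsumers consumers = new SessionEventSimpleConsumers() {

  @Override
  public void onSystemMessage(Session session, SDKSystemMessage message) {
    logger.info("System message: {}", message);
  }

  @Override
  public void onAssistantMessage(Session session, SDKAssistantMessage message) {
    // High-level hook; content-level handling (text, tools, thinking) is
    // delegated to an AssistantContentConsumers implementation.
    logger.info("Assistant message received: {}", message);
  }

  @Override
  public void onResultMessage(Session session, SDKResultMessage message) {
    logger.info("Turn result: {}", message);
  }
};
```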
#### AssistantContentConsumers Interface

The `AssistantContentConsumers` interface handles the different types of content within assistant messages:

- `onText`: Handles text content (receives Session and TextAssistantContent)
- `onThinking`: Handles thinking content (receives Session and ThingkingAssistantContent)
- `onToolUse`: Handles tool use content (receives Session and ToolUseAssistantContent)
- `onToolResult`: Handles tool result content (receives Session and ToolResultAssistantContent)
- `onOtherContent`: Handles other content types (receives Session and AssistantContent)
- `onUsage`: Handles usage information (receives Session and AssistantUsage)
- `onPermissionRequest`: Handles permission requests (receives Session and CLIControlPermissionRequest, returns Behavior)
- `onOtherControlRequest`: Handles other control requests (receives Session and ControlRequestPayload, returns ControlResponsePayload)

#### Relationship Between the Interfaces

**Important Note on Event Hierarchy:**

- `SessionEventConsumers` is the **high-level** event processor that handles the different message types (system, assistant, user, etc.)
- `AssistantContentConsumers` is the **low-level** content processor that handles the different types of content within assistant messages (text, tools, thinking, etc.)

**Processor Relationship:**

- `SessionEventConsumers` → `AssistantContentConsumers` (`SessionEventConsumers` uses `AssistantContentConsumers` to process the content within assistant messages)

**Event Derivation Relationships:**

- `onAssistantMessage` → `onText`, `onThinking`, `onToolUse`, `onToolResult`, `onOtherContent`, `onUsage`
- `onPartialAssistantMessage` → `onText`, `onThinking`, `onToolUse`, `onToolResult`, `onOtherContent`
- `onControlRequest` → `onPermissionRequest`, `onOtherControlRequest`

**Event Timeout Relationships:**

Each event handler method has a corresponding timeout method that allows customizing the timeout behavior for that specific event:

- `onSystemMessage` ↔ `onSystemMessageTimeout`
- `onResultMessage` ↔ `onResultMessageTimeout`
- `onAssistantMessage` ↔ `onAssistantMessageTimeout`
- `onPartialAssistantMessage` ↔ `onPartialAssistantMessageTimeout`
- `onUserMessage` ↔ `onUserMessageTimeout`
- `onOtherMessage` ↔ `onOtherMessageTimeout`
- `onControlResponse` ↔ `onControlResponseTimeout`
- `onControlRequest` ↔ `onControlRequestTimeout`

For the `AssistantContentConsumers` timeout methods:

- `onText` ↔ `onTextTimeout`
- `onThinking` ↔ `onThinkingTimeout`
- `onToolUse` ↔ `onToolUseTimeout`
- `onToolResult` ↔ `onToolResultTimeout`
- `onOtherContent` ↔ `onOtherContentTimeout`
- `onPermissionRequest` ↔ `onPermissionRequestTimeout`
- `onOtherControlRequest` ↔ `onOtherControlRequestTimeout`

**Default Timeout Values:**

- `SessionEventSimpleConsumers` default timeout: 180 seconds (`Timeout.TIMEOUT_180_SECONDS`)
- `AssistantContentSimpleConsumers` default timeout: 60 seconds (`Timeout.TIMEOUT_60_SECONDS`)

**Timeout Hierarchy Requirements:**

For proper operation, the following timeout relationships should be maintained (a sketch follows this list):

- The `onAssistantMessageTimeout` return value should be greater than the `onTextTimeout`, `onThinkingTimeout`, `onToolUseTimeout`, `onToolResultTimeout`, and `onOtherContentTimeout` return values
- The `onControlRequestTimeout` return value should be greater than the `onPermissionRequestTimeout` and `onOtherControlRequestTimeout` return values
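One way to respect these constraints is sketched below; it assumes the `*Timeout` methods take no arguments and return a `Timeout` (as used with `TransportOptions` above), which is not spelled out in this README:

```java
// Sketch only. Assumption: the timeout methods take no arguments and return a
// Timeout, mirroring the Timeout values used with TransportOptions above.
AssistantContentSimpleConsumers contentConsumers = new AssistantContentSimpleConsumers() {

  @Override
  public Timeout onTextTimeout() {
    // Per-content timeout: keep it below the whole-message timeout below.
    return new Timeout(60L, TimeUnit.SECONDS);
  }
};

SessionEventSimpleConsumers eventConsumers = new SessionEventSimpleConsumers() {

  @Override
  public Timeout onAssistantMessageTimeout() {
    // Whole-message timeout: must exceed every per-content timeout.
    return new Timeout(180L, TimeUnit.SECONDS);
  }
};
```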
### Transport Options

The `TransportOptions` class allows configuring how the SDK communicates with the Qwen Code CLI:

- `pathToQwenExecutable`: Path to the Qwen Code CLI executable
- `cwd`: Working directory for the CLI process
- `model`: AI model to use for the session
- `permissionMode`: Permission mode that controls tool execution
- `env`: Environment variables to pass to the CLI process
- `maxSessionTurns`: Limits the number of conversation turns in a session
- `coreTools`: List of core tools that should be available to the AI
- `excludeTools`: List of tools to exclude from being available to the AI
- `allowedTools`: List of tools that are pre-approved for use without additional confirmation
- `authType`: Authentication type to use for the session
- `includePartialMessages`: Enables receiving partial messages during streaming responses
- `skillsEnable`: Enables or disables skills functionality for the session
- `turnTimeout`: Timeout for a complete turn of conversation
- `messageTimeout`: Timeout for individual messages within a turn
- `resumeSessionId`: ID of a previous session to resume
- `otherOptions`: Additional command-line options to pass to the CLI

### Session Control Features

The following session-control features are available (a sketch follows this list):

- **Session creation**: Use `QwenCodeCli.newSession()` to create a new session with custom options
- **Session management**: The `Session` class provides methods to send prompts, handle responses, and manage session state
- **Session cleanup**: Always close sessions using `session.close()` to properly terminate the CLI process
- **Session resumption**: Use `setResumeSessionId()` in `TransportOptions` to resume a previous session
- **Session interruption**: Use `session.interrupt()` to interrupt a currently running prompt
- **Dynamic model switching**: Use `session.setModel()` to change the model during a session
- **Dynamic permission mode switching**: Use `session.setPermissionMode()` to change the permission mode during a session
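A minimal lifecycle sketch using only the methods named above; error handling and the prompt-sending call are omitted because their exact signatures are not documented here, and `newSession()` is assumed to accept a `TransportOptions`:

```java
// Lifecycle sketch; assumes newSession(TransportOptions). See the examples
// under src/test/java/com/alibaba/qwen/code/cli/example for the real API.
TransportOptions options = new TransportOptions().setModel("qwen3-coder-flash");

Session session = QwenCodeCli.newSession(options);
try {
  // ... send prompts and consume responses via the Session API ...

  // Switch the model and permission mode mid-session if needed.
  session.setModel("qwen3-coder-plus");
  session.setPermissionMode(PermissionMode.AUTO_EDIT);

  // Interrupt a long-running prompt if necessary.
  session.interrupt();
} finally {
  // Always close the session so the underlying CLI process is terminated.
  session.close();
}
```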
### Thread Pool Configuration

The SDK uses a thread pool for managing concurrent operations, with the following default configuration (an equivalent configuration is sketched after this list):

- **Core Pool Size**: 30 threads
- **Maximum Pool Size**: 100 threads
- **Keep-Alive Time**: 60 seconds
- **Queue Capacity**: 300 tasks (using LinkedBlockingQueue)
- **Thread Naming**: "qwen_code_cli-pool-{number}"
- **Daemon Threads**: false
- **Rejected Execution Handler**: CallerRunsPolicy
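For reference, these defaults correspond roughly to a standard `ThreadPoolExecutor` configured as follows (illustrative only; the SDK builds its own pool internally, so this is not its actual code):

```java
// Illustrative equivalent of the documented defaults; not the SDK's own code.
AtomicInteger counter = new AtomicInteger(1);
ThreadFactory factory = runnable -> {
  Thread thread = new Thread(runnable, "qwen_code_cli-pool-" + counter.getAndIncrement());
  thread.setDaemon(false); // Daemon threads: false
  return thread;
};

ThreadPoolExecutor pool = new ThreadPoolExecutor(
    30,                                         // core pool size
    100,                                        // maximum pool size
    60L, TimeUnit.SECONDS,                      // keep-alive time
    new LinkedBlockingQueue<>(300),             // queue capacity (bounded)
    factory,
    new ThreadPoolExecutor.CallerRunsPolicy()); // rejected-execution handler
```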
## Error Handling

The SDK provides specific exception types for different error scenarios (a sketch follows this list):

- `SessionControlException`: Thrown when there is an issue with session control (creation, initialization, etc.)
- `SessionSendPromptException`: Thrown when there is an issue sending a prompt or receiving a response
- `SessionClosedException`: Thrown when attempting to use a closed session
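A hedged sketch of handling these; whether the exceptions are checked or unchecked is not stated here, so adjust the catch clauses to the actual API:

```java
// Sketch only; adjust to the actual exception hierarchy of the SDK.
try {
  List<String> result = QwenCodeCli.simpleQuery("hello world");
  result.forEach(logger::info);
} catch (SessionControlException e) {
  logger.error("Session creation or initialization failed", e);
} catch (SessionSendPromptException e) {
  logger.error("Sending the prompt or receiving the response failed", e);
} catch (SessionClosedException e) {
  logger.error("The session was already closed", e);
}
```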
## FAQ / Troubleshooting

### Q: Do I need to install the Qwen CLI separately?

A: Yes. The SDK requires Qwen CLI 0.5.5 or higher.

### Q: What Java versions are supported?

A: The SDK requires Java 1.8 or higher.

### Q: How do I handle long-running requests?

A: The SDK includes timeout utilities. You can configure timeouts using the `Timeout` class in `TransportOptions`.

### Q: Why are some tools not executing?

A: This is most likely due to permission modes. Check your permission mode settings and consider using `allowedTools` to pre-approve certain tools.

### Q: How do I resume a previous session?

A: Use the `setResumeSessionId()` method in `TransportOptions` to resume a previous session.

### Q: Can I customize the environment for the CLI process?

A: Yes. Use the `setEnv()` method in `TransportOptions` to pass environment variables to the CLI process.

## License

Apache-2.0 - see [LICENSE](./LICENSE) for details.
@@ -136,95 +136,6 @@ Settings are organized into categories. All settings should be placed within the

- `"./custom-logs"` - Logs to `./custom-logs` relative to the current directory
- `"/tmp/openai-logs"` - Logs to the absolute path `/tmp/openai-logs`

#### modelProviders

Use `modelProviders` to declare curated model lists per auth type that the `/model` picker can switch between. Keys must be valid auth types (`openai`, `anthropic`, `gemini`, `vertex-ai`, etc.). Each entry requires an `id` and **must include `envKey`**, with optional `name`, `description`, `baseUrl`, and `generationConfig`. Credentials are never persisted in settings; the runtime reads them from `process.env[envKey]`. Qwen OAuth models remain hard-coded and cannot be overridden.

##### Example

```json
{
  "modelProviders": {
    "openai": [
      {
        "id": "gpt-4o",
        "name": "GPT-4o",
        "envKey": "OPENAI_API_KEY",
        "baseUrl": "https://api.openai.com/v1",
        "generationConfig": {
          "timeout": 60000,
          "maxRetries": 3,
          "samplingParams": { "temperature": 0.2 }
        }
      }
    ],
    "anthropic": [
      {
        "id": "claude-3-5-sonnet",
        "envKey": "ANTHROPIC_API_KEY",
        "baseUrl": "https://api.anthropic.com/v1"
      }
    ],
    "gemini": [
      {
        "id": "gemini-2.0-flash",
        "name": "Gemini 2.0 Flash",
        "envKey": "GEMINI_API_KEY",
        "baseUrl": "https://generativelanguage.googleapis.com"
      }
    ],
    "vertex-ai": [
      {
        "id": "gemini-1.5-pro-vertex",
        "envKey": "GOOGLE_API_KEY",
        "baseUrl": "https://generativelanguage.googleapis.com"
      }
    ]
  }
}
```

> [!note]
> Only the `/model` command exposes non-default auth types. Anthropic, Gemini, Vertex AI, etc., must be defined via `modelProviders`. The `/auth` command intentionally lists only the built-in Qwen OAuth and OpenAI flows.

##### Resolution layers and atomicity

The effective auth/model/credential values are chosen per field using the following precedence (first present wins). You can combine `--auth-type` with `--model` to point directly at a provider entry; these CLI flags run before other layers.

| Layer (highest → lowest) | authType | model | apiKey | baseUrl | apiKeyEnvKey | proxy |
| --- | --- | --- | --- | --- | --- | --- |
| Programmatic overrides | `/auth` | `/auth` input | `/auth` input | `/auth` input | — | — |
| Model provider selection | — | `modelProvider.id` | `env[modelProvider.envKey]` | `modelProvider.baseUrl` | `modelProvider.envKey` | — |
| CLI arguments | `--auth-type` | `--model` | `--openaiApiKey` (or provider-specific equivalents) | `--openaiBaseUrl` (or provider-specific equivalents) | — | — |
| Environment variables | — | Provider-specific mapping (e.g. `OPENAI_MODEL`) | Provider-specific mapping (e.g. `OPENAI_API_KEY`) | Provider-specific mapping (e.g. `OPENAI_BASE_URL`) | — | — |
| Settings (`settings.json`) | `security.auth.selectedType` | `model.name` | `security.auth.apiKey` | `security.auth.baseUrl` | — | — |
| Default / computed | Falls back to `AuthType.QWEN_OAUTH` | Built-in default (OpenAI ⇒ `qwen3-coder-plus`) | — | — | — | `Config.getProxy()` if configured |

\*When present, CLI auth flags override settings. Otherwise, `security.auth.selectedType` or the implicit default determines the auth type. Qwen OAuth and OpenAI are the only auth types surfaced without extra configuration.

Model-provider sourced values are applied atomically: once a provider model is active, every field it defines is protected from lower layers until you manually clear credentials via `/auth`. The final `generationConfig` is the projection across all layers: lower layers only fill gaps left by higher ones, and the provider layer remains impenetrable.

The merge strategy for `modelProviders` is REPLACE: the entire `modelProviders` section from project settings overrides the corresponding section in user settings, rather than the two being merged.

##### Generation config layering

Per-field precedence for `generationConfig`:

1. Programmatic overrides (e.g. runtime `/model`, `/auth` changes)
2. `modelProviders[authType][].generationConfig`
3. `settings.model.generationConfig`
4. Content-generator defaults (`getDefaultGenerationConfig` for OpenAI, `getParameterValue` for Gemini, etc.)

`samplingParams` is treated atomically; provider values replace the entire object. Defaults from the content generator apply last, so each provider retains its tuned baseline.

##### Selection persistence and recommendations

> [!important]
> Define `modelProviders` in the user-scope `~/.qwen/settings.json` whenever possible and avoid persisting credential overrides in any scope. Keeping the provider catalog in user settings prevents merge/override conflicts between project and user scopes and ensures `/auth` and `/model` updates always write back to a consistent scope.

- `/model` and `/auth` persist `model.name` (where applicable) and `security.auth.selectedType` to the closest writable scope that already defines `modelProviders`; otherwise they fall back to the user scope. This keeps workspace/user files in sync with the active provider catalog.
- Without `modelProviders`, the resolver mixes CLI/env/settings layers, which is fine for single-provider setups but cumbersome when switching frequently. Define provider catalogs whenever multi-model workflows are common so that switches stay atomic, source-attributed, and debuggable.

#### context

| Setting | Type | Description | Default |
@@ -470,7 +381,7 @@ Arguments passed directly when running the CLI can override other configurations

| `--telemetry-otlp-protocol` | | Sets the OTLP protocol for telemetry (`grpc` or `http`). | | Defaults to `grpc`. See [telemetry](../../developers/development/telemetry) for more information. |
| `--telemetry-log-prompts` | | Enables logging of prompts for telemetry. | | See [telemetry](../../developers/development/telemetry) for more information. |
| `--checkpointing` | | Enables [checkpointing](../features/checkpointing). | | |
| `--acp` | | Enables ACP mode (Agent Control Protocol). Useful for IDE/editor integrations like [Zed](../integration-zed). | | Stable. Replaces the deprecated `--experimental-acp` flag. |
| `--experimental-acp` | | Enables ACP mode (Agent Control Protocol). Useful for IDE/editor integrations like [Zed](../integration-zed). | | Experimental. |
| `--experimental-skills` | | Enables experimental [Agent Skills](../features/skills) (registers the `skill` tool and loads Skills from `.qwen/skills/` and `~/.qwen/skills/`). | | Experimental. |
| `--extensions` | `-e` | Specifies a list of extensions to use for the session. | Extension names | If not provided, all available extensions are used. Use the special term `qwen -e none` to disable all extensions. Example: `qwen -e my-extension -e my-other-extension` |
| `--list-extensions` | `-l` | Lists all available extensions and exits. | | |
@@ -49,8 +49,6 @@ Cross-platform sandboxing with complete process isolation.

By default, Qwen Code uses a published sandbox image (configured in the CLI package) and will pull it as needed.

The container sandbox mounts your workspace and your `~/.qwen` directory into the container so auth and settings persist between runs.

**Best for**: Strong isolation on any OS, consistent tooling inside a known image.

### Choosing a method

@@ -159,7 +157,7 @@ For a working allowlist-style proxy example, see: [Example Proxy Script](/develo

## Linux UID/GID handling

On Linux, Qwen Code defaults to enabling UID/GID mapping so the sandbox runs as your user (and reuses the mounted `~/.qwen`). Override with:
The sandbox automatically handles user permissions on Linux. Override these permissions with:

```bash
export SANDBOX_SET_UID_GID=true # Force host UID/GID
@@ -32,7 +32,7 @@
"Qwen Code": {
  "type": "custom",
  "command": "qwen",
  "args": ["--acp"],
  "args": ["--experimental-acp"],
  "env": {}
}
```
@@ -1,6 +1,5 @@
# Qwen Code overview

[](https://npm-compare.com/@qwen-code/qwen-code)
[](https://npm-compare.com/@qwen-code/qwen-code)
[](https://www.npmjs.com/package/@qwen-code/qwen-code)

> Learn about Qwen Code, Qwen's agentic coding tool that lives in your terminal and helps you turn ideas into code faster than ever before.
@@ -9,18 +9,11 @@ This guide provides solutions to common issues and debugging tips, including top

## Authentication or login errors

- **Error: `UNABLE_TO_GET_ISSUER_CERT_LOCALLY`, `UNABLE_TO_VERIFY_LEAF_SIGNATURE`, or `unable to get local issuer certificate`**
- **Error: `UNABLE_TO_GET_ISSUER_CERT_LOCALLY` or `unable to get local issuer certificate`**
  - **Cause:** You may be on a corporate network with a firewall that intercepts and inspects SSL/TLS traffic. This often requires a custom root CA certificate to be trusted by Node.js.
  - **Solution:** Set the `NODE_EXTRA_CA_CERTS` environment variable to the absolute path of your corporate root CA certificate file.
    - Example: `export NODE_EXTRA_CA_CERTS=/path/to/your/corporate-ca.crt`

- **Error: `Device authorization flow failed: fetch failed`**
  - **Cause:** Node.js could not reach the Qwen OAuth endpoints (often a proxy or SSL/TLS trust issue). When available, Qwen Code will also print the underlying error cause (for example: `UNABLE_TO_VERIFY_LEAF_SIGNATURE`).
  - **Solution:**
    - Confirm you can access `https://chat.qwen.ai` from the same machine/network.
    - If you are behind a proxy, set it via `qwen --proxy <url>` (or the `proxy` setting in `settings.json`).
    - If your network uses a corporate TLS-inspection CA, set `NODE_EXTRA_CA_CERTS` as described above.

- **Issue: Unable to display UI after authentication failure**
  - **Cause:** If authentication fails after selecting an authentication type, the `security.auth.selectedType` setting may be persisted in `settings.json`. On restart, the CLI may get stuck trying to authenticate with the failed auth type and fail to display the UI.
  - **Solution:** Clear the `security.auth.selectedType` configuration item in your `settings.json` file:
@@ -80,11 +80,10 @@ type PermissionHandler = (

/**
 * Sets up an ACP test environment with all necessary utilities.
 * @param useNewFlag - If true, uses --acp; if false, uses --experimental-acp (for backward compatibility testing)
 */
function setupAcpTest(
  rig: TestRig,
  options?: { permissionHandler?: PermissionHandler; useNewFlag?: boolean },
  options?: { permissionHandler?: PermissionHandler },
) {
  const pending = new Map<number, PendingRequest>();
  let nextRequestId = 1;
@@ -96,13 +95,9 @@ function setupAcpTest(
  const permissionHandler =
    options?.permissionHandler ?? (() => ({ optionId: 'proceed_once' }));

  // Use --acp by default, but allow testing with --experimental-acp for backward compatibility
  const acpFlag =
    options?.useNewFlag !== false ? '--acp' : '--experimental-acp';

  const agent = spawn(
    'node',
    [rig.bundlePath, acpFlag, '--no-chat-recording'],
    [rig.bundlePath, '--experimental-acp', '--no-chat-recording'],
    {
      cwd: rig.testDir!,
      stdio: ['pipe', 'pipe', 'pipe'],
@@ -626,99 +621,3 @@ function setupAcpTest(
    }
  });
});

(IS_SANDBOX ? describe.skip : describe)(
  'acp flag backward compatibility',
  () => {
    it('should work with deprecated --experimental-acp flag and show warning', async () => {
      const rig = new TestRig();
      rig.setup('acp backward compatibility');

      const { sendRequest, cleanup, stderr } = setupAcpTest(rig, {
        useNewFlag: false,
      });

      try {
        const initResult = await sendRequest('initialize', {
          protocolVersion: 1,
          clientCapabilities: {
            fs: { readTextFile: true, writeTextFile: true },
          },
        });
        expect(initResult).toBeDefined();

        // Verify deprecation warning is shown
        const stderrOutput = stderr.join('');
        expect(stderrOutput).toContain('--experimental-acp is deprecated');
        expect(stderrOutput).toContain('Please use --acp instead');

        await sendRequest('authenticate', { methodId: 'openai' });

        const newSession = (await sendRequest('session/new', {
          cwd: rig.testDir!,
          mcpServers: [],
        })) as { sessionId: string };
        expect(newSession.sessionId).toBeTruthy();

        // Verify functionality still works
        const promptResult = await sendRequest('session/prompt', {
          sessionId: newSession.sessionId,
          prompt: [{ type: 'text', text: 'Say hello.' }],
        });
        expect(promptResult).toBeDefined();
      } catch (e) {
        if (stderr.length) {
          console.error('Agent stderr:', stderr.join(''));
        }
        throw e;
      } finally {
        await cleanup();
      }
    });

    it('should work with new --acp flag without warnings', async () => {
      const rig = new TestRig();
      rig.setup('acp new flag');

      const { sendRequest, cleanup, stderr } = setupAcpTest(rig, {
        useNewFlag: true,
      });

      try {
        const initResult = await sendRequest('initialize', {
          protocolVersion: 1,
          clientCapabilities: {
            fs: { readTextFile: true, writeTextFile: true },
          },
        });
        expect(initResult).toBeDefined();

        // Verify no deprecation warning is shown
        const stderrOutput = stderr.join('');
        expect(stderrOutput).not.toContain('--experimental-acp is deprecated');

        await sendRequest('authenticate', { methodId: 'openai' });

        const newSession = (await sendRequest('session/new', {
          cwd: rig.testDir!,
          mcpServers: [],
        })) as { sessionId: string };
        expect(newSession.sessionId).toBeTruthy();

        // Verify functionality works
        const promptResult = await sendRequest('session/prompt', {
          sessionId: newSession.sessionId,
          prompt: [{ type: 'text', text: 'Say hello.' }],
        });
        expect(promptResult).toBeDefined();
      } catch (e) {
        if (stderr.length) {
          console.error('Agent stderr:', stderr.join(''));
        }
        throw e;
      } finally {
        await cleanup();
      }
    });
  },
);
55
package-lock.json
generated
55
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@qwen-code/qwen-code",
|
||||
"version": "0.7.0",
|
||||
"version": "0.6.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@qwen-code/qwen-code",
|
||||
"version": "0.7.0",
|
||||
"version": "0.6.0",
|
||||
"workspaces": [
|
||||
"packages/*"
|
||||
],
|
||||
@@ -3875,6 +3875,17 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/prompts": {
|
||||
"version": "2.4.9",
|
||||
"resolved": "https://registry.npmjs.org/@types/prompts/-/prompts-2.4.9.tgz",
|
||||
"integrity": "sha512-qTxFi6Buiu8+50/+3DGIWLHM6QuWsEKugJnnP6iv2Mc4ncxE4A/OJkjuVOA+5X0X1S/nq5VJRa8Lu+nwcvbrKA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"kleur": "^3.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/prop-types": {
|
||||
"version": "15.7.15",
|
||||
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz",
|
||||
@@ -10984,6 +10995,15 @@
|
||||
"json-buffer": "3.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/kleur": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz",
|
||||
"integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/ky": {
|
||||
"version": "1.8.1",
|
||||
"resolved": "https://registry.npmjs.org/ky/-/ky-1.8.1.tgz",
|
||||
@@ -13393,6 +13413,19 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/prompts": {
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz",
|
||||
"integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"kleur": "^3.0.3",
|
||||
"sisteransi": "^1.0.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/prop-types": {
|
||||
"version": "15.8.1",
|
||||
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz",
|
||||
@@ -14753,6 +14786,12 @@
|
||||
"url": "https://github.com/steveukx/git-js?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/sisteransi": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
|
||||
"integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/slash": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz",
|
||||
@@ -17316,7 +17355,7 @@
|
||||
},
|
||||
"packages/cli": {
|
||||
"name": "@qwen-code/qwen-code",
|
||||
"version": "0.7.0",
|
||||
"version": "0.6.0",
|
||||
"dependencies": {
|
||||
"@google/genai": "1.30.0",
|
||||
"@iarna/toml": "^2.2.5",
|
||||
@@ -17338,6 +17377,7 @@
|
||||
"ink-spinner": "^5.0.0",
|
||||
"lowlight": "^3.3.0",
|
||||
"open": "^10.1.2",
|
||||
"prompts": "^2.4.2",
|
||||
"qrcode-terminal": "^0.12.0",
|
||||
"react": "^19.1.0",
|
||||
"read-package-up": "^11.0.0",
|
||||
@@ -17366,6 +17406,7 @@
|
||||
"@types/diff": "^7.0.2",
|
||||
"@types/dotenv": "^6.1.1",
|
||||
"@types/node": "^20.11.24",
|
||||
"@types/prompts": "^2.4.9",
|
||||
"@types/react": "^19.1.8",
|
||||
"@types/react-dom": "^19.1.6",
|
||||
"@types/semver": "^7.7.0",
|
||||
@@ -17953,7 +17994,7 @@
|
||||
},
|
||||
"packages/core": {
|
||||
"name": "@qwen-code/qwen-code-core",
|
||||
"version": "0.7.0",
|
||||
"version": "0.6.0",
|
||||
"hasInstallScript": true,
|
||||
"dependencies": {
|
||||
"@anthropic-ai/sdk": "^0.36.1",
|
||||
@@ -18593,7 +18634,7 @@
|
||||
},
|
||||
"packages/sdk-typescript": {
|
||||
"name": "@qwen-code/sdk",
|
||||
"version": "0.1.2",
|
||||
"version": "0.1.0",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.25.1",
|
||||
@@ -21413,7 +21454,7 @@
|
||||
},
|
||||
"packages/test-utils": {
|
||||
"name": "@qwen-code/qwen-code-test-utils",
|
||||
"version": "0.7.0",
|
||||
"version": "0.6.0",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"devDependencies": {
|
||||
@@ -21425,7 +21466,7 @@
|
||||
},
|
||||
"packages/vscode-ide-companion": {
|
||||
"name": "qwen-code-vscode-ide-companion",
|
||||
"version": "0.7.0",
|
||||
"version": "0.6.0",
|
||||
"license": "LICENSE",
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.25.1",
|
||||
|
||||
@@ -1,6 +1,6 @@
{
  "name": "@qwen-code/qwen-code",
  "version": "0.7.0",
  "version": "0.6.0",
  "engines": {
    "node": ">=20.0.0"
  },
@@ -13,7 +13,7 @@
    "url": "git+https://github.com/QwenLM/qwen-code.git"
  },
  "config": {
    "sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.7.0"
    "sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.6.0"
  },
  "scripts": {
    "start": "cross-env node scripts/start.js",

@@ -1,6 +1,6 @@
{
  "name": "@qwen-code/qwen-code",
  "version": "0.7.0",
  "version": "0.6.0",
  "description": "Qwen Code",
  "repository": {
    "type": "git",
@@ -33,7 +33,7 @@
    "dist"
  ],
  "config": {
    "sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.7.0"
    "sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.6.0"
  },
  "dependencies": {
    "@google/genai": "1.30.0",
@@ -46,6 +46,7 @@
    "comment-json": "^4.2.5",
    "diff": "^7.0.0",
    "dotenv": "^17.1.0",
    "prompts": "^2.4.2",
    "fzf": "^0.5.2",
    "glob": "^10.5.0",
    "highlight.js": "^11.11.1",
@@ -79,6 +80,7 @@
    "@types/command-exists": "^1.2.3",
    "@types/diff": "^7.0.2",
    "@types/dotenv": "^6.1.1",
    "@types/prompts": "^2.4.9",
    "@types/node": "^20.11.24",
    "@types/react": "^19.1.8",
    "@types/react-dom": "^19.1.6",
@@ -27,10 +27,8 @@ import { Readable, Writable } from 'node:stream';
import type { LoadedSettings } from '../config/settings.js';
import { SettingScope } from '../config/settings.js';
import { z } from 'zod';
import { ExtensionStorage, type Extension } from '../config/extension.js';
import type { CliArgs } from '../config/config.js';
import { loadCliConfig } from '../config/config.js';
import { ExtensionEnablementManager } from '../config/extensions/extensionEnablement.js';

// Import the modular Session class
import { Session } from './session/Session.js';
@@ -38,7 +36,6 @@ import { Session } from './session/Session.js';
export async function runAcpAgent(
  config: Config,
  settings: LoadedSettings,
  extensions: Extension[],
  argv: CliArgs,
) {
  const stdout = Writable.toWeb(process.stdout) as WritableStream;
@@ -51,8 +48,7 @@
  console.debug = console.error;

  new acp.AgentSideConnection(
    (client: acp.Client) =>
      new GeminiAgent(config, settings, extensions, argv, client),
    (client: acp.Client) => new GeminiAgent(config, settings, argv, client),
    stdout,
    stdin,
  );
@@ -65,7 +61,6 @@ class GeminiAgent {
  constructor(
    private config: Config,
    private settings: LoadedSettings,
    private extensions: Extension[],
    private argv: CliArgs,
    private client: acp.Client,
  ) {}
@@ -215,16 +210,7 @@
      continue: false,
    };

    const config = await loadCliConfig(
      settings,
      this.extensions,
      new ExtensionEnablementManager(
        ExtensionStorage.getUserExtensionsDir(),
        this.argv.extensions,
      ),
      argvForSession,
      cwd,
    );
    const config = await loadCliConfig(settings, argvForSession, cwd);

    await config.initialize();
    return config;
packages/cli/src/commands/extensions/consent.ts (new file, 87 lines)

@@ -0,0 +1,87 @@
import type { ConfirmationRequest } from '../../ui/types.js';

/**
 * Requests consent from the user to perform an action, by reading a Y/n
 * character from stdin.
 *
 * This should not be called from interactive mode as it will break the CLI.
 *
 * @param consentDescription The description of the thing they will be consenting to.
 * @returns boolean, whether they consented or not.
 */
export async function requestConsentNonInteractive(
  consentDescription: string,
): Promise<boolean> {
  console.info(consentDescription);
  const result = await promptForConsentNonInteractive(
    'Do you want to continue? [Y/n]: ',
  );
  return result;
}

/**
 * Requests consent from the user to perform an action, in interactive mode.
 *
 * This should not be called from non-interactive mode as it will not work.
 *
 * @param consentDescription The description of the thing they will be consenting to.
 * @param addExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
 * @returns boolean, whether they consented or not.
 */
export async function requestConsentInteractive(
  consentDescription: string,
  addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
  return promptForConsentInteractive(
    consentDescription + '\n\nDo you want to continue?',
    addExtensionUpdateConfirmationRequest,
  );
}

/**
 * Asks users a prompt and awaits a y/n response on stdin.
 *
 * This should not be called from interactive mode as it will break the CLI.
 *
 * @param prompt A yes/no prompt to ask the user
 * @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
 */
async function promptForConsentNonInteractive(
  prompt: string,
): Promise<boolean> {
  const readline = await import('node:readline');
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  return new Promise((resolve) => {
    rl.question(prompt, (answer) => {
      rl.close();
      resolve(['y', ''].includes(answer.trim().toLowerCase()));
    });
  });
}

/**
 * Asks users an interactive yes/no prompt.
 *
 * This should not be called from non-interactive mode as it will break the CLI.
 *
 * @param prompt A markdown prompt to ask the user
 * @param addExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
 * @returns Whether or not the user answers yes.
 */
async function promptForConsentInteractive(
  prompt: string,
  addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
): Promise<boolean> {
  return new Promise<boolean>((resolve) => {
    addExtensionUpdateConfirmationRequest({
      prompt,
      onConfirm: (resolvedConfirmed) => {
        resolve(resolvedConfirmed);
      },
    });
  });
}
@@ -5,21 +5,22 @@
|
||||
*/
|
||||
|
||||
import { type CommandModule } from 'yargs';
|
||||
import { disableExtension } from '../../config/extension.js';
|
||||
import { SettingScope } from '../../config/settings.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
interface DisableArgs {
|
||||
name: string;
|
||||
scope?: string;
|
||||
}
|
||||
|
||||
export function handleDisable(args: DisableArgs) {
|
||||
export async function handleDisable(args: DisableArgs) {
|
||||
const extensionManager = await getExtensionManager();
|
||||
try {
|
||||
if (args.scope?.toLowerCase() === 'workspace') {
|
||||
disableExtension(args.name, SettingScope.Workspace);
|
||||
extensionManager.disableExtension(args.name, SettingScope.Workspace);
|
||||
} else {
|
||||
disableExtension(args.name, SettingScope.User);
|
||||
extensionManager.disableExtension(args.name, SettingScope.User);
|
||||
}
|
||||
console.log(
|
||||
`Extension "${args.name}" successfully disabled for scope "${args.scope}".`,
|
||||
@@ -61,8 +62,8 @@ export const disableCommand: CommandModule = {
|
||||
}
|
||||
return true;
|
||||
}),
|
||||
handler: (argv) => {
|
||||
handleDisable({
|
||||
handler: async (argv) => {
|
||||
await handleDisable({
|
||||
name: argv['name'] as string,
|
||||
scope: argv['scope'] as string,
|
||||
});
|
||||
|
||||
@@ -6,20 +6,22 @@
|
||||
|
||||
import { type CommandModule } from 'yargs';
|
||||
import { FatalConfigError, getErrorMessage } from '@qwen-code/qwen-code-core';
|
||||
import { enableExtension } from '../../config/extension.js';
|
||||
import { SettingScope } from '../../config/settings.js';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
interface EnableArgs {
|
||||
name: string;
|
||||
scope?: string;
|
||||
}
|
||||
|
||||
export function handleEnable(args: EnableArgs) {
|
||||
export async function handleEnable(args: EnableArgs) {
|
||||
const extensionManager = await getExtensionManager();
|
||||
|
||||
try {
|
||||
if (args.scope?.toLowerCase() === 'workspace') {
|
||||
enableExtension(args.name, SettingScope.Workspace);
|
||||
extensionManager.enableExtension(args.name, SettingScope.Workspace);
|
||||
} else {
|
||||
enableExtension(args.name, SettingScope.User);
|
||||
extensionManager.enableExtension(args.name, SettingScope.User);
|
||||
}
|
||||
if (args.scope) {
|
||||
console.log(
|
||||
@@ -66,8 +68,8 @@ export const enableCommand: CommandModule = {
|
||||
}
|
||||
return true;
|
||||
}),
|
||||
handler: (argv) => {
|
||||
handleEnable({
|
||||
handler: async (argv) => {
|
||||
await handleEnable({
|
||||
name: argv['name'] as string,
|
||||
scope: argv['scope'] as string,
|
||||
});
|
||||
|
||||
@@ -5,58 +5,64 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
|
||||
import {
|
||||
installExtension,
|
||||
requestConsentNonInteractive,
|
||||
} from '../../config/extension.js';
|
||||
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
|
||||
ExtensionManager,
|
||||
parseInstallSource,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { stat } from 'node:fs/promises';
|
||||
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
import { requestConsentNonInteractive } from './consent.js';
|
||||
|
||||
interface InstallArgs {
|
||||
source: string;
|
||||
ref?: string;
|
||||
autoUpdate?: boolean;
|
||||
allowPreRelease?: boolean;
|
||||
consent?: boolean;
|
||||
}
|
||||
|
||||
export async function handleInstall(args: InstallArgs) {
|
||||
try {
|
||||
let installMetadata: ExtensionInstallMetadata;
|
||||
const { source } = args;
|
||||
const installMetadata = await parseInstallSource(args.source);
|
||||
|
||||
if (
|
||||
source.startsWith('http://') ||
|
||||
source.startsWith('https://') ||
|
||||
source.startsWith('git@') ||
|
||||
source.startsWith('sso://')
|
||||
installMetadata.type !== 'git' &&
|
||||
installMetadata.type !== 'github-release'
|
||||
) {
|
||||
installMetadata = {
|
||||
source,
|
||||
type: 'git',
|
||||
ref: args.ref,
|
||||
autoUpdate: args.autoUpdate,
|
||||
};
|
||||
} else {
|
||||
if (args.ref || args.autoUpdate) {
|
||||
throw new Error(
|
||||
'--ref and --auto-update are not applicable for local extensions.',
|
||||
'--ref and --auto-update are not applicable for marketplace extensions.',
|
||||
);
|
||||
}
|
||||
try {
|
||||
await stat(source);
|
||||
installMetadata = {
|
||||
source,
|
||||
type: 'local',
|
||||
};
|
||||
} catch {
|
||||
throw new Error('Install source not found.');
|
||||
}
|
||||
}
|
||||
|
||||
const name = await installExtension(
|
||||
installMetadata,
|
||||
requestConsentNonInteractive,
|
||||
const requestConsent = args.consent
|
||||
? () => Promise.resolve(true)
|
||||
: requestConsentNonInteractive;
|
||||
const workspaceDir = process.cwd();
|
||||
const extensionManager = new ExtensionManager({
|
||||
workspaceDir,
|
||||
isWorkspaceTrusted: !!isWorkspaceTrusted(
|
||||
loadSettings(workspaceDir).merged,
|
||||
),
|
||||
requestConsent,
|
||||
});
|
||||
await extensionManager.refreshCache();
|
||||
|
||||
const extension = await extensionManager.installExtension(
|
||||
{
|
||||
...installMetadata,
|
||||
ref: args.ref,
|
||||
autoUpdate: args.autoUpdate,
|
||||
allowPreRelease: args.allowPreRelease,
|
||||
},
|
||||
requestConsent,
|
||||
);
|
||||
console.log(
|
||||
`Extension "${extension.name}" installed successfully and enabled.`,
|
||||
);
|
||||
console.log(`Extension "${name}" installed successfully and enabled.`);
|
||||
} catch (error) {
|
||||
console.error(getErrorMessage(error));
|
||||
process.exit(1);
|
||||
@@ -65,11 +71,13 @@ export async function handleInstall(args: InstallArgs) {
|
||||
|
||||
export const installCommand: CommandModule = {
|
||||
command: 'install <source>',
|
||||
describe: 'Installs an extension from a git repository URL or a local path.',
|
||||
describe:
|
||||
'Installs an extension from a git repository URL, local path, or claude marketplace (marketplace-url:plugin-name).',
|
||||
builder: (yargs) =>
|
||||
yargs
|
||||
.positional('source', {
|
||||
describe: 'The github URL or local path of the extension to install.',
|
||||
describe:
|
||||
'The github URL, local path, or marketplace source (marketplace-url:plugin-name) of the extension to install.',
|
||||
type: 'string',
|
||||
demandOption: true,
|
||||
})
|
||||
@@ -81,6 +89,16 @@ export const installCommand: CommandModule = {
|
||||
describe: 'Enable auto-update for this extension.',
|
||||
type: 'boolean',
|
||||
})
|
||||
.option('pre-release', {
|
||||
describe: 'Enable pre-release versions for this extension.',
|
||||
type: 'boolean',
|
||||
})
|
||||
.option('consent', {
|
||||
describe:
|
||||
'Acknowledge the security risks of installing an extension and skip the confirmation prompt.',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
})
|
||||
.check((argv) => {
|
||||
if (!argv.source) {
|
||||
throw new Error('The source argument must be provided.');
|
||||
@@ -92,6 +110,8 @@ export const installCommand: CommandModule = {
|
||||
source: argv['source'] as string,
|
||||
ref: argv['ref'] as string | undefined,
|
||||
autoUpdate: argv['auto-update'] as boolean | undefined,
|
||||
allowPreRelease: argv['pre-release'] as boolean | undefined,
|
||||
consent: argv['consent'] as boolean | undefined,
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
@@ -5,13 +5,10 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
import {
|
||||
installExtension,
|
||||
requestConsentNonInteractive,
|
||||
} from '../../config/extension.js';
|
||||
import type { ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
|
||||
|
||||
import { type ExtensionInstallMetadata } from '@qwen-code/qwen-code-core';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { requestConsentNonInteractive } from './consent.js';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
interface InstallArgs {
|
||||
path: string;
|
||||
@@ -23,12 +20,14 @@ export async function handleLink(args: InstallArgs) {
|
||||
source: args.path,
|
||||
type: 'link',
|
||||
};
|
||||
const extensionName = await installExtension(
|
||||
const extensionManager = await getExtensionManager();
|
||||
|
||||
const extension = await extensionManager.installExtension(
|
||||
installMetadata,
|
||||
requestConsentNonInteractive,
|
||||
);
|
||||
console.log(
|
||||
`Extension "${extensionName}" linked successfully and enabled.`,
|
||||
`Extension "${extension.name}" linked successfully and enabled.`,
|
||||
);
|
||||
} catch (error) {
|
||||
console.error(getErrorMessage(error));
|
||||
|
||||
@@ -5,19 +5,23 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
import { loadUserExtensions, toOutputString } from '../../config/extension.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
export async function handleList() {
|
||||
try {
|
||||
const extensions = loadUserExtensions();
|
||||
const extensionManager = await getExtensionManager();
|
||||
const extensions = extensionManager.getLoadedExtensions();
|
||||
|
||||
if (extensions.length === 0) {
|
||||
console.log('No extensions installed.');
|
||||
return;
|
||||
}
|
||||
console.log(
|
||||
extensions
|
||||
.map((extension, _): string => toOutputString(extension, process.cwd()))
|
||||
.map((extension, _): string =>
|
||||
extensionManager.toOutputString(extension, process.cwd()),
|
||||
)
|
||||
.join('\n\n'),
|
||||
);
|
||||
} catch (error) {
|
||||
|
||||
@@ -5,8 +5,11 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
import { uninstallExtension } from '../../config/extension.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { ExtensionManager } from '@qwen-code/qwen-code-core';
|
||||
import { requestConsentNonInteractive } from './consent.js';
|
||||
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
|
||||
interface UninstallArgs {
|
||||
name: string; // can be extension name or source URL.
|
||||
@@ -14,7 +17,16 @@ interface UninstallArgs {
|
||||
|
||||
export async function handleUninstall(args: UninstallArgs) {
|
||||
try {
|
||||
await uninstallExtension(args.name);
|
||||
const workspaceDir = process.cwd();
|
||||
const extensionManager = new ExtensionManager({
|
||||
workspaceDir,
|
||||
requestConsent: requestConsentNonInteractive,
|
||||
isWorkspaceTrusted: !!isWorkspaceTrusted(
|
||||
loadSettings(workspaceDir).merged,
|
||||
),
|
||||
});
|
||||
await extensionManager.refreshCache();
|
||||
await extensionManager.uninstallExtension(args.name, false);
|
||||
console.log(`Extension "${args.name}" successfully uninstalled.`);
|
||||
} catch (error) {
|
||||
console.error(getErrorMessage(error));
|
||||
|
||||
@@ -5,22 +5,13 @@
|
||||
*/
|
||||
|
||||
import type { CommandModule } from 'yargs';
|
||||
import {
|
||||
loadExtensions,
|
||||
annotateActiveExtensions,
|
||||
ExtensionStorage,
|
||||
requestConsentNonInteractive,
|
||||
} from '../../config/extension.js';
|
||||
import {
|
||||
updateAllUpdatableExtensions,
|
||||
type ExtensionUpdateInfo,
|
||||
checkForAllExtensionUpdates,
|
||||
updateExtension,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { checkForExtensionUpdate } from '../../config/extensions/github.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
|
||||
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
|
||||
import {
|
||||
checkForExtensionUpdate,
|
||||
type ExtensionUpdateInfo,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { getExtensionManager } from './utils.js';
|
||||
|
||||
interface UpdateArgs {
|
||||
name?: string;
|
||||
@@ -31,19 +22,9 @@ const updateOutput = (info: ExtensionUpdateInfo) =>
|
||||
`Extension "${info.name}" successfully updated: ${info.originalVersion} → ${info.updatedVersion}.`;
|
||||
|
||||
export async function handleUpdate(args: UpdateArgs) {
|
||||
const workingDir = process.cwd();
|
||||
const extensionEnablementManager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
// Force enable named extensions, otherwise we will only update the enabled
|
||||
// ones.
|
||||
args.name ? [args.name] : [],
|
||||
);
|
||||
const allExtensions = loadExtensions(extensionEnablementManager);
|
||||
const extensions = annotateActiveExtensions(
|
||||
allExtensions,
|
||||
workingDir,
|
||||
extensionEnablementManager,
|
||||
);
|
||||
const extensionManager = await getExtensionManager();
|
||||
const extensions = extensionManager.getLoadedExtensions();
|
||||
|
||||
if (args.name) {
|
||||
try {
|
||||
const extension = extensions.find(
|
||||
@@ -53,25 +34,23 @@ export async function handleUpdate(args: UpdateArgs) {
|
||||
console.log(`Extension "${args.name}" not found.`);
|
||||
return;
|
||||
}
|
||||
let updateState: ExtensionUpdateState | undefined;
|
||||
if (!extension.installMetadata) {
|
||||
console.log(
|
||||
`Unable to install extension "${args.name}" due to missing install metadata`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
await checkForExtensionUpdate(extension, (newState) => {
|
||||
updateState = newState;
|
||||
});
|
||||
const updateState = await checkForExtensionUpdate(
|
||||
extension,
|
||||
extensionManager,
|
||||
);
|
||||
if (updateState !== ExtensionUpdateState.UPDATE_AVAILABLE) {
|
||||
console.log(`Extension "${args.name}" is already up to date.`);
|
||||
return;
|
||||
}
|
||||
// TODO(chrstnb): we should list extensions if the requested extension is not installed.
|
||||
const updatedExtensionInfo = (await updateExtension(
|
||||
const updatedExtensionInfo = (await extensionManager.updateExtension(
|
||||
extension,
|
||||
workingDir,
|
||||
requestConsentNonInteractive,
|
||||
updateState,
|
||||
() => {},
|
||||
))!;
|
||||
@@ -92,18 +71,15 @@ export async function handleUpdate(args: UpdateArgs) {
|
||||
if (args.all) {
|
||||
try {
|
||||
const extensionState = new Map();
|
||||
await checkForAllExtensionUpdates(extensions, (action) => {
|
||||
if (action.type === 'SET_STATE') {
|
||||
extensionState.set(action.payload.name, {
|
||||
status: action.payload.state,
|
||||
await extensionManager.checkForAllExtensionUpdates(
|
||||
(extensionName, state) => {
|
||||
extensionState.set(extensionName, {
|
||||
status: state,
|
||||
processed: true, // No need to process as we will force the update.
|
||||
});
|
||||
}
|
||||
});
|
||||
let updateInfos = await updateAllUpdatableExtensions(
|
||||
workingDir,
|
||||
requestConsentNonInteractive,
|
||||
extensions,
|
||||
},
|
||||
);
|
||||
let updateInfos = await extensionManager.updateAllUpdatableExtensions(
|
||||
extensionState,
|
||||
() => {},
|
||||
);
|
||||
|
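One side of the command diffs above routes install, list, uninstall, and update through a core ExtensionManager. The sketch below collects the surface those handlers touch in one place; the method names come directly from the calls shown in this diff, but the parameter and return types are assumptions for illustration, not the real core typings.

// Sketch only: method names are the ones called by the command handlers above;
// the signatures are guesses for illustration, not the actual core API.
export interface ExtensionUpdateInfoSketch {
  name: string;
  originalVersion: string;
  updatedVersion: string;
}

export interface ExtensionManagerSketch {
  // Re-scan installed extensions from disk before acting on them.
  refreshCache(): Promise<void>;
  // Install from install metadata, prompting through the consent callback.
  installExtension(
    installMetadata: { source: string; type: string },
    requestConsent: (description: string) => Promise<boolean>,
  ): Promise<{ name: string }>;
  // Extensions currently known to the manager.
  getLoadedExtensions(): Array<{ name: string; installMetadata?: unknown }>;
  // Render a single extension for `qwen extensions list`.
  toOutputString(extension: unknown, cwd: string): string;
  // Remove an installed extension by name or source URL.
  uninstallExtension(name: string, force: boolean): Promise<void>;
  // Update one extension that has an update available.
  updateExtension(extension: unknown): Promise<ExtensionUpdateInfoSketch>;
  // Check every installed extension and report per-extension update state.
  checkForAllExtensionUpdates(
    onState: (extensionName: string, state: unknown) => void,
  ): Promise<void>;
  // Update everything whose recorded state says an update is available.
  updateAllUpdatableExtensions(
    extensionState: Map<string, unknown>,
    onProgress: () => void,
  ): Promise<ExtensionUpdateInfoSketch[]>;
}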
||||
21
packages/cli/src/commands/extensions/utils.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { ExtensionManager } from '@qwen-code/qwen-code-core';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
import { requestConsentNonInteractive } from './consent.js';
|
||||
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
|
||||
|
||||
export async function getExtensionManager(): Promise<ExtensionManager> {
|
||||
const workspaceDir = process.cwd();
|
||||
const extensionManager = new ExtensionManager({
|
||||
workspaceDir,
|
||||
requestConsent: requestConsentNonInteractive,
|
||||
isWorkspaceTrusted: !!isWorkspaceTrusted(loadSettings(workspaceDir).merged),
|
||||
});
|
||||
await extensionManager.refreshCache();
|
||||
return extensionManager;
|
||||
}
|
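A minimal sketch of a command handler built on this helper, modelled on the list and update handlers earlier in this diff; handleCount itself is a hypothetical command used only to show the pattern.

// Hypothetical handler showing the intended usage of getExtensionManager().
// getErrorMessage and getExtensionManager are the same imports the real
// handlers in this diff use; the handler body is illustrative only.
import { getErrorMessage } from '../../utils/errors.js';
import { getExtensionManager } from './utils.js';

export async function handleCount() {
  try {
    const extensionManager = await getExtensionManager();
    const extensions = extensionManager.getLoadedExtensions();
    console.log(`${extensions.length} extension(s) installed.`);
  } catch (error) {
    console.error(getErrorMessage(error));
  }
}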
||||
@@ -7,7 +7,8 @@
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { listMcpServers } from './list.js';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
|
||||
import { loadExtensions } from '../../config/extension.js';
|
||||
import { ExtensionStorage } from '../../config/extensions/storage.js';
|
||||
import { createTransport } from '@qwen-code/qwen-code-core';
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
||||
|
||||
|
||||
@@ -8,10 +8,13 @@
|
||||
import type { CommandModule } from 'yargs';
|
||||
import { loadSettings } from '../../config/settings.js';
|
||||
import type { MCPServerConfig } from '@qwen-code/qwen-code-core';
|
||||
import { MCPServerStatus, createTransport } from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
MCPServerStatus,
|
||||
createTransport,
|
||||
ExtensionManager,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
||||
import { ExtensionStorage, loadExtensions } from '../../config/extension.js';
|
||||
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
|
||||
import { isWorkspaceTrusted } from '../../config/trustedFolders.js';
|
||||
|
||||
const COLOR_GREEN = '\u001b[32m';
|
||||
const COLOR_YELLOW = '\u001b[33m';
|
||||
@@ -22,22 +25,27 @@ async function getMcpServersFromConfig(): Promise<
|
||||
Record<string, MCPServerConfig>
|
||||
> {
|
||||
const settings = loadSettings();
|
||||
const extensions = loadExtensions(
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
);
|
||||
const extensionManager = new ExtensionManager({
|
||||
isWorkspaceTrusted: !!isWorkspaceTrusted(settings.merged),
|
||||
telemetrySettings: settings.merged.telemetry,
|
||||
});
|
||||
await extensionManager.refreshCache();
|
||||
const extensions = extensionManager.getLoadedExtensions();
|
||||
const mcpServers = { ...(settings.merged.mcpServers || {}) };
|
||||
for (const extension of extensions) {
|
||||
Object.entries(extension.config.mcpServers || {}).forEach(
|
||||
([key, server]) => {
|
||||
if (mcpServers[key]) {
|
||||
return;
|
||||
}
|
||||
mcpServers[key] = {
|
||||
...server,
|
||||
extensionName: extension.config.name,
|
||||
};
|
||||
},
|
||||
);
|
||||
if (extension.isActive) {
|
||||
Object.entries(extension.config.mcpServers || {}).forEach(
|
||||
([key, server]) => {
|
||||
if (mcpServers[key]) {
|
||||
return;
|
||||
}
|
||||
mcpServers[key] = {
|
||||
...server,
|
||||
extensionName: extension.config.name,
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
return mcpServers;
|
||||
}
|
||||
|
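Both variants of the merge above give servers from settings precedence over extension-provided ones; the variant guarded by extension.isActive additionally skips inactive extensions. The standalone snippet below illustrates that precedence rule; the server names and URLs are invented for the example.

// Standalone illustration of the precedence rule used in getMcpServersFromConfig:
// settings entries win, and (in the guarded variant) only active extensions contribute.
type ServerSketch = { url?: string; extensionName?: string };

const settingsServers: Record<string, ServerSketch> = {
  docs: { url: 'http://localhost:3000' },
};

const extensionsSketch: Array<{
  isActive: boolean;
  config: { name: string; mcpServers?: Record<string, ServerSketch> };
}> = [
  {
    isActive: true,
    config: {
      name: 'my-ext',
      mcpServers: {
        docs: { url: 'http://localhost:4000' }, // ignored: key already defined in settings
        search: { url: 'http://localhost:5000' }, // added, tagged with extensionName
      },
    },
  },
  {
    isActive: false,
    config: { name: 'disabled-ext', mcpServers: { extra: {} } }, // skipped entirely
  },
];

const merged: Record<string, ServerSketch> = { ...settingsServers };
for (const extension of extensionsSketch) {
  if (!extension.isActive) continue;
  for (const [key, server] of Object.entries(extension.config.mcpServers ?? {})) {
    if (merged[key]) continue;
    merged[key] = { ...server, extensionName: extension.config.name };
  }
}
// merged now contains: docs (from settings) and search (extensionName: 'my-ext').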
||||
@@ -1,112 +1,41 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Qwen Team
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { AuthType } from '@qwen-code/qwen-code-core';
|
||||
import { vi } from 'vitest';
|
||||
import { validateAuthMethod } from './auth.js';
|
||||
import * as settings from './settings.js';
|
||||
|
||||
vi.mock('./settings.js', () => ({
|
||||
loadEnvironment: vi.fn(),
|
||||
loadSettings: vi.fn().mockReturnValue({
|
||||
merged: {},
|
||||
merged: vi.fn().mockReturnValue({}),
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('validateAuthMethod', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
// Reset mock to default
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {},
|
||||
} as ReturnType<typeof settings.loadSettings>);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
delete process.env['OPENAI_API_KEY'];
|
||||
delete process.env['CUSTOM_API_KEY'];
|
||||
delete process.env['GEMINI_API_KEY'];
|
||||
delete process.env['GEMINI_API_KEY_ALTERED'];
|
||||
delete process.env['ANTHROPIC_API_KEY'];
|
||||
delete process.env['ANTHROPIC_BASE_URL'];
|
||||
delete process.env['GOOGLE_API_KEY'];
|
||||
});
|
||||
|
||||
it('should return null for USE_OPENAI with default env key', () => {
|
||||
it('should return null for USE_OPENAI', () => {
|
||||
process.env['OPENAI_API_KEY'] = 'fake-key';
|
||||
expect(validateAuthMethod(AuthType.USE_OPENAI)).toBeNull();
|
||||
});
|
||||
|
||||
it('should return an error message for USE_OPENAI if no API key is available', () => {
|
||||
it('should return an error message for USE_OPENAI if OPENAI_API_KEY is not set', () => {
|
||||
delete process.env['OPENAI_API_KEY'];
|
||||
expect(validateAuthMethod(AuthType.USE_OPENAI)).toBe(
|
||||
"Missing API key for OpenAI-compatible auth. Set settings.security.auth.apiKey, or set the 'OPENAI_API_KEY' environment variable.",
|
||||
'OPENAI_API_KEY environment variable not found. You can enter it interactively or add it to your .env file.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return null for USE_OPENAI with custom envKey from modelProviders', () => {
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'custom-model' },
|
||||
modelProviders: {
|
||||
openai: [{ id: 'custom-model', envKey: 'CUSTOM_API_KEY' }],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
process.env['CUSTOM_API_KEY'] = 'custom-key';
|
||||
|
||||
expect(validateAuthMethod(AuthType.USE_OPENAI)).toBeNull();
|
||||
});
|
||||
|
||||
it('should return error with custom envKey hint when modelProviders envKey is set but env var is missing', () => {
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'custom-model' },
|
||||
modelProviders: {
|
||||
openai: [{ id: 'custom-model', envKey: 'CUSTOM_API_KEY' }],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
|
||||
const result = validateAuthMethod(AuthType.USE_OPENAI);
|
||||
expect(result).toContain('CUSTOM_API_KEY');
|
||||
});
|
||||
|
||||
it('should return null for USE_GEMINI with custom envKey', () => {
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'gemini-1.5-flash' },
|
||||
modelProviders: {
|
||||
gemini: [
|
||||
{ id: 'gemini-1.5-flash', envKey: 'GEMINI_API_KEY_ALTERED' },
|
||||
],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
process.env['GEMINI_API_KEY_ALTERED'] = 'altered-key';
|
||||
|
||||
expect(validateAuthMethod(AuthType.USE_GEMINI)).toBeNull();
|
||||
});
|
||||
|
||||
it('should return error with custom envKey for USE_GEMINI when env var is missing', () => {
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'gemini-1.5-flash' },
|
||||
modelProviders: {
|
||||
gemini: [
|
||||
{ id: 'gemini-1.5-flash', envKey: 'GEMINI_API_KEY_ALTERED' },
|
||||
],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
|
||||
const result = validateAuthMethod(AuthType.USE_GEMINI);
|
||||
expect(result).toContain('GEMINI_API_KEY_ALTERED');
|
||||
});
|
||||
|
||||
it('should return null for QWEN_OAUTH', () => {
|
||||
expect(validateAuthMethod(AuthType.QWEN_OAUTH)).toBeNull();
|
||||
});
|
||||
@@ -116,115 +45,4 @@ describe('validateAuthMethod', () => {
|
||||
'Invalid auth method selected.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return null for USE_ANTHROPIC with custom envKey and baseUrl', () => {
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'claude-3' },
|
||||
modelProviders: {
|
||||
anthropic: [
|
||||
{
|
||||
id: 'claude-3',
|
||||
envKey: 'CUSTOM_ANTHROPIC_KEY',
|
||||
baseUrl: 'https://api.anthropic.com',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
process.env['CUSTOM_ANTHROPIC_KEY'] = 'custom-anthropic-key';
|
||||
|
||||
expect(validateAuthMethod(AuthType.USE_ANTHROPIC)).toBeNull();
|
||||
});
|
||||
|
||||
it('should return error for USE_ANTHROPIC when baseUrl is missing', () => {
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'claude-3' },
|
||||
modelProviders: {
|
||||
anthropic: [{ id: 'claude-3', envKey: 'CUSTOM_ANTHROPIC_KEY' }],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
process.env['CUSTOM_ANTHROPIC_KEY'] = 'custom-key';
|
||||
|
||||
const result = validateAuthMethod(AuthType.USE_ANTHROPIC);
|
||||
expect(result).toContain('modelProviders[].baseUrl');
|
||||
});
|
||||
|
||||
it('should return null for USE_VERTEX_AI with custom envKey', () => {
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'vertex-model' },
|
||||
modelProviders: {
|
||||
'vertex-ai': [
|
||||
{ id: 'vertex-model', envKey: 'GOOGLE_API_KEY_VERTEX' },
|
||||
],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
process.env['GOOGLE_API_KEY_VERTEX'] = 'vertex-key';
|
||||
|
||||
expect(validateAuthMethod(AuthType.USE_VERTEX_AI)).toBeNull();
|
||||
});
|
||||
|
||||
it('should use config.modelsConfig.getModel() when Config is provided', () => {
|
||||
// Settings has a different model
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'settings-model' },
|
||||
modelProviders: {
|
||||
openai: [
|
||||
{ id: 'settings-model', envKey: 'SETTINGS_API_KEY' },
|
||||
{ id: 'cli-model', envKey: 'CLI_API_KEY' },
|
||||
],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
|
||||
// Mock Config object that returns a different model (e.g., from CLI args)
|
||||
const mockConfig = {
|
||||
modelsConfig: {
|
||||
getModel: vi.fn().mockReturnValue('cli-model'),
|
||||
},
|
||||
} as unknown as import('@qwen-code/qwen-code-core').Config;
|
||||
|
||||
// Set the env key for the CLI model, not the settings model
|
||||
process.env['CLI_API_KEY'] = 'cli-key';
|
||||
|
||||
// Should use 'cli-model' from config.modelsConfig.getModel(), not 'settings-model'
|
||||
const result = validateAuthMethod(AuthType.USE_OPENAI, mockConfig);
|
||||
expect(result).toBeNull();
|
||||
expect(mockConfig.modelsConfig.getModel).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should fail validation when Config provides different model without matching env key', () => {
|
||||
// Clean up any existing env keys first
|
||||
delete process.env['CLI_API_KEY'];
|
||||
delete process.env['SETTINGS_API_KEY'];
|
||||
delete process.env['OPENAI_API_KEY'];
|
||||
|
||||
vi.mocked(settings.loadSettings).mockReturnValue({
|
||||
merged: {
|
||||
model: { name: 'settings-model' },
|
||||
modelProviders: {
|
||||
openai: [
|
||||
{ id: 'settings-model', envKey: 'SETTINGS_API_KEY' },
|
||||
{ id: 'cli-model', envKey: 'CLI_API_KEY' },
|
||||
],
|
||||
},
|
||||
},
|
||||
} as unknown as ReturnType<typeof settings.loadSettings>);
|
||||
|
||||
const mockConfig = {
|
||||
modelsConfig: {
|
||||
getModel: vi.fn().mockReturnValue('cli-model'),
|
||||
},
|
||||
} as unknown as import('@qwen-code/qwen-code-core').Config;
|
||||
|
||||
// Don't set CLI_API_KEY - validation should fail
|
||||
const result = validateAuthMethod(AuthType.USE_OPENAI, mockConfig);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result).toContain('CLI_API_KEY');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,169 +1,21 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Qwen Team
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import {
|
||||
AuthType,
|
||||
type Config,
|
||||
type ModelProvidersConfig,
|
||||
type ProviderModelConfig,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { loadEnvironment, loadSettings, type Settings } from './settings.js';
|
||||
import { t } from '../i18n/index.js';
|
||||
import { AuthType } from '@qwen-code/qwen-code-core';
|
||||
import { loadEnvironment, loadSettings } from './settings.js';
|
||||
|
||||
/**
|
||||
* Default environment variable names for each auth type
|
||||
*/
|
||||
const DEFAULT_ENV_KEYS: Record<string, string> = {
|
||||
[AuthType.USE_OPENAI]: 'OPENAI_API_KEY',
|
||||
[AuthType.USE_ANTHROPIC]: 'ANTHROPIC_API_KEY',
|
||||
[AuthType.USE_GEMINI]: 'GEMINI_API_KEY',
|
||||
[AuthType.USE_VERTEX_AI]: 'GOOGLE_API_KEY',
|
||||
};
|
||||
|
||||
/**
|
||||
* Find model configuration from modelProviders by authType and modelId
|
||||
*/
|
||||
function findModelConfig(
|
||||
modelProviders: ModelProvidersConfig | undefined,
|
||||
authType: string,
|
||||
modelId: string | undefined,
|
||||
): ProviderModelConfig | undefined {
|
||||
if (!modelProviders || !modelId) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const models = modelProviders[authType];
|
||||
if (!Array.isArray(models)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return models.find((m) => m.id === modelId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if API key is available for the given auth type and model configuration.
|
||||
* Prioritizes custom envKey from modelProviders over default environment variables.
|
||||
*/
|
||||
function hasApiKeyForAuth(
|
||||
authType: string,
|
||||
settings: Settings,
|
||||
config?: Config,
|
||||
): {
|
||||
hasKey: boolean;
|
||||
checkedEnvKey: string | undefined;
|
||||
isExplicitEnvKey: boolean;
|
||||
} {
|
||||
const modelProviders = settings.modelProviders as
|
||||
| ModelProvidersConfig
|
||||
| undefined;
|
||||
|
||||
// Use config.modelsConfig.getModel() if available for accurate model ID resolution
|
||||
// that accounts for CLI args, env vars, and settings. Fall back to settings.model.name.
|
||||
const modelId = config?.modelsConfig.getModel() ?? settings.model?.name;
|
||||
|
||||
// Try to find model-specific envKey from modelProviders
|
||||
const modelConfig = findModelConfig(modelProviders, authType, modelId);
|
||||
if (modelConfig?.envKey) {
|
||||
// Explicit envKey configured - only check this env var, no apiKey fallback
|
||||
const hasKey = !!process.env[modelConfig.envKey];
|
||||
return {
|
||||
hasKey,
|
||||
checkedEnvKey: modelConfig.envKey,
|
||||
isExplicitEnvKey: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Using default environment variable - apiKey fallback is allowed
|
||||
const defaultEnvKey = DEFAULT_ENV_KEYS[authType];
|
||||
if (defaultEnvKey) {
|
||||
const hasKey = !!process.env[defaultEnvKey];
|
||||
if (hasKey) {
|
||||
return { hasKey, checkedEnvKey: defaultEnvKey, isExplicitEnvKey: false };
|
||||
}
|
||||
}
|
||||
|
||||
// Also check settings.security.auth.apiKey as fallback (only for default env key)
|
||||
if (settings.security?.auth?.apiKey) {
|
||||
return {
|
||||
hasKey: true,
|
||||
checkedEnvKey: defaultEnvKey || undefined,
|
||||
isExplicitEnvKey: false,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
hasKey: false,
|
||||
checkedEnvKey: defaultEnvKey,
|
||||
isExplicitEnvKey: false,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate API key error message based on auth check result.
|
||||
* Returns null if API key is present, otherwise returns the appropriate error message.
|
||||
*/
|
||||
function getApiKeyError(
|
||||
authMethod: string,
|
||||
settings: Settings,
|
||||
config?: Config,
|
||||
): string | null {
|
||||
const { hasKey, checkedEnvKey, isExplicitEnvKey } = hasApiKeyForAuth(
|
||||
authMethod,
|
||||
settings,
|
||||
config,
|
||||
);
|
||||
if (hasKey) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const envKeyHint = checkedEnvKey || DEFAULT_ENV_KEYS[authMethod];
|
||||
if (isExplicitEnvKey) {
|
||||
return t(
|
||||
'{{envKeyHint}} environment variable not found. Please set it in your .env file or environment variables.',
|
||||
{ envKeyHint },
|
||||
);
|
||||
}
|
||||
return t(
|
||||
'{{envKeyHint}} environment variable not found (or set settings.security.auth.apiKey). Please set it in your .env file or environment variables.',
|
||||
{ envKeyHint },
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that the required credentials and configuration exist for the given auth method.
|
||||
*/
|
||||
export function validateAuthMethod(
|
||||
authMethod: string,
|
||||
config?: Config,
|
||||
): string | null {
|
||||
export function validateAuthMethod(authMethod: string): string | null {
|
||||
const settings = loadSettings();
|
||||
loadEnvironment(settings.merged);
|
||||
|
||||
if (authMethod === AuthType.USE_OPENAI) {
|
||||
const { hasKey, checkedEnvKey, isExplicitEnvKey } = hasApiKeyForAuth(
|
||||
authMethod,
|
||||
settings.merged,
|
||||
config,
|
||||
);
|
||||
if (!hasKey) {
|
||||
const envKeyHint = checkedEnvKey
|
||||
? `'${checkedEnvKey}'`
|
||||
: "'OPENAI_API_KEY'";
|
||||
if (isExplicitEnvKey) {
|
||||
// Explicit envKey configured - only suggest setting the env var
|
||||
return t(
|
||||
'Missing API key for OpenAI-compatible auth. Set the {{envKeyHint}} environment variable.',
|
||||
{ envKeyHint },
|
||||
);
|
||||
}
|
||||
// Default env key - can use either apiKey or env var
|
||||
return t(
|
||||
'Missing API key for OpenAI-compatible auth. Set settings.security.auth.apiKey, or set the {{envKeyHint}} environment variable.',
|
||||
{ envKeyHint },
|
||||
);
|
||||
const hasApiKey =
|
||||
process.env['OPENAI_API_KEY'] || settings.merged.security?.auth?.apiKey;
|
||||
if (!hasApiKey) {
|
||||
return 'OPENAI_API_KEY environment variable not found. You can enter it interactively or add it to your .env file.';
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -175,49 +27,36 @@ export function validateAuthMethod(
|
||||
}
|
||||
|
||||
if (authMethod === AuthType.USE_ANTHROPIC) {
|
||||
const apiKeyError = getApiKeyError(authMethod, settings.merged, config);
|
||||
if (apiKeyError) {
|
||||
return apiKeyError;
|
||||
const hasApiKey = process.env['ANTHROPIC_API_KEY'];
|
||||
if (!hasApiKey) {
|
||||
return 'ANTHROPIC_API_KEY environment variable not found.';
|
||||
}
|
||||
|
||||
// Check baseUrl - can come from modelProviders or environment
|
||||
const modelProviders = settings.merged.modelProviders as
|
||||
| ModelProvidersConfig
|
||||
| undefined;
|
||||
// Use config.modelsConfig.getModel() if available for accurate model ID
|
||||
const modelId =
|
||||
config?.modelsConfig.getModel() ?? settings.merged.model?.name;
|
||||
const modelConfig = findModelConfig(modelProviders, authMethod, modelId);
|
||||
|
||||
if (modelConfig && !modelConfig.baseUrl) {
|
||||
return t(
|
||||
'Anthropic provider missing required baseUrl in modelProviders[].baseUrl.',
|
||||
);
|
||||
}
|
||||
if (!modelConfig && !process.env['ANTHROPIC_BASE_URL']) {
|
||||
return t('ANTHROPIC_BASE_URL environment variable not found.');
|
||||
const hasBaseUrl = process.env['ANTHROPIC_BASE_URL'];
|
||||
if (!hasBaseUrl) {
|
||||
return 'ANTHROPIC_BASE_URL environment variable not found.';
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
if (authMethod === AuthType.USE_GEMINI) {
|
||||
const apiKeyError = getApiKeyError(authMethod, settings.merged, config);
|
||||
if (apiKeyError) {
|
||||
return apiKeyError;
|
||||
const hasApiKey = process.env['GEMINI_API_KEY'];
|
||||
if (!hasApiKey) {
|
||||
return 'GEMINI_API_KEY environment variable not found. Please set it in your .env file or environment variables.';
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
if (authMethod === AuthType.USE_VERTEX_AI) {
|
||||
const apiKeyError = getApiKeyError(authMethod, settings.merged, config);
|
||||
if (apiKeyError) {
|
||||
return apiKeyError;
|
||||
const hasApiKey = process.env['GOOGLE_API_KEY'];
|
||||
if (!hasApiKey) {
|
||||
return 'GOOGLE_API_KEY environment variable not found. Please set it in your .env file or environment variables.';
|
||||
}
|
||||
|
||||
process.env['GOOGLE_GENAI_USE_VERTEXAI'] = 'true';
|
||||
return null;
|
||||
}
|
||||
|
||||
return t('Invalid auth method selected.');
|
||||
return 'Invalid auth method selected.';
|
||||
}
|
||||
|
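One side of the auth diff above resolves API keys through a per-model envKey configured in settings.modelProviders, and the matching tests exercise exactly that path. A condensed sketch of the configuration those tests rely on and the behaviour they assert (the values mirror the test fixtures; the full Settings typing is not reproduced here):

// Settings fragment mirroring the fixtures in the tests above; only the fields
// exercised by validateAuthMethod are shown.
const settingsSketch = {
  model: { name: 'custom-model' },
  modelProviders: {
    openai: [{ id: 'custom-model', envKey: 'CUSTOM_API_KEY' }],
  },
};

// With this configuration the tests expect:
// - CUSTOM_API_KEY present  -> validateAuthMethod(AuthType.USE_OPENAI) returns null
// - CUSTOM_API_KEY missing  -> the returned error message names 'CUSTOM_API_KEY',
//   and the settings.security.auth.apiKey fallback is not consulted because the
//   envKey was configured explicitly.
void settingsSketch;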
||||
@@ -16,7 +16,8 @@ import {
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { loadCliConfig, parseArguments, type CliArgs } from './config.js';
|
||||
import type { Settings } from './settings.js';
|
||||
import { ExtensionStorage, type Extension } from './extension.js';
|
||||
import type { Extension } from './extension.js';
|
||||
import { ExtensionStorage } from './extensions/storage.js';
|
||||
import * as ServerConfig from '@qwen-code/qwen-code-core';
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
|
||||
@@ -77,8 +78,10 @@ vi.mock('read-package-up', () => ({
|
||||
),
|
||||
}));
|
||||
|
||||
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
|
||||
const actualServer = await importOriginal<typeof ServerConfig>();
|
||||
vi.mock('@qwen-code/qwen-code-core', async () => {
|
||||
const actualServer = await vi.importActual<typeof ServerConfig>(
|
||||
'@qwen-code/qwen-code-core',
|
||||
);
|
||||
return {
|
||||
...actualServer,
|
||||
IdeClient: {
|
||||
@@ -1595,58 +1598,6 @@ describe('Approval mode tool exclusion logic', () => {
|
||||
expect(excludedTools).toContain(WriteFileTool.Name);
|
||||
});
|
||||
|
||||
it('should not exclude a tool explicitly allowed in tools.allowed', async () => {
|
||||
process.argv = ['node', 'script.js', '-p', 'test'];
|
||||
const argv = await parseArguments({} as Settings);
|
||||
const settings: Settings = {
|
||||
tools: {
|
||||
allowed: [ShellTool.Name],
|
||||
},
|
||||
};
|
||||
const extensions: Extension[] = [];
|
||||
|
||||
const config = await loadCliConfig(
|
||||
settings,
|
||||
extensions,
|
||||
new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
argv.extensions,
|
||||
),
|
||||
argv,
|
||||
);
|
||||
|
||||
const excludedTools = config.getExcludeTools();
|
||||
expect(excludedTools).not.toContain(ShellTool.Name);
|
||||
expect(excludedTools).toContain(EditTool.Name);
|
||||
expect(excludedTools).toContain(WriteFileTool.Name);
|
||||
});
|
||||
|
||||
it('should not exclude a tool explicitly allowed in tools.core', async () => {
|
||||
process.argv = ['node', 'script.js', '-p', 'test'];
|
||||
const argv = await parseArguments({} as Settings);
|
||||
const settings: Settings = {
|
||||
tools: {
|
||||
core: [ShellTool.Name],
|
||||
},
|
||||
};
|
||||
const extensions: Extension[] = [];
|
||||
|
||||
const config = await loadCliConfig(
|
||||
settings,
|
||||
extensions,
|
||||
new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
argv.extensions,
|
||||
),
|
||||
argv,
|
||||
);
|
||||
|
||||
const excludedTools = config.getExcludeTools();
|
||||
expect(excludedTools).not.toContain(ShellTool.Name);
|
||||
expect(excludedTools).toContain(EditTool.Name);
|
||||
expect(excludedTools).toContain(WriteFileTool.Name);
|
||||
});
|
||||
|
||||
it('should exclude only shell tools in non-interactive mode with auto-edit approval mode', async () => {
|
||||
process.argv = [
|
||||
'node',
|
||||
|
||||
@@ -9,32 +9,24 @@ import {
|
||||
AuthType,
|
||||
Config,
|
||||
DEFAULT_QWEN_EMBEDDING_MODEL,
|
||||
DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
|
||||
EditTool,
|
||||
FileDiscoveryService,
|
||||
getCurrentGeminiMdFilename,
|
||||
loadServerHierarchicalMemory,
|
||||
setGeminiMdFilename as setServerGeminiMdFilename,
|
||||
ShellTool,
|
||||
WriteFileTool,
|
||||
resolveTelemetrySettings,
|
||||
FatalConfigError,
|
||||
Storage,
|
||||
InputFormat,
|
||||
OutputFormat,
|
||||
isToolEnabled,
|
||||
SessionService,
|
||||
type ResumedSessionData,
|
||||
type FileFilteringOptions,
|
||||
type MCPServerConfig,
|
||||
type ToolName,
|
||||
EditTool,
|
||||
ShellTool,
|
||||
WriteFileTool,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { extensionsCommand } from '../commands/extensions.js';
|
||||
import type { Settings } from './settings.js';
|
||||
import {
|
||||
resolveCliGenerationConfig,
|
||||
getAuthTypeFromEnv,
|
||||
} from '../utils/modelConfigUtils.js';
|
||||
import yargs, { type Argv } from 'yargs';
|
||||
import { hideBin } from 'yargs/helpers';
|
||||
import * as fs from 'node:fs';
|
||||
@@ -43,14 +35,11 @@ import { homedir } from 'node:os';
|
||||
|
||||
import { resolvePath } from '../utils/resolvePath.js';
|
||||
import { getCliVersion } from '../utils/version.js';
|
||||
import type { Extension } from './extension.js';
|
||||
import { annotateActiveExtensions } from './extension.js';
|
||||
import { loadSandboxConfig } from './sandboxConfig.js';
|
||||
import { appEvents } from '../utils/events.js';
|
||||
import { mcpCommand } from '../commands/mcp.js';
|
||||
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import type { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
|
||||
import { buildWebSearchConfig } from './webSearch.js';
|
||||
|
||||
// Simple console logger for now - replace with actual logger if available
|
||||
@@ -117,7 +106,6 @@ export interface CliArgs {
|
||||
telemetryOutfile: string | undefined;
|
||||
allowedMcpServerNames: string[] | undefined;
|
||||
allowedTools: string[] | undefined;
|
||||
acp: boolean | undefined;
|
||||
experimentalAcp: boolean | undefined;
|
||||
experimentalSkills: boolean | undefined;
|
||||
extensions: string[] | undefined;
|
||||
@@ -169,18 +157,8 @@ function normalizeOutputFormat(
|
||||
return OutputFormat.TEXT;
|
||||
}
|
||||
|
||||
export async function parseArguments(settings: Settings): Promise<CliArgs> {
|
||||
let rawArgv = hideBin(process.argv);
|
||||
|
||||
// hack: if the first argument is the CLI entry point, remove it
|
||||
if (
|
||||
rawArgv.length > 0 &&
|
||||
(rawArgv[0].endsWith('/dist/qwen-cli/cli.js') ||
|
||||
rawArgv[0].endsWith('/dist/cli.js'))
|
||||
) {
|
||||
rawArgv = rawArgv.slice(1);
|
||||
}
|
||||
|
||||
export async function parseArguments(): Promise<CliArgs> {
|
||||
const rawArgv = hideBin(process.argv);
|
||||
const yargsInstance = yargs(rawArgv)
|
||||
.locale('en')
|
||||
.scriptName('qwen')
|
||||
@@ -321,15 +299,9 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
|
||||
description: 'Enables checkpointing of file edits',
|
||||
default: false,
|
||||
})
|
||||
.option('acp', {
|
||||
type: 'boolean',
|
||||
description: 'Starts the agent in ACP mode',
|
||||
})
|
||||
.option('experimental-acp', {
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Starts the agent in ACP mode (deprecated, use --acp instead)',
|
||||
hidden: true,
|
||||
description: 'Starts the agent in ACP mode',
|
||||
})
|
||||
.option('experimental-skills', {
|
||||
type: 'boolean',
|
||||
@@ -560,11 +532,9 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
|
||||
}),
|
||||
)
|
||||
// Register MCP subcommands
|
||||
.command(mcpCommand);
|
||||
|
||||
if (settings?.experimental?.extensionManagement ?? true) {
|
||||
yargsInstance.command(extensionsCommand);
|
||||
}
|
||||
.command(mcpCommand)
|
||||
// Register Extension subcommands
|
||||
.command(extensionsCommand);
|
||||
|
||||
yargsInstance
|
||||
.version(await getCliVersion()) // This will enable the --version flag based on package.json
|
||||
@@ -612,19 +582,8 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
|
||||
// The import format is now only controlled by settings.memoryImportFormat
|
||||
// We no longer accept it as a CLI argument
|
||||
|
||||
// Handle deprecated --experimental-acp flag
|
||||
if (result['experimentalAcp']) {
|
||||
console.warn(
|
||||
'\x1b[33m⚠ Warning: --experimental-acp is deprecated and will be removed in a future release. Please use --acp instead.\x1b[0m',
|
||||
);
|
||||
// Map experimental-acp to acp if acp is not explicitly set
|
||||
if (!result['acp']) {
|
||||
(result as Record<string, unknown>)['acp'] = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply ACP fallback: if acp or experimental-acp is present but no explicit --channel, treat as ACP
|
||||
if ((result['acp'] || result['experimentalAcp']) && !result['channel']) {
|
||||
// Apply ACP fallback: if experimental-acp is present but no explicit --channel, treat as ACP
|
||||
if (result['experimentalAcp'] && !result['channel']) {
|
||||
(result as Record<string, unknown>)['channel'] = 'ACP';
|
||||
}
|
||||
|
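One side of the hunk above maps the deprecated --experimental-acp flag onto --acp before applying the ACP channel fallback. That mapping, restated outside the yargs plumbing (`result` stands in for the parsed argument object; the ANSI colour codes around the warning are omitted here):

// Standalone restatement of the --experimental-acp deprecation shim shown above.
// `result` plays the role of the parsed yargs output; only the relevant keys matter.
export function applyAcpFallback(result: Record<string, unknown>): void {
  if (result['experimentalAcp']) {
    console.warn(
      '⚠ Warning: --experimental-acp is deprecated and will be removed in a future release. Please use --acp instead.',
    );
    // Map the deprecated flag onto --acp unless --acp was set explicitly.
    if (!result['acp']) {
      result['acp'] = true;
    }
  }
  // Either flag implies the ACP channel when no explicit --channel was given.
  if ((result['acp'] || result['experimentalAcp']) && !result['channel']) {
    result['channel'] = 'ACP';
  }
}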
||||
@@ -639,11 +598,11 @@ export async function loadHierarchicalGeminiMemory(
|
||||
includeDirectoriesToReadGemini: readonly string[] = [],
|
||||
debugMode: boolean,
|
||||
fileService: FileDiscoveryService,
|
||||
settings: Settings,
|
||||
extensionContextFilePaths: string[] = [],
|
||||
folderTrust: boolean,
|
||||
memoryImportFormat: 'flat' | 'tree' = 'tree',
|
||||
fileFilteringOptions?: FileFilteringOptions,
|
||||
maxDirs: number = 200,
|
||||
): Promise<{ memoryContent: string; fileCount: number }> {
|
||||
// FIX: Use real, canonical paths for a reliable comparison to handle symlinks.
|
||||
const realCwd = fs.realpathSync(path.resolve(currentWorkingDirectory));
|
||||
@@ -670,7 +629,7 @@ export async function loadHierarchicalGeminiMemory(
|
||||
folderTrust,
|
||||
memoryImportFormat,
|
||||
fileFilteringOptions,
|
||||
settings.context?.discoveryMaxDirs,
|
||||
maxDirs,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -685,30 +644,17 @@ export function isDebugMode(argv: CliArgs): boolean {
|
||||
|
||||
export async function loadCliConfig(
|
||||
settings: Settings,
|
||||
extensions: Extension[],
|
||||
extensionEnablementManager: ExtensionEnablementManager,
|
||||
argv: CliArgs,
|
||||
cwd: string = process.cwd(),
|
||||
overrideExtensions?: string[],
|
||||
): Promise<Config> {
|
||||
const debugMode = isDebugMode(argv);
|
||||
|
||||
const memoryImportFormat = settings.context?.importFormat || 'tree';
|
||||
|
||||
const ideMode = settings.ide?.enabled ?? false;
|
||||
|
||||
const folderTrust = settings.security?.folderTrust?.enabled ?? false;
|
||||
const trustedFolder = isWorkspaceTrusted(settings)?.isTrusted ?? true;
|
||||
|
||||
const allExtensions = annotateActiveExtensions(
|
||||
extensions,
|
||||
cwd,
|
||||
extensionEnablementManager,
|
||||
);
|
||||
|
||||
const activeExtensions = extensions.filter(
|
||||
(_, i) => allExtensions[i].isActive,
|
||||
);
|
||||
|
||||
// Set the context filename in the server's memoryTool module BEFORE loading memory
|
||||
// TODO(b/343434939): This is a bit of a hack. The contextFileName should ideally be passed
|
||||
// directly to the Config constructor in core, and have core handle setGeminiMdFilename.
|
||||
@@ -720,51 +666,27 @@ export async function loadCliConfig(
|
||||
setServerGeminiMdFilename(getCurrentGeminiMdFilename());
|
||||
}
|
||||
|
||||
const extensionContextFilePaths = activeExtensions.flatMap(
|
||||
(e) => e.contextFiles,
|
||||
);
|
||||
|
||||
// Automatically load output-language.md if it exists
|
||||
const outputLanguageFilePath = path.join(
|
||||
let outputLanguageFilePath: string | undefined = path.join(
|
||||
Storage.getGlobalQwenDir(),
|
||||
'output-language.md',
|
||||
);
|
||||
if (fs.existsSync(outputLanguageFilePath)) {
|
||||
extensionContextFilePaths.push(outputLanguageFilePath);
|
||||
if (debugMode) {
|
||||
logger.debug(
|
||||
`Found output-language.md, adding to context files: ${outputLanguageFilePath}`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
outputLanguageFilePath = undefined;
|
||||
}
|
||||
|
||||
const fileService = new FileDiscoveryService(cwd);
|
||||
|
||||
const fileFiltering = {
|
||||
...DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
|
||||
...settings.context?.fileFiltering,
|
||||
};
|
||||
|
||||
const includeDirectories = (settings.context?.includeDirectories || [])
|
||||
.map(resolvePath)
|
||||
.concat((argv.includeDirectories || []).map(resolvePath));
|
||||
|
||||
// Call the (now wrapper) loadHierarchicalGeminiMemory which calls the server's version
|
||||
const { memoryContent, fileCount } = await loadHierarchicalGeminiMemory(
|
||||
cwd,
|
||||
settings.context?.loadMemoryFromIncludeDirectories
|
||||
? includeDirectories
|
||||
: [],
|
||||
debugMode,
|
||||
fileService,
|
||||
settings,
|
||||
extensionContextFilePaths,
|
||||
trustedFolder,
|
||||
memoryImportFormat,
|
||||
fileFiltering,
|
||||
);
|
||||
|
||||
let mcpServers = mergeMcpServers(settings, activeExtensions);
|
||||
const question = argv.promptInteractive || argv.prompt || '';
|
||||
const inputFormat: InputFormat =
|
||||
(argv.inputFormat as InputFormat | undefined) ?? InputFormat.TEXT;
|
||||
@@ -852,28 +774,6 @@ export async function loadCliConfig(
|
||||
// However, if stream-json input is used, control can be requested via JSON messages,
|
||||
// so tools should not be excluded in that case.
|
||||
const extraExcludes: string[] = [];
|
||||
const resolvedCoreTools = argv.coreTools || settings.tools?.core || [];
|
||||
const resolvedAllowedTools =
|
||||
argv.allowedTools || settings.tools?.allowed || [];
|
||||
const isExplicitlyEnabled = (toolName: ToolName): boolean => {
|
||||
if (resolvedCoreTools.length > 0) {
|
||||
if (isToolEnabled(toolName, resolvedCoreTools, [])) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
if (resolvedAllowedTools.length > 0) {
|
||||
if (isToolEnabled(toolName, resolvedAllowedTools, [])) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const excludeUnlessExplicit = (toolName: ToolName): void => {
|
||||
if (!isExplicitlyEnabled(toolName)) {
|
||||
extraExcludes.push(toolName);
|
||||
}
|
||||
};
|
||||
|
||||
if (
|
||||
!interactive &&
|
||||
!argv.experimentalAcp &&
|
||||
@@ -882,15 +782,12 @@ export async function loadCliConfig(
|
||||
switch (approvalMode) {
|
||||
case ApprovalMode.PLAN:
|
||||
case ApprovalMode.DEFAULT:
|
||||
// In default non-interactive mode, all tools that require approval are excluded,
|
||||
// unless explicitly enabled via coreTools/allowedTools.
|
||||
excludeUnlessExplicit(ShellTool.Name as ToolName);
|
||||
excludeUnlessExplicit(EditTool.Name as ToolName);
|
||||
excludeUnlessExplicit(WriteFileTool.Name as ToolName);
|
||||
// In default non-interactive mode, all tools that require approval are excluded.
|
||||
extraExcludes.push(ShellTool.Name, EditTool.Name, WriteFileTool.Name);
|
||||
break;
|
||||
case ApprovalMode.AUTO_EDIT:
|
||||
// In auto-edit non-interactive mode, only tools that still require a prompt are excluded.
|
||||
excludeUnlessExplicit(ShellTool.Name as ToolName);
|
||||
extraExcludes.push(ShellTool.Name);
|
||||
break;
|
||||
case ApprovalMode.YOLO:
|
||||
// No extra excludes for YOLO mode.
|
||||
@@ -903,60 +800,43 @@ export async function loadCliConfig(
|
||||
|
||||
const excludeTools = mergeExcludeTools(
|
||||
settings,
|
||||
activeExtensions,
|
||||
extraExcludes.length > 0 ? extraExcludes : undefined,
|
||||
argv.excludeTools,
|
||||
);
|
||||
const blockedMcpServers: Array<{ name: string; extensionName: string }> = [];
|
||||
|
||||
if (!argv.allowedMcpServerNames) {
|
||||
if (settings.mcp?.allowed) {
|
||||
mcpServers = allowedMcpServers(
|
||||
mcpServers,
|
||||
settings.mcp.allowed,
|
||||
blockedMcpServers,
|
||||
);
|
||||
}
|
||||
|
||||
if (settings.mcp?.excluded) {
|
||||
const excludedNames = new Set(settings.mcp.excluded.filter(Boolean));
|
||||
if (excludedNames.size > 0) {
|
||||
mcpServers = Object.fromEntries(
|
||||
Object.entries(mcpServers).filter(([key]) => !excludedNames.has(key)),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (argv.allowedMcpServerNames) {
|
||||
mcpServers = allowedMcpServers(
|
||||
mcpServers,
|
||||
argv.allowedMcpServerNames,
|
||||
blockedMcpServers,
|
||||
);
|
||||
}
|
||||
const allowedMcpServers = argv.allowedMcpServerNames
|
||||
? new Set(argv.allowedMcpServerNames.filter(Boolean))
|
||||
: settings.mcp?.allowed
|
||||
? new Set(settings.mcp.allowed.filter(Boolean))
|
||||
: undefined;
|
||||
const excludedMcpServers = settings.mcp?.excluded
|
||||
? new Set(settings.mcp.excluded.filter(Boolean))
|
||||
: undefined;
|
||||
|
||||
const selectedAuthType =
|
||||
(argv.authType as AuthType | undefined) ||
|
||||
settings.security?.auth?.selectedType ||
|
||||
/* getAuthTypeFromEnv means no authType was explicitly provided, we infer the authType from env vars */
|
||||
getAuthTypeFromEnv();
|
||||
settings.security?.auth?.selectedType;
|
||||
|
||||
// Unified resolution of generation config with source attribution
|
||||
const resolvedCliConfig = resolveCliGenerationConfig({
|
||||
argv: {
|
||||
model: argv.model,
|
||||
openaiApiKey: argv.openaiApiKey,
|
||||
openaiBaseUrl: argv.openaiBaseUrl,
|
||||
openaiLogging: argv.openaiLogging,
|
||||
openaiLoggingDir: argv.openaiLoggingDir,
|
||||
},
|
||||
settings,
|
||||
selectedAuthType,
|
||||
env: process.env as Record<string, string | undefined>,
|
||||
});
|
||||
|
||||
const { model: resolvedModel } = resolvedCliConfig;
|
||||
const apiKey =
|
||||
(selectedAuthType === AuthType.USE_OPENAI
|
||||
? argv.openaiApiKey ||
|
||||
process.env['OPENAI_API_KEY'] ||
|
||||
settings.security?.auth?.apiKey
|
||||
: '') || '';
|
||||
const baseUrl =
|
||||
(selectedAuthType === AuthType.USE_OPENAI
|
||||
? argv.openaiBaseUrl ||
|
||||
process.env['OPENAI_BASE_URL'] ||
|
||||
settings.security?.auth?.baseUrl
|
||||
: '') || '';
|
||||
const resolvedModel =
|
||||
argv.model ||
|
||||
(selectedAuthType === AuthType.USE_OPENAI
|
||||
? process.env['OPENAI_MODEL'] ||
|
||||
process.env['QWEN_MODEL'] ||
|
||||
settings.model?.name
|
||||
: '') ||
|
||||
'';
|
||||
|
||||
const sandboxConfig = await loadSandboxConfig(settings, argv);
|
||||
const screenReader =
|
||||
@@ -990,8 +870,6 @@ export async function loadCliConfig(
|
||||
}
|
||||
}
|
||||
|
||||
const modelProvidersConfig = settings.modelProviders;
|
||||
|
||||
return new Config({
|
||||
sessionId,
|
||||
sessionData,
|
||||
@@ -1001,6 +879,8 @@ export async function loadCliConfig(
|
||||
includeDirectories,
|
||||
loadMemoryFromIncludeDirectories:
|
||||
settings.context?.loadMemoryFromIncludeDirectories || false,
|
||||
importFormat: settings.context?.importFormat || 'tree',
|
||||
discoveryMaxDirs: settings.context?.discoveryMaxDirs || 200,
|
||||
debugMode,
|
||||
question,
|
||||
fullContext: argv.allFiles || false,
|
||||
@@ -1010,9 +890,13 @@ export async function loadCliConfig(
|
||||
toolDiscoveryCommand: settings.tools?.discoveryCommand,
|
||||
toolCallCommand: settings.tools?.callCommand,
|
||||
mcpServerCommand: settings.mcp?.serverCommand,
|
||||
mcpServers,
|
||||
userMemory: memoryContent,
|
||||
geminiMdFileCount: fileCount,
|
||||
mcpServers: settings.mcpServers || {},
|
||||
allowedMcpServers: allowedMcpServers
|
||||
? Array.from(allowedMcpServers)
|
||||
: undefined,
|
||||
excludedMcpServers: excludedMcpServers
|
||||
? Array.from(excludedMcpServers)
|
||||
: undefined,
|
||||
approvalMode,
|
||||
showMemoryUsage:
|
||||
argv.showMemoryUsage || settings.ui?.showMemoryUsage || false,
|
||||
@@ -1035,25 +919,37 @@ export async function loadCliConfig(
|
||||
fileDiscoveryService: fileService,
|
||||
bugCommand: settings.advanced?.bugCommand,
|
||||
model: resolvedModel,
|
||||
extensionContextFilePaths,
|
||||
outputLanguageFilePath,
|
||||
sessionTokenLimit: settings.model?.sessionTokenLimit ?? -1,
|
||||
maxSessionTurns:
|
||||
argv.maxSessionTurns ?? settings.model?.maxSessionTurns ?? -1,
|
||||
experimentalZedIntegration: argv.acp || argv.experimentalAcp || false,
|
||||
experimentalZedIntegration: argv.experimentalAcp || false,
|
||||
experimentalSkills: argv.experimentalSkills || false,
|
||||
listExtensions: argv.listExtensions || false,
|
||||
extensions: allExtensions,
|
||||
blockedMcpServers,
|
||||
overrideExtensions: overrideExtensions || argv.extensions,
|
||||
noBrowser: !!process.env['NO_BROWSER'],
|
||||
authType: selectedAuthType,
|
||||
inputFormat,
|
||||
outputFormat,
|
||||
includePartialMessages,
|
||||
modelProvidersConfig,
|
||||
generationConfigSources: resolvedCliConfig.sources,
|
||||
generationConfig: resolvedCliConfig.generationConfig,
|
||||
generationConfig: {
|
||||
...(settings.model?.generationConfig || {}),
|
||||
model: resolvedModel,
|
||||
apiKey,
|
||||
baseUrl,
|
||||
enableOpenAILogging:
|
||||
(typeof argv.openaiLogging === 'undefined'
|
||||
? settings.model?.enableOpenAILogging
|
||||
: argv.openaiLogging) ?? false,
|
||||
openAILoggingDir:
|
||||
argv.openaiLoggingDir || settings.model?.openAILoggingDir,
|
||||
},
|
||||
cliVersion: await getCliVersion(),
|
||||
webSearch: buildWebSearchConfig(argv, settings, selectedAuthType),
|
||||
webSearch: buildWebSearchConfig(
|
||||
argv,
|
||||
settings,
|
||||
settings.security?.auth?.selectedType,
|
||||
),
|
||||
summarizeToolOutput: settings.model?.summarizeToolOutput,
|
||||
ideMode,
|
||||
chatCompression: settings.model?.chatCompression,
|
||||
@@ -1085,61 +981,8 @@ export async function loadCliConfig(
|
||||
});
|
||||
}
|
||||
|
||||
function allowedMcpServers(
|
||||
mcpServers: { [x: string]: MCPServerConfig },
|
||||
allowMCPServers: string[],
|
||||
blockedMcpServers: Array<{ name: string; extensionName: string }>,
|
||||
) {
|
||||
const allowedNames = new Set(allowMCPServers.filter(Boolean));
|
||||
if (allowedNames.size > 0) {
|
||||
mcpServers = Object.fromEntries(
|
||||
Object.entries(mcpServers).filter(([key, server]) => {
|
||||
const isAllowed = allowedNames.has(key);
|
||||
if (!isAllowed) {
|
||||
blockedMcpServers.push({
|
||||
name: key,
|
||||
extensionName: server.extensionName || '',
|
||||
});
|
||||
}
|
||||
return isAllowed;
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
blockedMcpServers.push(
|
||||
...Object.entries(mcpServers).map(([key, server]) => ({
|
||||
name: key,
|
||||
extensionName: server.extensionName || '',
|
||||
})),
|
||||
);
|
||||
mcpServers = {};
|
||||
}
|
||||
return mcpServers;
|
||||
}
|
||||
|
||||
function mergeMcpServers(settings: Settings, extensions: Extension[]) {
|
||||
const mcpServers = { ...(settings.mcpServers || {}) };
|
||||
for (const extension of extensions) {
|
||||
Object.entries(extension.config.mcpServers || {}).forEach(
|
||||
([key, server]) => {
|
||||
if (mcpServers[key]) {
|
||||
logger.warn(
|
||||
`Skipping extension MCP config for server with key "${key}" as it already exists.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
mcpServers[key] = {
|
||||
...server,
|
||||
extensionName: extension.config.name,
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
return mcpServers;
|
||||
}
|
||||
|
||||
function mergeExcludeTools(
|
||||
settings: Settings,
|
||||
extensions: Extension[],
|
||||
extraExcludes?: string[] | undefined,
|
||||
cliExcludeTools?: string[] | undefined,
|
||||
): string[] {
|
||||
@@ -1148,10 +991,5 @@ function mergeExcludeTools(
|
||||
...(settings.tools?.exclude || []),
|
||||
...(extraExcludes || []),
|
||||
]);
|
||||
for (const extension of extensions) {
|
||||
for (const tool of extension.config.excludeTools || []) {
|
||||
allExcludeTools.add(tool);
|
||||
}
|
||||
}
|
||||
return [...allExcludeTools];
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,786 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import type {
|
||||
MCPServerConfig,
|
||||
GeminiCLIExtension,
|
||||
ExtensionInstallMetadata,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
QWEN_DIR,
|
||||
Storage,
|
||||
Config,
|
||||
ExtensionInstallEvent,
|
||||
ExtensionUninstallEvent,
|
||||
ExtensionDisableEvent,
|
||||
ExtensionEnableEvent,
|
||||
logExtensionEnable,
|
||||
logExtensionInstallEvent,
|
||||
logExtensionUninstall,
|
||||
logExtensionDisable,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import * as os from 'node:os';
|
||||
import { SettingScope, loadSettings } from '../config/settings.js';
|
||||
import { getErrorMessage } from '../utils/errors.js';
|
||||
import { recursivelyHydrateStrings } from './extensions/variables.js';
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
|
||||
import {
|
||||
cloneFromGit,
|
||||
downloadFromGitHubRelease,
|
||||
} from './extensions/github.js';
|
||||
import type { LoadExtensionContext } from './extensions/variableSchema.js';
|
||||
import { ExtensionEnablementManager } from './extensions/extensionEnablement.js';
|
||||
import chalk from 'chalk';
|
||||
import type { ConfirmationRequest } from '../ui/types.js';
|
||||
|
||||
export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
|
||||
|
||||
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
|
||||
export const INSTALL_METADATA_FILENAME = '.qwen-extension-install.json';
|
||||
|
||||
export interface Extension {
|
||||
path: string;
|
||||
config: ExtensionConfig;
|
||||
contextFiles: string[];
|
||||
installMetadata?: ExtensionInstallMetadata | undefined;
|
||||
}
|
||||
|
||||
export interface ExtensionConfig {
|
||||
name: string;
|
||||
version: string;
|
||||
mcpServers?: Record<string, MCPServerConfig>;
|
||||
contextFileName?: string | string[];
|
||||
excludeTools?: string[];
|
||||
}
|
||||
|
||||
export interface ExtensionUpdateInfo {
|
||||
name: string;
|
||||
originalVersion: string;
|
||||
updatedVersion: string;
|
||||
}
|
||||
|
||||
export class ExtensionStorage {
|
||||
private readonly extensionName: string;
|
||||
|
||||
constructor(extensionName: string) {
|
||||
this.extensionName = extensionName;
|
||||
}
|
||||
|
||||
getExtensionDir(): string {
|
||||
return path.join(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
this.extensionName,
|
||||
);
|
||||
}
|
||||
|
||||
getConfigPath(): string {
|
||||
return path.join(this.getExtensionDir(), EXTENSIONS_CONFIG_FILENAME);
|
||||
}
|
||||
|
||||
static getUserExtensionsDir(): string {
|
||||
const storage = new Storage(os.homedir());
|
||||
return storage.getExtensionsDir();
|
||||
}
|
||||
|
||||
static async createTmpDir(): Promise<string> {
|
||||
return await fs.promises.mkdtemp(path.join(os.tmpdir(), 'qwen-extension'));
|
||||
}
|
||||
}
|
||||
|
||||
export function getWorkspaceExtensions(workspaceDir: string): Extension[] {
|
||||
// If the workspace dir is the user extensions dir, there are no workspace extensions.
|
||||
if (path.resolve(workspaceDir) === path.resolve(os.homedir())) {
|
||||
return [];
|
||||
}
|
||||
return loadExtensionsFromDir(workspaceDir);
|
||||
}
|
||||
|
||||
export async function copyExtension(
|
||||
source: string,
|
||||
destination: string,
|
||||
): Promise<void> {
|
||||
await fs.promises.cp(source, destination, { recursive: true });
|
||||
}
|
||||
|
||||
export async function performWorkspaceExtensionMigration(
|
||||
extensions: Extension[],
|
||||
requestConsent: (consent: string) => Promise<boolean>,
|
||||
): Promise<string[]> {
|
||||
const failedInstallNames: string[] = [];
|
||||
|
||||
for (const extension of extensions) {
|
||||
try {
|
||||
const installMetadata: ExtensionInstallMetadata = {
|
||||
source: extension.path,
|
||||
type: 'local',
|
||||
};
|
||||
await installExtension(installMetadata, requestConsent);
|
||||
} catch (_) {
|
||||
failedInstallNames.push(extension.config.name);
|
||||
}
|
||||
}
|
||||
return failedInstallNames;
|
||||
}
|
||||
|
||||
function getTelemetryConfig(cwd: string) {
|
||||
const settings = loadSettings(cwd);
|
||||
const config = new Config({
|
||||
telemetry: settings.merged.telemetry,
|
||||
interactive: false,
|
||||
targetDir: cwd,
|
||||
cwd,
|
||||
model: '',
|
||||
debugMode: false,
|
||||
});
|
||||
return config;
|
||||
}
|
||||
|
||||
export function loadExtensions(
|
||||
extensionEnablementManager: ExtensionEnablementManager,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): Extension[] {
|
||||
const settings = loadSettings(workspaceDir).merged;
|
||||
const allExtensions = [...loadUserExtensions()];
|
||||
|
||||
if (
|
||||
(isWorkspaceTrusted(settings) ?? true) &&
|
||||
// Default management setting to true
|
||||
!(settings.experimental?.extensionManagement ?? true)
|
||||
) {
|
||||
allExtensions.push(...getWorkspaceExtensions(workspaceDir));
|
||||
}
|
||||
|
||||
const uniqueExtensions = new Map<string, Extension>();
|
||||
|
||||
for (const extension of allExtensions) {
|
||||
if (
|
||||
!uniqueExtensions.has(extension.config.name) &&
|
||||
extensionEnablementManager.isEnabled(extension.config.name, workspaceDir)
|
||||
) {
|
||||
uniqueExtensions.set(extension.config.name, extension);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(uniqueExtensions.values());
|
||||
}
|
||||
|
||||
export function loadUserExtensions(): Extension[] {
|
||||
const userExtensions = loadExtensionsFromDir(os.homedir());
|
||||
|
||||
const uniqueExtensions = new Map<string, Extension>();
|
||||
for (const extension of userExtensions) {
|
||||
if (!uniqueExtensions.has(extension.config.name)) {
|
||||
uniqueExtensions.set(extension.config.name, extension);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(uniqueExtensions.values());
|
||||
}
|
||||
|
||||
export function loadExtensionsFromDir(dir: string): Extension[] {
|
||||
const storage = new Storage(dir);
|
||||
const extensionsDir = storage.getExtensionsDir();
|
||||
if (!fs.existsSync(extensionsDir)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const extensions: Extension[] = [];
|
||||
for (const subdir of fs.readdirSync(extensionsDir)) {
|
||||
const extensionDir = path.join(extensionsDir, subdir);
|
||||
|
||||
const extension = loadExtension({ extensionDir, workspaceDir: dir });
|
||||
if (extension != null) {
|
||||
extensions.push(extension);
|
||||
}
|
||||
}
|
||||
return extensions;
|
||||
}
|
||||
|
||||
export function loadExtension(context: LoadExtensionContext): Extension | null {
|
||||
const { extensionDir, workspaceDir } = context;
|
||||
if (!fs.statSync(extensionDir).isDirectory()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const installMetadata = loadInstallMetadata(extensionDir);
|
||||
let effectiveExtensionPath = extensionDir;
|
||||
|
||||
if (installMetadata?.type === 'link') {
|
||||
effectiveExtensionPath = installMetadata.source;
|
||||
}
|
||||
|
||||
try {
|
||||
let config = loadExtensionConfig({
|
||||
extensionDir: effectiveExtensionPath,
|
||||
workspaceDir,
|
||||
});
|
||||
|
||||
config = resolveEnvVarsInObject(config);
|
||||
|
||||
if (config.mcpServers) {
|
||||
config.mcpServers = Object.fromEntries(
|
||||
Object.entries(config.mcpServers).map(([key, value]) => [
|
||||
key,
|
||||
filterMcpConfig(value),
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
const contextFiles = getContextFileNames(config)
|
||||
.map((contextFileName) =>
|
||||
path.join(effectiveExtensionPath, contextFileName),
|
||||
)
|
||||
.filter((contextFilePath) => fs.existsSync(contextFilePath));
|
||||
|
||||
return {
|
||||
path: effectiveExtensionPath,
|
||||
config,
|
||||
contextFiles,
|
||||
installMetadata,
|
||||
};
|
||||
} catch (e) {
|
||||
console.error(
|
||||
`Warning: Skipping extension in ${effectiveExtensionPath}: ${getErrorMessage(
|
||||
e,
|
||||
)}`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function loadExtensionByName(
|
||||
name: string,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): Extension | null {
|
||||
const userExtensionsDir = ExtensionStorage.getUserExtensionsDir();
|
||||
if (!fs.existsSync(userExtensionsDir)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
for (const subdir of fs.readdirSync(userExtensionsDir)) {
|
||||
const extensionDir = path.join(userExtensionsDir, subdir);
|
||||
if (!fs.statSync(extensionDir).isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
const extension = loadExtension({ extensionDir, workspaceDir });
|
||||
if (
|
||||
extension &&
|
||||
extension.config.name.toLowerCase() === name.toLowerCase()
|
||||
) {
|
||||
return extension;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function filterMcpConfig(original: MCPServerConfig): MCPServerConfig {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { trust, ...rest } = original;
|
||||
return Object.freeze(rest);
|
||||
}
|
||||
|
||||
export function loadInstallMetadata(
|
||||
extensionDir: string,
|
||||
): ExtensionInstallMetadata | undefined {
|
||||
const metadataFilePath = path.join(extensionDir, INSTALL_METADATA_FILENAME);
|
||||
try {
|
||||
const configContent = fs.readFileSync(metadataFilePath, 'utf-8');
|
||||
const metadata = JSON.parse(configContent) as ExtensionInstallMetadata;
|
||||
return metadata;
|
||||
} catch (_e) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function getContextFileNames(config: ExtensionConfig): string[] {
|
||||
if (!config.contextFileName) {
|
||||
return ['QWEN.md'];
|
||||
} else if (!Array.isArray(config.contextFileName)) {
|
||||
return [config.contextFileName];
|
||||
}
|
||||
return config.contextFileName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an annotated list of extensions. If an extension is listed in enabledExtensionNames, it will be active.
|
||||
* If enabledExtensionNames is empty, an extension is active unless it is disabled.
|
||||
* @param extensions The base list of extensions.
|
||||
* @param enabledExtensionNames The names of explicitly enabled extensions.
|
||||
* @param workspaceDir The current workspace directory.
|
||||
*/
|
||||
export function annotateActiveExtensions(
|
||||
extensions: Extension[],
|
||||
workspaceDir: string,
|
||||
manager: ExtensionEnablementManager,
|
||||
): GeminiCLIExtension[] {
|
||||
manager.validateExtensionOverrides(extensions);
|
||||
return extensions.map((extension) => ({
|
||||
name: extension.config.name,
|
||||
version: extension.config.version,
|
||||
isActive: manager.isEnabled(extension.config.name, workspaceDir),
|
||||
path: extension.path,
|
||||
installMetadata: extension.installMetadata,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to perform an action, by reading a Y/n
|
||||
* character from stdin.
|
||||
*
|
||||
* This should not be called from interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param consentDescription The description of the thing they will be consenting to.
|
||||
* @returns boolean, whether they consented or not.
|
||||
*/
|
||||
export async function requestConsentNonInteractive(
|
||||
consentDescription: string,
|
||||
): Promise<boolean> {
|
||||
console.info(consentDescription);
|
||||
const result = await promptForConsentNonInteractive(
|
||||
'Do you want to continue? [Y/n]: ',
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to perform an action, in interactive mode.
|
||||
*
|
||||
* This should not be called from non-interactive mode as it will not work.
|
||||
*
|
||||
* @param consentDescription The description of the thing they will be consenting to.
|
||||
* @param setExtensionUpdateConfirmationRequest A function to actually add a prompt to the UI.
|
||||
* @returns boolean, whether they consented or not.
|
||||
*/
|
||||
export async function requestConsentInteractive(
|
||||
consentDescription: string,
|
||||
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
|
||||
): Promise<boolean> {
|
||||
return await promptForConsentInteractive(
|
||||
consentDescription + '\n\nDo you want to continue?',
|
||||
addExtensionUpdateConfirmationRequest,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks users a prompt and awaits for a y/n response on stdin.
|
||||
*
|
||||
* This should not be called from interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param prompt A yes/no prompt to ask the user
|
||||
* @returns Whether or not the user answers 'y' (yes). Defaults to 'yes' on enter.
|
||||
*/
|
||||
async function promptForConsentNonInteractive(
|
||||
prompt: string,
|
||||
): Promise<boolean> {
|
||||
const readline = await import('node:readline');
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
});
|
||||
|
||||
return new Promise((resolve) => {
|
||||
rl.question(prompt, (answer) => {
|
||||
rl.close();
|
||||
resolve(['y', ''].includes(answer.trim().toLowerCase()));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks users an interactive yes/no prompt.
|
||||
*
|
||||
* This should not be called from non-interactive mode as it will break the CLI.
|
||||
*
|
||||
* @param prompt A markdown prompt to ask the user
|
||||
* @param setExtensionUpdateConfirmationRequest Function to update the UI state with the confirmation request.
|
||||
* @returns Whether or not the user answers yes.
|
||||
*/
|
||||
async function promptForConsentInteractive(
|
||||
prompt: string,
|
||||
addExtensionUpdateConfirmationRequest: (value: ConfirmationRequest) => void,
|
||||
): Promise<boolean> {
|
||||
return await new Promise<boolean>((resolve) => {
|
||||
addExtensionUpdateConfirmationRequest({
|
||||
prompt,
|
||||
onConfirm: (resolvedConfirmed) => {
|
||||
resolve(resolvedConfirmed);
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function installExtension(
|
||||
installMetadata: ExtensionInstallMetadata,
|
||||
requestConsent: (consent: string) => Promise<boolean>,
|
||||
cwd: string = process.cwd(),
|
||||
previousExtensionConfig?: ExtensionConfig,
|
||||
): Promise<string> {
|
||||
const telemetryConfig = getTelemetryConfig(cwd);
|
||||
let newExtensionConfig: ExtensionConfig | null = null;
|
||||
let localSourcePath: string | undefined;
|
||||
|
||||
try {
|
||||
const settings = loadSettings(cwd).merged;
|
||||
if (!isWorkspaceTrusted(settings)) {
|
||||
throw new Error(
|
||||
`Could not install extension from untrusted folder at ${installMetadata.source}`,
|
||||
);
|
||||
}
|
||||
|
||||
const extensionsDir = ExtensionStorage.getUserExtensionsDir();
|
||||
await fs.promises.mkdir(extensionsDir, { recursive: true });
|
||||
|
||||
if (
|
||||
!path.isAbsolute(installMetadata.source) &&
|
||||
(installMetadata.type === 'local' || installMetadata.type === 'link')
|
||||
) {
|
||||
installMetadata.source = path.resolve(cwd, installMetadata.source);
|
||||
}
|
||||
|
||||
let tempDir: string | undefined;
|
||||
|
||||
if (
|
||||
installMetadata.type === 'git' ||
|
||||
installMetadata.type === 'github-release'
|
||||
) {
|
||||
tempDir = await ExtensionStorage.createTmpDir();
|
||||
try {
|
||||
const result = await downloadFromGitHubRelease(
|
||||
installMetadata,
|
||||
tempDir,
|
||||
);
|
||||
installMetadata.type = result.type;
|
||||
installMetadata.releaseTag = result.tagName;
|
||||
} catch (_error) {
|
||||
await cloneFromGit(installMetadata, tempDir);
|
||||
installMetadata.type = 'git';
|
||||
}
|
||||
localSourcePath = tempDir;
|
||||
} else if (
|
||||
installMetadata.type === 'local' ||
|
||||
installMetadata.type === 'link'
|
||||
) {
|
||||
localSourcePath = installMetadata.source;
|
||||
} else {
|
||||
throw new Error(`Unsupported install type: ${installMetadata.type}`);
|
||||
}
|
||||
|
||||
try {
|
||||
newExtensionConfig = loadExtensionConfig({
|
||||
extensionDir: localSourcePath,
|
||||
workspaceDir: cwd,
|
||||
});
|
||||
|
||||
const newExtensionName = newExtensionConfig.name;
|
||||
const extensionStorage = new ExtensionStorage(newExtensionName);
|
||||
const destinationPath = extensionStorage.getExtensionDir();
|
||||
|
||||
const installedExtensions = loadUserExtensions();
|
||||
if (
|
||||
installedExtensions.some(
|
||||
(installed) => installed.config.name === newExtensionName,
|
||||
)
|
||||
) {
|
||||
throw new Error(
|
||||
`Extension "${newExtensionName}" is already installed. Please uninstall it first.`,
|
||||
);
|
||||
}
|
||||
await maybeRequestConsentOrFail(
|
||||
newExtensionConfig,
|
||||
requestConsent,
|
||||
previousExtensionConfig,
|
||||
);
|
||||
await fs.promises.mkdir(destinationPath, { recursive: true });
|
||||
|
||||
if (
|
||||
installMetadata.type === 'local' ||
|
||||
installMetadata.type === 'git' ||
|
||||
installMetadata.type === 'github-release'
|
||||
) {
|
||||
await copyExtension(localSourcePath, destinationPath);
|
||||
}
|
||||
|
||||
const metadataString = JSON.stringify(installMetadata, null, 2);
|
||||
const metadataPath = path.join(
|
||||
destinationPath,
|
||||
INSTALL_METADATA_FILENAME,
|
||||
);
|
||||
await fs.promises.writeFile(metadataPath, metadataString);
|
||||
} finally {
|
||||
if (tempDir) {
|
||||
await fs.promises.rm(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
logExtensionInstallEvent(
|
||||
telemetryConfig,
|
||||
new ExtensionInstallEvent(
|
||||
newExtensionConfig!.name,
|
||||
newExtensionConfig!.version,
|
||||
installMetadata.source,
|
||||
'success',
|
||||
),
|
||||
);
|
||||
|
||||
enableExtension(newExtensionConfig!.name, SettingScope.User);
|
||||
return newExtensionConfig!.name;
|
||||
} catch (error) {
|
||||
// Attempt to load config from the source path even if installation fails
|
||||
// to get the name and version for logging.
|
||||
if (!newExtensionConfig && localSourcePath) {
|
||||
try {
|
||||
newExtensionConfig = loadExtensionConfig({
|
||||
extensionDir: localSourcePath,
|
||||
workspaceDir: cwd,
|
||||
});
|
||||
} catch {
|
||||
// Ignore error, this is just for logging.
|
||||
}
|
||||
}
|
||||
logExtensionInstallEvent(
|
||||
telemetryConfig,
|
||||
new ExtensionInstallEvent(
|
||||
newExtensionConfig?.name ?? '',
|
||||
newExtensionConfig?.version ?? '',
|
||||
installMetadata.source,
|
||||
'error',
|
||||
),
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a consent string for installing an extension based on it's
|
||||
* extensionConfig.
|
||||
*/
|
||||
function extensionConsentString(extensionConfig: ExtensionConfig): string {
|
||||
const output: string[] = [];
|
||||
const mcpServerEntries = Object.entries(extensionConfig.mcpServers || {});
|
||||
output.push(`Installing extension "${extensionConfig.name}".`);
|
||||
output.push(
|
||||
'**Extensions may introduce unexpected behavior. Ensure you have investigated the extension source and trust the author.**',
|
||||
);
|
||||
|
||||
if (mcpServerEntries.length) {
|
||||
output.push('This extension will run the following MCP servers:');
|
||||
for (const [key, mcpServer] of mcpServerEntries) {
|
||||
const isLocal = !!mcpServer.command;
|
||||
const source =
|
||||
mcpServer.httpUrl ??
|
||||
`${mcpServer.command || ''}${mcpServer.args ? ' ' + mcpServer.args.join(' ') : ''}`;
|
||||
output.push(` * ${key} (${isLocal ? 'local' : 'remote'}): ${source}`);
|
||||
}
|
||||
}
|
||||
if (extensionConfig.contextFileName) {
|
||||
output.push(
|
||||
`This extension will append info to your QWEN.md context using ${extensionConfig.contextFileName}`,
|
||||
);
|
||||
}
|
||||
if (extensionConfig.excludeTools) {
|
||||
output.push(
|
||||
`This extension will exclude the following core tools: ${extensionConfig.excludeTools}`,
|
||||
);
|
||||
}
|
||||
return output.join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests consent from the user to install an extension (extensionConfig), if
|
||||
* there is any difference between the consent string for `extensionConfig` and
|
||||
* `previousExtensionConfig`.
|
||||
*
|
||||
* Always requests consent if previousExtensionConfig is null.
|
||||
*
|
||||
* Throws if the user does not consent.
|
||||
*/
|
||||
async function maybeRequestConsentOrFail(
|
||||
extensionConfig: ExtensionConfig,
|
||||
requestConsent: (consent: string) => Promise<boolean>,
|
||||
previousExtensionConfig?: ExtensionConfig,
|
||||
) {
|
||||
const extensionConsent = extensionConsentString(extensionConfig);
|
||||
if (previousExtensionConfig) {
|
||||
const previousExtensionConsent = extensionConsentString(
|
||||
previousExtensionConfig,
|
||||
);
|
||||
if (previousExtensionConsent === extensionConsent) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (!(await requestConsent(extensionConsent))) {
|
||||
throw new Error(`Installation cancelled for "${extensionConfig.name}".`);
|
||||
}
|
||||
}
|
||||
|
||||
export function validateName(name: string) {
|
||||
if (!/^[a-zA-Z0-9-]+$/.test(name)) {
|
||||
throw new Error(
|
||||
`Invalid extension name: "${name}". Only letters (a-z, A-Z), numbers (0-9), and dashes (-) are allowed.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function loadExtensionConfig(
|
||||
context: LoadExtensionContext,
|
||||
): ExtensionConfig {
|
||||
const { extensionDir, workspaceDir } = context;
|
||||
const configFilePath = path.join(extensionDir, EXTENSIONS_CONFIG_FILENAME);
|
||||
if (!fs.existsSync(configFilePath)) {
|
||||
throw new Error(`Configuration file not found at ${configFilePath}`);
|
||||
}
|
||||
try {
|
||||
const configContent = fs.readFileSync(configFilePath, 'utf-8');
|
||||
const config = recursivelyHydrateStrings(JSON.parse(configContent), {
|
||||
extensionPath: extensionDir,
|
||||
workspacePath: workspaceDir,
|
||||
'/': path.sep,
|
||||
pathSeparator: path.sep,
|
||||
}) as unknown as ExtensionConfig;
|
||||
if (!config.name || !config.version) {
|
||||
throw new Error(
|
||||
`Invalid configuration in ${configFilePath}: missing ${!config.name ? '"name"' : '"version"'}`,
|
||||
);
|
||||
}
|
||||
validateName(config.name);
|
||||
return config;
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`Failed to load extension config from ${configFilePath}: ${getErrorMessage(
|
||||
e,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function uninstallExtension(
|
||||
extensionIdentifier: string,
|
||||
cwd: string = process.cwd(),
|
||||
): Promise<void> {
|
||||
const telemetryConfig = getTelemetryConfig(cwd);
|
||||
const installedExtensions = loadUserExtensions();
|
||||
const extensionName = installedExtensions.find(
|
||||
(installed) =>
|
||||
installed.config.name.toLowerCase() ===
|
||||
extensionIdentifier.toLowerCase() ||
|
||||
installed.installMetadata?.source.toLowerCase() ===
|
||||
extensionIdentifier.toLowerCase(),
|
||||
)?.config.name;
|
||||
if (!extensionName) {
|
||||
throw new Error(`Extension not found.`);
|
||||
}
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
[extensionName],
|
||||
);
|
||||
manager.remove(extensionName);
|
||||
const storage = new ExtensionStorage(extensionName);
|
||||
|
||||
await fs.promises.rm(storage.getExtensionDir(), {
|
||||
recursive: true,
|
||||
force: true,
|
||||
});
|
||||
logExtensionUninstall(
|
||||
telemetryConfig,
|
||||
new ExtensionUninstallEvent(extensionName, 'success'),
|
||||
);
|
||||
}
|
||||
|
||||
export function toOutputString(
|
||||
extension: Extension,
|
||||
workspaceDir: string,
|
||||
): string {
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
);
|
||||
const userEnabled = manager.isEnabled(extension.config.name, os.homedir());
|
||||
const workspaceEnabled = manager.isEnabled(
|
||||
extension.config.name,
|
||||
workspaceDir,
|
||||
);
|
||||
|
||||
const status = workspaceEnabled ? chalk.green('✓') : chalk.red('✗');
|
||||
let output = `${status} ${extension.config.name} (${extension.config.version})`;
|
||||
output += `\n Path: ${extension.path}`;
|
||||
if (extension.installMetadata) {
|
||||
output += `\n Source: ${extension.installMetadata.source} (Type: ${extension.installMetadata.type})`;
|
||||
if (extension.installMetadata.ref) {
|
||||
output += `\n Ref: ${extension.installMetadata.ref}`;
|
||||
}
|
||||
if (extension.installMetadata.releaseTag) {
|
||||
output += `\n Release tag: ${extension.installMetadata.releaseTag}`;
|
||||
}
|
||||
}
|
||||
output += `\n Enabled (User): ${userEnabled}`;
|
||||
output += `\n Enabled (Workspace): ${workspaceEnabled}`;
|
||||
if (extension.contextFiles.length > 0) {
|
||||
output += `\n Context files:`;
|
||||
extension.contextFiles.forEach((contextFile) => {
|
||||
output += `\n ${contextFile}`;
|
||||
});
|
||||
}
|
||||
if (extension.config.mcpServers) {
|
||||
output += `\n MCP servers:`;
|
||||
Object.keys(extension.config.mcpServers).forEach((key) => {
|
||||
output += `\n ${key}`;
|
||||
});
|
||||
}
|
||||
if (extension.config.excludeTools) {
|
||||
output += `\n Excluded tools:`;
|
||||
extension.config.excludeTools.forEach((tool) => {
|
||||
output += `\n ${tool}`;
|
||||
});
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
export function disableExtension(
|
||||
name: string,
|
||||
scope: SettingScope,
|
||||
cwd: string = process.cwd(),
|
||||
) {
|
||||
const config = getTelemetryConfig(cwd);
|
||||
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
|
||||
throw new Error('System and SystemDefaults scopes are not supported.');
|
||||
}
|
||||
const extension = loadExtensionByName(name, cwd);
|
||||
if (!extension) {
|
||||
throw new Error(`Extension with name ${name} does not exist.`);
|
||||
}
|
||||
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
[name],
|
||||
);
|
||||
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
|
||||
manager.disable(name, true, scopePath);
|
||||
logExtensionDisable(config, new ExtensionDisableEvent(name, scope));
|
||||
}
|
||||
|
||||
export function enableExtension(
|
||||
name: string,
|
||||
scope: SettingScope,
|
||||
cwd: string = process.cwd(),
|
||||
) {
|
||||
if (scope === SettingScope.System || scope === SettingScope.SystemDefaults) {
|
||||
throw new Error('System and SystemDefaults scopes are not supported.');
|
||||
}
|
||||
const extension = loadExtensionByName(name, cwd);
|
||||
if (!extension) {
|
||||
throw new Error(`Extension with name ${name} does not exist.`);
|
||||
}
|
||||
const manager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
);
|
||||
const scopePath = scope === SettingScope.Workspace ? cwd : os.homedir();
|
||||
manager.enable(name, true, scopePath);
|
||||
const config = getTelemetryConfig(cwd);
|
||||
logExtensionEnable(config, new ExtensionEnableEvent(name, scope));
|
||||
}
|
||||
@@ -1,424 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as path from 'node:path';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
|
||||
import { ExtensionEnablementManager, Override } from './extensionEnablement.js';
|
||||
import type { Extension } from '../extension.js';
|
||||
|
||||
// Helper to create a temporary directory for testing
|
||||
function createTestDir() {
|
||||
const dirPath = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-test-'));
|
||||
return {
|
||||
path: dirPath,
|
||||
cleanup: () => fs.rmSync(dirPath, { recursive: true, force: true }),
|
||||
};
|
||||
}
|
||||
|
||||
let testDir: { path: string; cleanup: () => void };
|
||||
let configDir: string;
|
||||
let manager: ExtensionEnablementManager;
|
||||
|
||||
describe('ExtensionEnablementManager', () => {
|
||||
beforeEach(() => {
|
||||
testDir = createTestDir();
|
||||
configDir = path.join(testDir.path, '.gemini');
|
||||
manager = new ExtensionEnablementManager(configDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
testDir.cleanup();
|
||||
// Reset the singleton instance for test isolation
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(ExtensionEnablementManager as any).instance = undefined;
|
||||
});
|
||||
|
||||
describe('isEnabled', () => {
|
||||
it('should return true if extension is not configured', () => {
|
||||
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true if no overrides match', () => {
|
||||
manager.disable('ext-test', false, '/another/path');
|
||||
expect(manager.isEnabled('ext-test', '/any/path')).toBe(true);
|
||||
});
|
||||
|
||||
it('should enable a path based on an override rule', () => {
|
||||
manager.disable('ext-test', true, '/');
|
||||
manager.enable('ext-test', true, '/home/user/projects/');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it('should disable a path based on a disable override rule', () => {
|
||||
manager.enable('ext-test', true, '/');
|
||||
manager.disable('ext-test', true, '/home/user/projects/');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
it('should respect the last matching rule (enable wins)', () => {
|
||||
manager.disable('ext-test', true, '/home/user/projects/');
|
||||
manager.enable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it('should respect the last matching rule (disable wins)', () => {
|
||||
manager.enable('ext-test', true, '/home/user/projects/');
|
||||
manager.disable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle', () => {
|
||||
manager.enable('ext-test', true, '/home/user/projects');
|
||||
manager.disable('ext-test', false, '/home/user/projects/my-app');
|
||||
expect(manager.isEnabled('ext-test', '/home/user/projects/my-app')).toBe(
|
||||
false,
|
||||
);
|
||||
expect(
|
||||
manager.isEnabled('ext-test', '/home/user/projects/something-else'),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('includeSubdirs', () => {
|
||||
it('should add a glob when enabling with includeSubdirs', () => {
|
||||
manager.enable('ext-test', true, '/path/to/dir');
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should not add a glob when enabling without includeSubdirs', () => {
|
||||
manager.enable('ext-test', false, '/path/to/dir');
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
|
||||
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should add a glob when disabling with includeSubdirs', () => {
|
||||
manager.disable('ext-test', true, '/path/to/dir');
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('!/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should remove conflicting glob rule when enabling without subdirs', () => {
|
||||
manager.enable('ext-test', true, '/path/to/dir'); // Adds /path/to/dir*
|
||||
manager.enable('ext-test', false, '/path/to/dir'); // Should remove the glob
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('/path/to/dir/');
|
||||
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should remove conflicting non-glob rule when enabling with subdirs', () => {
|
||||
manager.enable('ext-test', false, '/path/to/dir'); // Adds /path/to/dir
|
||||
manager.enable('ext-test', true, '/path/to/dir'); // Should remove the non-glob
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('/path/to/dir/*');
|
||||
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/');
|
||||
});
|
||||
|
||||
it('should remove conflicting rules when disabling', () => {
|
||||
manager.enable('ext-test', true, '/path/to/dir'); // enabled with glob
|
||||
manager.disable('ext-test', false, '/path/to/dir'); // disabled without
|
||||
const config = manager.readConfig();
|
||||
expect(config['ext-test'].overrides).toContain('!/path/to/dir/');
|
||||
expect(config['ext-test'].overrides).not.toContain('/path/to/dir/*');
|
||||
});
|
||||
|
||||
it('should correctly evaluate isEnabled with subdirs', () => {
|
||||
manager.disable('ext-test', true, '/');
|
||||
manager.enable('ext-test', true, '/path/to/dir');
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir/')).toBe(true);
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir/sub/')).toBe(true);
|
||||
expect(manager.isEnabled('ext-test', '/path/to/another/')).toBe(false);
|
||||
});
|
||||
|
||||
it('should correctly evaluate isEnabled without subdirs', () => {
|
||||
manager.disable('ext-test', true, '/*');
|
||||
manager.enable('ext-test', false, '/path/to/dir');
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(true);
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir/sub')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('pruning child rules', () => {
|
||||
it('should remove child rules when enabling a parent with subdirs', () => {
|
||||
// Pre-existing rules for children
|
||||
manager.enable('ext-test', false, '/path/to/dir/subdir1');
|
||||
manager.disable('ext-test', true, '/path/to/dir/subdir2');
|
||||
manager.enable('ext-test', false, '/path/to/another/dir');
|
||||
|
||||
// Enable the parent directory
|
||||
manager.enable('ext-test', true, '/path/to/dir');
|
||||
|
||||
const config = manager.readConfig();
|
||||
const overrides = config['ext-test'].overrides;
|
||||
|
||||
// The new parent rule should be present
|
||||
expect(overrides).toContain(`/path/to/dir/*`);
|
||||
|
||||
// Child rules should be removed
|
||||
expect(overrides).not.toContain('/path/to/dir/subdir1/');
|
||||
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
|
||||
|
||||
// Unrelated rules should remain
|
||||
expect(overrides).toContain('/path/to/another/dir/');
|
||||
});
|
||||
|
||||
it('should remove child rules when disabling a parent with subdirs', () => {
|
||||
// Pre-existing rules for children
|
||||
manager.enable('ext-test', false, '/path/to/dir/subdir1');
|
||||
manager.disable('ext-test', true, '/path/to/dir/subdir2');
|
||||
manager.enable('ext-test', false, '/path/to/another/dir');
|
||||
|
||||
// Disable the parent directory
|
||||
manager.disable('ext-test', true, '/path/to/dir');
|
||||
|
||||
const config = manager.readConfig();
|
||||
const overrides = config['ext-test'].overrides;
|
||||
|
||||
// The new parent rule should be present
|
||||
expect(overrides).toContain(`!/path/to/dir/*`);
|
||||
|
||||
// Child rules should be removed
|
||||
expect(overrides).not.toContain('/path/to/dir/subdir1/');
|
||||
expect(overrides).not.toContain(`!/path/to/dir/subdir2/*`);
|
||||
|
||||
// Unrelated rules should remain
|
||||
expect(overrides).toContain('/path/to/another/dir/');
|
||||
});
|
||||
|
||||
it('should not remove child rules if includeSubdirs is false', () => {
|
||||
manager.enable('ext-test', false, '/path/to/dir/subdir1');
|
||||
manager.enable('ext-test', false, '/path/to/dir'); // Not including subdirs
|
||||
|
||||
const config = manager.readConfig();
|
||||
const overrides = config['ext-test'].overrides;
|
||||
|
||||
expect(overrides).toContain('/path/to/dir/subdir1/');
|
||||
expect(overrides).toContain('/path/to/dir/');
|
||||
});
|
||||
});
|
||||
|
||||
it('should enable a path based on an enable override', () => {
|
||||
manager.disable('ext-test', true, '/Users/chrstn');
|
||||
manager.enable('ext-test', true, '/Users/chrstn/gemini-cli');
|
||||
|
||||
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it('should ignore subdirs', () => {
|
||||
manager.disable('ext-test', false, '/Users/chrstn');
|
||||
expect(manager.isEnabled('ext-test', '/Users/chrstn/gemini-cli')).toBe(
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
describe('extension overrides (-e <name>)', () => {
|
||||
beforeEach(() => {
|
||||
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
|
||||
});
|
||||
|
||||
it('can enable extensions, case-insensitive', () => {
|
||||
manager.disable('ext-test', true, '/');
|
||||
expect(manager.isEnabled('ext-test', '/')).toBe(true);
|
||||
expect(manager.isEnabled('Ext-Test', '/')).toBe(true);
|
||||
// Double check that it would have been disabled otherwise
|
||||
expect(
|
||||
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it('disable all other extensions', () => {
|
||||
manager = new ExtensionEnablementManager(configDir, ['ext-test']);
|
||||
manager.enable('ext-test-2', true, '/');
|
||||
expect(manager.isEnabled('ext-test-2', '/')).toBe(false);
|
||||
// Double check that it would have been enabled otherwise
|
||||
expect(
|
||||
new ExtensionEnablementManager(configDir).isEnabled('ext-test-2', '/'),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('none disables all extensions', () => {
|
||||
manager = new ExtensionEnablementManager(configDir, ['none']);
|
||||
manager.enable('ext-test', true, '/');
|
||||
expect(manager.isEnabled('ext-test', '/path/to/dir')).toBe(false);
|
||||
// Double check that it would have been enabled otherwise
|
||||
expect(
|
||||
new ExtensionEnablementManager(configDir).isEnabled('ext-test', '/'),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateExtensionOverrides', () => {
|
||||
let consoleErrorSpy: ReturnType<typeof vi.spyOn>;
|
||||
|
||||
beforeEach(() => {
|
||||
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
consoleErrorSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should not log an error if enabledExtensionNamesOverride is empty', () => {
|
||||
const manager = new ExtensionEnablementManager(configDir, []);
|
||||
manager.validateExtensionOverrides([]);
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not log an error if all enabledExtensionNamesOverride are valid', () => {
|
||||
const manager = new ExtensionEnablementManager(configDir, [
|
||||
'ext-one',
|
||||
'ext-two',
|
||||
]);
|
||||
const extensions = [
|
||||
{ config: { name: 'ext-one' } },
|
||||
{ config: { name: 'ext-two' } },
|
||||
] as Extension[];
|
||||
manager.validateExtensionOverrides(extensions);
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should log an error for each invalid extension name in enabledExtensionNamesOverride', () => {
|
||||
const manager = new ExtensionEnablementManager(configDir, [
|
||||
'ext-one',
|
||||
'ext-invalid',
|
||||
'ext-another-invalid',
|
||||
]);
|
||||
const extensions = [
|
||||
{ config: { name: 'ext-one' } },
|
||||
{ config: { name: 'ext-two' } },
|
||||
] as Extension[];
|
||||
manager.validateExtensionOverrides(extensions);
|
||||
expect(consoleErrorSpy).toHaveBeenCalledTimes(2);
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'Extension not found: ext-invalid',
|
||||
);
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'Extension not found: ext-another-invalid',
|
||||
);
|
||||
});
|
||||
|
||||
it('should not log an error if "none" is in enabledExtensionNamesOverride', () => {
|
||||
const manager = new ExtensionEnablementManager(configDir, ['none']);
|
||||
manager.validateExtensionOverrides([]);
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Override', () => {
|
||||
it('should create an override from input', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
expect(override.baseRule).toBe(`/path/to/dir/`);
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(true);
|
||||
});
|
||||
|
||||
it('should create a disable override from input', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.baseRule).toBe(`/path/to/dir/`);
|
||||
expect(override.isDisable).toBe(true);
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create an override from a file rule', () => {
|
||||
const override = Override.fromFileRule('/path/to/dir');
|
||||
expect(override.baseRule).toBe('/path/to/dir');
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create a disable override from a file rule', () => {
|
||||
const override = Override.fromFileRule('!/path/to/dir/');
|
||||
expect(override.isDisable).toBe(true);
|
||||
expect(override.baseRule).toBe('/path/to/dir/');
|
||||
expect(override.includeSubdirs).toBe(false);
|
||||
});
|
||||
|
||||
it('should create an override with subdirs from a file rule', () => {
|
||||
const override = Override.fromFileRule('/path/to/dir/*');
|
||||
expect(override.baseRule).toBe('/path/to/dir/');
|
||||
expect(override.isDisable).toBe(false);
|
||||
expect(override.includeSubdirs).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify conflicting overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/dir', false);
|
||||
expect(override1.conflictsWith(override2)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify non-conflicting overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/another/dir', true);
|
||||
expect(override1.conflictsWith(override2)).toBe(false);
|
||||
});
|
||||
|
||||
it('should correctly identify equal overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('/path/to/dir', true);
|
||||
expect(override1.isEqualTo(override2)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify unequal overrides', () => {
|
||||
const override1 = Override.fromInput('/path/to/dir', true);
|
||||
const override2 = Override.fromInput('!/path/to/dir', true);
|
||||
expect(override1.isEqualTo(override2)).toBe(false);
|
||||
});
|
||||
|
||||
it('should generate the correct regex', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
const regex = override.asRegex();
|
||||
expect(regex.test('/path/to/dir/')).toBe(true);
|
||||
expect(regex.test('/path/to/dir/subdir')).toBe(true);
|
||||
expect(regex.test('/path/to/another/dir')).toBe(false);
|
||||
});
|
||||
|
||||
it('should correctly identify child overrides', () => {
|
||||
const parent = Override.fromInput('/path/to/dir', true);
|
||||
const child = Override.fromInput('/path/to/dir/subdir', false);
|
||||
expect(child.isChildOf(parent)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify child overrides with glob', () => {
|
||||
const parent = Override.fromInput('/path/to/dir/*', true);
|
||||
const child = Override.fromInput('/path/to/dir/subdir', false);
|
||||
expect(child.isChildOf(parent)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify non-child overrides', () => {
|
||||
const parent = Override.fromInput('/path/to/dir', true);
|
||||
const other = Override.fromInput('/path/to/another/dir', false);
|
||||
expect(other.isChildOf(parent)).toBe(false);
|
||||
});
|
||||
|
||||
it('should generate the correct output string', () => {
|
||||
const override = Override.fromInput('/path/to/dir', true);
|
||||
expect(override.output()).toBe(`/path/to/dir/*`);
|
||||
});
|
||||
|
||||
it('should generate the correct output string for a disable override', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.output()).toBe(`!/path/to/dir/`);
|
||||
});
|
||||
|
||||
it('should disable a path based on a disable override rule', () => {
|
||||
const override = Override.fromInput('!/path/to/dir', false);
|
||||
expect(override.output()).toBe(`!/path/to/dir/`);
|
||||
});
|
||||
});
|
||||
@@ -1,239 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { type Extension } from '../extension.js';
|
||||
|
||||
export interface ExtensionEnablementConfig {
|
||||
overrides: string[];
|
||||
}
|
||||
|
||||
export interface AllExtensionsEnablementConfig {
|
||||
[extensionName: string]: ExtensionEnablementConfig;
|
||||
}
|
||||
|
||||
export class Override {
|
||||
constructor(
|
||||
public baseRule: string,
|
||||
public isDisable: boolean,
|
||||
public includeSubdirs: boolean,
|
||||
) {}
|
||||
|
||||
static fromInput(inputRule: string, includeSubdirs: boolean): Override {
|
||||
const isDisable = inputRule.startsWith('!');
|
||||
let baseRule = isDisable ? inputRule.substring(1) : inputRule;
|
||||
baseRule = ensureLeadingAndTrailingSlash(baseRule);
|
||||
return new Override(baseRule, isDisable, includeSubdirs);
|
||||
}
|
||||
|
||||
static fromFileRule(fileRule: string): Override {
|
||||
const isDisable = fileRule.startsWith('!');
|
||||
let baseRule = isDisable ? fileRule.substring(1) : fileRule;
|
||||
const includeSubdirs = baseRule.endsWith('*');
|
||||
baseRule = includeSubdirs
|
||||
? baseRule.substring(0, baseRule.length - 1)
|
||||
: baseRule;
|
||||
return new Override(baseRule, isDisable, includeSubdirs);
|
||||
}
|
||||
|
||||
conflictsWith(other: Override): boolean {
|
||||
if (this.baseRule === other.baseRule) {
|
||||
return (
|
||||
this.includeSubdirs !== other.includeSubdirs ||
|
||||
this.isDisable !== other.isDisable
|
||||
);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
isEqualTo(other: Override): boolean {
|
||||
return (
|
||||
this.baseRule === other.baseRule &&
|
||||
this.includeSubdirs === other.includeSubdirs &&
|
||||
this.isDisable === other.isDisable
|
||||
);
|
||||
}
|
||||
|
||||
asRegex(): RegExp {
|
||||
return globToRegex(`${this.baseRule}${this.includeSubdirs ? '*' : ''}`);
|
||||
}
|
||||
|
||||
isChildOf(parent: Override) {
|
||||
if (!parent.includeSubdirs) {
|
||||
return false;
|
||||
}
|
||||
return parent.asRegex().test(this.baseRule);
|
||||
}
|
||||
|
||||
output(): string {
|
||||
return `${this.isDisable ? '!' : ''}${this.baseRule}${this.includeSubdirs ? '*' : ''}`;
|
||||
}
|
||||
|
||||
matchesPath(path: string) {
|
||||
return this.asRegex().test(path);
|
||||
}
|
||||
}
|
||||
|
||||
const ensureLeadingAndTrailingSlash = function (dirPath: string): string {
|
||||
// Normalize separators to forward slashes for consistent matching across platforms.
|
||||
let result = dirPath.replace(/\\/g, '/');
|
||||
if (result.charAt(0) !== '/') {
|
||||
result = '/' + result;
|
||||
}
|
||||
if (result.charAt(result.length - 1) !== '/') {
|
||||
result = result + '/';
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts a glob pattern to a RegExp object.
|
||||
* This is a simplified implementation that supports `*`.
|
||||
*
|
||||
* @param glob The glob pattern to convert.
|
||||
* @returns A RegExp object.
|
||||
*/
|
||||
function globToRegex(glob: string): RegExp {
|
||||
const regexString = glob
|
||||
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex characters
|
||||
.replace(/(\/?)\*/g, '($1.*)?'); // Convert * to optional group
|
||||
|
||||
return new RegExp(`^${regexString}$`);
|
||||
}
|
||||
|
||||
export class ExtensionEnablementManager {
|
||||
private configFilePath: string;
|
||||
private configDir: string;
|
||||
// If non-empty, this overrides all other extension configuration and enables
|
||||
// only the ones in this list.
|
||||
private enabledExtensionNamesOverride: string[];
|
||||
|
||||
constructor(configDir: string, enabledExtensionNames?: string[]) {
|
||||
this.configDir = configDir;
|
||||
this.configFilePath = path.join(configDir, 'extension-enablement.json');
|
||||
this.enabledExtensionNamesOverride =
|
||||
enabledExtensionNames?.map((name) => name.toLowerCase()) ?? [];
|
||||
}
|
||||
|
||||
validateExtensionOverrides(extensions: Extension[]) {
|
||||
for (const name of this.enabledExtensionNamesOverride) {
|
||||
if (name === 'none') continue;
|
||||
if (
|
||||
!extensions.some(
|
||||
(ext) => ext.config.name.toLowerCase() === name.toLowerCase(),
|
||||
)
|
||||
) {
|
||||
console.error(`Extension not found: ${name}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if an extension is enabled based on its name and the current
|
||||
* path. The last matching rule in the overrides list wins.
|
||||
*
|
||||
* @param extensionName The name of the extension.
|
||||
* @param currentPath The absolute path of the current working directory.
|
||||
* @returns True if the extension is enabled, false otherwise.
|
||||
*/
|
||||
isEnabled(extensionName: string, currentPath: string): boolean {
|
||||
// If we have a single override called 'none', this disables all extensions.
|
||||
// Typically, this comes from the user passing `-e none`.
|
||||
if (
|
||||
this.enabledExtensionNamesOverride.length === 1 &&
|
||||
this.enabledExtensionNamesOverride[0] === 'none'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If we have explicit overrides, only enable those extensions.
|
||||
if (this.enabledExtensionNamesOverride.length > 0) {
|
||||
// When checking against overrides ONLY, we use a case insensitive match.
|
||||
// The override names are already lowercased in the constructor.
|
||||
return this.enabledExtensionNamesOverride.includes(
|
||||
extensionName.toLocaleLowerCase(),
|
||||
);
|
||||
}
|
||||
|
||||
// Otherwise, we use the configuration settings
|
||||
const config = this.readConfig();
|
||||
const extensionConfig = config[extensionName];
|
||||
// Extensions are enabled by default.
|
||||
let enabled = true;
|
||||
const allOverrides = extensionConfig?.overrides ?? [];
|
||||
for (const rule of allOverrides) {
|
||||
const override = Override.fromFileRule(rule);
|
||||
if (override.matchesPath(ensureLeadingAndTrailingSlash(currentPath))) {
|
||||
enabled = !override.isDisable;
|
||||
}
|
||||
}
|
||||
return enabled;
|
||||
}
|
||||
|
||||
readConfig(): AllExtensionsEnablementConfig {
|
||||
try {
|
||||
const content = fs.readFileSync(this.configFilePath, 'utf-8');
|
||||
return JSON.parse(content);
|
||||
} catch (error) {
|
||||
if (
|
||||
error instanceof Error &&
|
||||
'code' in error &&
|
||||
error.code === 'ENOENT'
|
||||
) {
|
||||
return {};
|
||||
}
|
||||
console.error('Error reading extension enablement config:', error);
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
writeConfig(config: AllExtensionsEnablementConfig): void {
|
||||
fs.mkdirSync(this.configDir, { recursive: true });
|
||||
fs.writeFileSync(this.configFilePath, JSON.stringify(config, null, 2));
|
||||
}
|
||||
|
||||
enable(
|
||||
extensionName: string,
|
||||
includeSubdirs: boolean,
|
||||
scopePath: string,
|
||||
): void {
|
||||
const config = this.readConfig();
|
||||
if (!config[extensionName]) {
|
||||
config[extensionName] = { overrides: [] };
|
||||
}
|
||||
const override = Override.fromInput(scopePath, includeSubdirs);
|
||||
const overrides = config[extensionName].overrides.filter((rule) => {
|
||||
const fileOverride = Override.fromFileRule(rule);
|
||||
if (
|
||||
fileOverride.conflictsWith(override) ||
|
||||
fileOverride.isEqualTo(override)
|
||||
) {
|
||||
return false; // Remove conflicts and equivalent values.
|
||||
}
|
||||
return !fileOverride.isChildOf(override);
|
||||
});
|
||||
overrides.push(override.output());
|
||||
config[extensionName].overrides = overrides;
|
||||
this.writeConfig(config);
|
||||
}
|
||||
|
||||
disable(
|
||||
extensionName: string,
|
||||
includeSubdirs: boolean,
|
||||
scopePath: string,
|
||||
): void {
|
||||
this.enable(extensionName, includeSubdirs, `!${scopePath}`);
|
||||
}
|
||||
|
||||
remove(extensionName: string): void {
|
||||
const config = this.readConfig();
|
||||
if (config[extensionName]) {
|
||||
delete config[extensionName];
|
||||
this.writeConfig(config);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,468 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { vi } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
EXTENSIONS_CONFIG_FILENAME,
|
||||
ExtensionStorage,
|
||||
INSTALL_METADATA_FILENAME,
|
||||
annotateActiveExtensions,
|
||||
loadExtension,
|
||||
} from '../extension.js';
|
||||
import { checkForAllExtensionUpdates, updateExtension } from './update.js';
|
||||
import { QWEN_DIR } from '@qwen-code/qwen-code-core';
|
||||
import { isWorkspaceTrusted } from '../trustedFolders.js';
|
||||
import { ExtensionUpdateState } from '../../ui/state/extensions.js';
|
||||
import { createExtension } from '../../test-utils/createExtension.js';
|
||||
import { ExtensionEnablementManager } from './extensionEnablement.js';
|
||||
|
||||
const mockGit = {
|
||||
clone: vi.fn(),
|
||||
getRemotes: vi.fn(),
|
||||
fetch: vi.fn(),
|
||||
checkout: vi.fn(),
|
||||
listRemote: vi.fn(),
|
||||
revparse: vi.fn(),
|
||||
// Not a part of the actual API, but we need to use this to do the correct
|
||||
// file system interactions.
|
||||
path: vi.fn(),
|
||||
};
|
||||
|
||||
vi.mock('simple-git', () => ({
|
||||
simpleGit: vi.fn((path: string) => {
|
||||
mockGit.path.mockReturnValue(path);
|
||||
return mockGit;
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock('../extensions/github.js', async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import('../extensions/github.js')>();
|
||||
return {
|
||||
...actual,
|
||||
downloadFromGitHubRelease: vi
|
||||
.fn()
|
||||
.mockRejectedValue(new Error('Mocked GitHub release download failure')),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const mockedOs = await importOriginal<typeof os>();
|
||||
return {
|
||||
...mockedOs,
|
||||
homedir: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../trustedFolders.js', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../trustedFolders.js')>();
|
||||
return {
|
||||
...actual,
|
||||
isWorkspaceTrusted: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
const mockLogExtensionInstallEvent = vi.hoisted(() => vi.fn());
|
||||
const mockLogExtensionUninstall = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import('@qwen-code/qwen-code-core')>();
|
||||
return {
|
||||
...actual,
|
||||
logExtensionInstallEvent: mockLogExtensionInstallEvent,
|
||||
logExtensionUninstall: mockLogExtensionUninstall,
|
||||
ExtensionInstallEvent: vi.fn(),
|
||||
ExtensionUninstallEvent: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
describe('update tests', () => {
|
||||
let tempHomeDir: string;
|
||||
let tempWorkspaceDir: string;
|
||||
let userExtensionsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'qwen-code-test-home-'),
|
||||
);
|
||||
tempWorkspaceDir = fs.mkdtempSync(
|
||||
path.join(tempHomeDir, 'qwen-code-test-workspace-'),
|
||||
);
|
||||
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
|
||||
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
|
||||
// Clean up before each test
|
||||
fs.rmSync(userExtensionsDir, { recursive: true, force: true });
|
||||
fs.mkdirSync(userExtensionsDir, { recursive: true });
|
||||
vi.mocked(isWorkspaceTrusted).mockReturnValue({
|
||||
isTrusted: true,
|
||||
source: 'file',
|
||||
});
|
||||
vi.spyOn(process, 'cwd').mockReturnValue(tempWorkspaceDir);
|
||||
Object.values(mockGit).forEach((fn) => fn.mockReset());
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
fs.rmSync(tempWorkspaceDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('updateExtension', () => {
|
||||
it('should update a git-installed extension', async () => {
|
||||
const gitUrl = 'https://github.com/google/gemini-extensions.git';
|
||||
const extensionName = 'qwen-extensions';
|
||||
const targetExtDir = path.join(userExtensionsDir, extensionName);
|
||||
const metadataPath = path.join(targetExtDir, INSTALL_METADATA_FILENAME);
|
||||
|
||||
fs.mkdirSync(targetExtDir, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.0.0' }),
|
||||
);
|
||||
fs.writeFileSync(
|
||||
metadataPath,
|
||||
JSON.stringify({ source: gitUrl, type: 'git' }),
|
||||
);
|
||||
|
||||
mockGit.clone.mockImplementation(async (_, destination) => {
|
||||
fs.mkdirSync(path.join(mockGit.path(), destination), {
|
||||
recursive: true,
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.1.0' }),
|
||||
);
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir: targetExtDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
const updateInfo = await updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
() => {},
|
||||
);
|
||||
|
||||
expect(updateInfo).toEqual({
|
||||
name: 'qwen-extensions',
|
||||
originalVersion: '1.0.0',
|
||||
updatedVersion: '1.1.0',
|
||||
});
|
||||
|
||||
const updatedConfig = JSON.parse(
|
||||
fs.readFileSync(
|
||||
path.join(targetExtDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
'utf-8',
|
||||
),
|
||||
);
|
||||
expect(updatedConfig.version).toBe('1.1.0');
|
||||
});
|
||||
|
||||
it('should call setExtensionUpdateState with UPDATING and then UPDATED_NEEDS_RESTART on success', async () => {
|
||||
const extensionName = 'test-extension';
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: extensionName,
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
|
||||
mockGit.clone.mockImplementation(async (_, destination) => {
|
||||
fs.mkdirSync(path.join(mockGit.path(), destination), {
|
||||
recursive: true,
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(mockGit.path(), destination, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name: extensionName, version: '1.1.0' }),
|
||||
);
|
||||
});
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
|
||||
const dispatch = vi.fn();
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
await updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
dispatch,
|
||||
);
|
||||
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATING,
|
||||
},
|
||||
});
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should call setExtensionUpdateState with ERROR on failure', async () => {
|
||||
const extensionName = 'test-extension';
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: extensionName,
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
|
||||
mockGit.clone.mockRejectedValue(new Error('Git clone failed'));
|
||||
mockGit.getRemotes.mockResolvedValue([{ name: 'origin' }]);
|
||||
|
||||
const dispatch = vi.fn();
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
await expect(
|
||||
updateExtension(
|
||||
extension,
|
||||
tempHomeDir,
|
||||
async (_) => true,
|
||||
ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
dispatch,
|
||||
),
|
||||
).rejects.toThrow();
|
||||
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.UPDATING,
|
||||
},
|
||||
});
|
||||
expect(dispatch).toHaveBeenCalledWith({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: extensionName,
|
||||
state: ExtensionUpdateState.ERROR,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkForAllExtensionUpdates', () => {
|
||||
it('should return UpdateAvailable for a git extension with updates', async () => {
|
||||
const extensionDir = createExtension({
|
||||
extensionsDir: userExtensionsDir,
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
installMetadata: {
|
||||
source: 'https://some.git/repo',
|
||||
type: 'git',
|
||||
},
|
||||
});
|
||||
const extension = annotateActiveExtensions(
|
||||
[
|
||||
loadExtension({
|
||||
extensionDir,
|
||||
workspaceDir: tempWorkspaceDir,
|
||||
})!,
|
||||
],
|
||||
process.cwd(),
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
)[0];
|
||||
|
||||
mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('remoteHash HEAD');
mockGit.revparse.mockResolvedValue('localHash');

const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});

it('should return UpToDate for a git extension with no updates', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'test-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];

mockGit.getRemotes.mockResolvedValue([
{ name: 'origin', refs: { fetch: 'https://some.git/repo' } },
]);
mockGit.listRemote.mockResolvedValue('sameHash HEAD');
mockGit.revparse.mockResolvedValue('sameHash');

const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'test-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});

it('should return UpToDate for a local extension with no updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.0.0',
});

const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UP_TO_DATE,
},
});
});

it('should return UpdateAvailable for a local extension with updates', async () => {
const localExtensionSourcePath = path.join(tempHomeDir, 'local-source');
const sourceExtensionDir = createExtension({
extensionsDir: localExtensionSourcePath,
name: 'my-local-ext',
version: '1.1.0',
});

const installedExtensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'local-extension',
version: '1.0.0',
installMetadata: { source: sourceExtensionDir, type: 'local' },
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir: installedExtensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];
const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'local-extension',
state: ExtensionUpdateState.UPDATE_AVAILABLE,
},
});
});

it('should return Error when git check fails', async () => {
const extensionDir = createExtension({
extensionsDir: userExtensionsDir,
name: 'error-extension',
version: '1.0.0',
installMetadata: {
source: 'https://some.git/repo',
type: 'git',
},
});
const extension = annotateActiveExtensions(
[
loadExtension({
extensionDir,
workspaceDir: tempWorkspaceDir,
})!,
],
process.cwd(),
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
)[0];

mockGit.getRemotes.mockRejectedValue(new Error('Git error'));

const dispatch = vi.fn();
await checkForAllExtensionUpdates([extension], dispatch);
expect(dispatch).toHaveBeenCalledWith({
type: 'SET_STATE',
payload: {
name: 'error-extension',
state: ExtensionUpdateState.ERROR,
},
});
});
});
});
@@ -1,182 +0,0 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import {
type ExtensionUpdateAction,
ExtensionUpdateState,
type ExtensionUpdateStatus,
} from '../../ui/state/extensions.js';
import {
copyExtension,
installExtension,
uninstallExtension,
loadExtension,
loadInstallMetadata,
ExtensionStorage,
loadExtensionConfig,
} from '../extension.js';
import { checkForExtensionUpdate } from './github.js';
import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import * as fs from 'node:fs';
import { getErrorMessage } from '../../utils/errors.js';

export interface ExtensionUpdateInfo {
name: string;
originalVersion: string;
updatedVersion: string;
}

export async function updateExtension(
extension: GeminiCLIExtension,
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
currentState: ExtensionUpdateState,
dispatchExtensionStateUpdate: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo | undefined> {
if (currentState === ExtensionUpdateState.UPDATING) {
return undefined;
}
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UPDATING },
});
const installMetadata = loadInstallMetadata(extension.path);

if (!installMetadata?.type) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error(
`Extension ${extension.name} cannot be updated, type is unknown.`,
);
}
if (installMetadata?.type === 'link') {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.UP_TO_DATE },
});
throw new Error(`Extension is linked so does not need to be updated`);
}
const originalVersion = extension.version;

const tempDir = await ExtensionStorage.createTmpDir();
try {
await copyExtension(extension.path, tempDir);
const previousExtensionConfig = await loadExtensionConfig({
extensionDir: extension.path,
workspaceDir: cwd,
});
await uninstallExtension(extension.name, cwd);
await installExtension(
installMetadata,
requestConsent,
cwd,
previousExtensionConfig,
);

const updatedExtensionStorage = new ExtensionStorage(extension.name);
const updatedExtension = loadExtension({
extensionDir: updatedExtensionStorage.getExtensionDir(),
workspaceDir: cwd,
});
if (!updatedExtension) {
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
throw new Error('Updated extension not found after installation.');
}
const updatedVersion = updatedExtension.config.version;
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.UPDATED_NEEDS_RESTART,
},
});
return {
name: extension.name,
originalVersion,
updatedVersion,
};
} catch (e) {
console.error(
`Error updating extension, rolling back. ${getErrorMessage(e)}`,
);
dispatchExtensionStateUpdate({
type: 'SET_STATE',
payload: { name: extension.name, state: ExtensionUpdateState.ERROR },
});
await copyExtension(tempDir, extension.path);
throw e;
} finally {
await fs.promises.rm(tempDir, { recursive: true, force: true });
}
}

export async function updateAllUpdatableExtensions(
cwd: string = process.cwd(),
requestConsent: (consent: string) => Promise<boolean>,
extensions: GeminiCLIExtension[],
extensionsState: Map<string, ExtensionUpdateStatus>,
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<ExtensionUpdateInfo[]> {
return (
await Promise.all(
extensions
.filter(
(extension) =>
extensionsState.get(extension.name)?.status ===
ExtensionUpdateState.UPDATE_AVAILABLE,
)
.map((extension) =>
updateExtension(
extension,
cwd,
requestConsent,
extensionsState.get(extension.name)!.status,
dispatch,
),
),
)
).filter((updateInfo) => !!updateInfo);
}

export interface ExtensionUpdateCheckResult {
state: ExtensionUpdateState;
error?: string;
}

export async function checkForAllExtensionUpdates(
extensions: GeminiCLIExtension[],
dispatch: (action: ExtensionUpdateAction) => void,
): Promise<void> {
dispatch({ type: 'BATCH_CHECK_START' });
const promises: Array<Promise<void>> = [];
for (const extension of extensions) {
if (!extension.installMetadata) {
dispatch({
type: 'SET_STATE',
payload: {
name: extension.name,
state: ExtensionUpdateState.NOT_UPDATABLE,
},
});
continue;
}
promises.push(
checkForExtensionUpdate(extension, (updatedState) => {
dispatch({
type: 'SET_STATE',
payload: { name: extension.name, state: updatedState },
});
}),
);
}
await Promise.all(promises);
dispatch({ type: 'BATCH_CHECK_END' });
}
@@ -1,87 +0,0 @@
/**
* @license
* Copyright 2025 Qwen Team
* SPDX-License-Identifier: Apache-2.0
*/

import { describe, expect, it } from 'vitest';
import { SettingScope } from './settings.js';
import { getPersistScopeForModelSelection } from './modelProvidersScope.js';

function makeSettings({
isTrusted,
userModelProviders,
workspaceModelProviders,
}: {
isTrusted: boolean;
userModelProviders?: unknown;
workspaceModelProviders?: unknown;
}) {
const userSettings: Record<string, unknown> = {};
const workspaceSettings: Record<string, unknown> = {};

// When undefined, treat as "not present in this scope" (the key is omitted),
// matching how LoadedSettings is shaped when a settings file doesn't define it.
if (userModelProviders !== undefined) {
userSettings['modelProviders'] = userModelProviders;
}
if (workspaceModelProviders !== undefined) {
workspaceSettings['modelProviders'] = workspaceModelProviders;
}

return {
isTrusted,
user: { settings: userSettings },
workspace: { settings: workspaceSettings },
} as unknown as import('./settings.js').LoadedSettings;
}

describe('getPersistScopeForModelSelection', () => {
it('prefers workspace when trusted and workspace defines modelProviders', () => {
const settings = makeSettings({
isTrusted: true,
workspaceModelProviders: {},
userModelProviders: { anything: true },
});

expect(getPersistScopeForModelSelection(settings)).toBe(
SettingScope.Workspace,
);
});

it('falls back to user when workspace does not define modelProviders', () => {
const settings = makeSettings({
isTrusted: true,
workspaceModelProviders: undefined,
userModelProviders: {},
});

expect(getPersistScopeForModelSelection(settings)).toBe(SettingScope.User);
});

it('ignores workspace modelProviders when workspace is untrusted', () => {
const settings = makeSettings({
isTrusted: false,
workspaceModelProviders: {},
userModelProviders: undefined,
});

expect(getPersistScopeForModelSelection(settings)).toBe(SettingScope.User);
});

it('falls back to legacy trust heuristic when neither scope defines modelProviders', () => {
const trusted = makeSettings({
isTrusted: true,
userModelProviders: undefined,
workspaceModelProviders: undefined,
});
expect(getPersistScopeForModelSelection(trusted)).toBe(SettingScope.User);

const untrusted = makeSettings({
isTrusted: false,
userModelProviders: undefined,
workspaceModelProviders: undefined,
});
expect(getPersistScopeForModelSelection(untrusted)).toBe(SettingScope.User);
});
});
@@ -1,48 +0,0 @@
/**
* @license
* Copyright 2025 Qwen Team
* SPDX-License-Identifier: Apache-2.0
*/

import { SettingScope, type LoadedSettings } from './settings.js';

function hasOwnModelProviders(settingsObj: unknown): boolean {
if (!settingsObj || typeof settingsObj !== 'object') {
return false;
}
const obj = settingsObj as Record<string, unknown>;
// Treat an explicitly configured empty object (modelProviders: {}) as "owned"
// by this scope, which is important when mergeStrategy is REPLACE.
return Object.prototype.hasOwnProperty.call(obj, 'modelProviders');
}

/**
* Returns which writable scope (Workspace/User) owns the effective modelProviders
* configuration.
*
* Note: Workspace scope is only considered when the workspace is trusted.
*/
export function getModelProvidersOwnerScope(
settings: LoadedSettings,
): SettingScope | undefined {
if (settings.isTrusted && hasOwnModelProviders(settings.workspace.settings)) {
return SettingScope.Workspace;
}

if (hasOwnModelProviders(settings.user.settings)) {
return SettingScope.User;
}

return undefined;
}

/**
* Choose the settings scope to persist a model selection.
* Prefer persisting back to the scope that contains the effective modelProviders
* config, otherwise fall back to the legacy trust-based heuristic.
*/
export function getPersistScopeForModelSelection(
settings: LoadedSettings,
): SettingScope {
return getModelProvidersOwnerScope(settings) ?? SettingScope.User;
}
@@ -51,11 +51,9 @@ import {
|
||||
import * as fs from 'node:fs'; // fs will be mocked separately
|
||||
import stripJsonComments from 'strip-json-comments'; // Will be mocked separately
|
||||
import { isWorkspaceTrusted } from './trustedFolders.js';
|
||||
import { disableExtension } from './extension.js';
|
||||
|
||||
// These imports will get the versions from the vi.mock('./settings.js', ...) factory.
|
||||
import {
|
||||
getSettingsWarnings,
|
||||
loadSettings,
|
||||
USER_SETTINGS_PATH, // This IS the mocked path.
|
||||
getSystemSettingsPath,
|
||||
@@ -65,8 +63,6 @@ import {
|
||||
needsMigration,
|
||||
type Settings,
|
||||
loadEnvironment,
|
||||
migrateDeprecatedSettings,
|
||||
SettingScope,
|
||||
SETTINGS_VERSION,
|
||||
SETTINGS_VERSION_KEY,
|
||||
} from './settings.js';
|
||||
@@ -419,86 +415,6 @@ describe('Settings Loading and Merging', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should warn about ignored legacy keys in a v2 settings file', () => {
|
||||
(mockFsExistsSync as Mock).mockImplementation(
|
||||
(p: fs.PathLike) => p === USER_SETTINGS_PATH,
|
||||
);
|
||||
const userSettingsContent = {
|
||||
[SETTINGS_VERSION_KEY]: SETTINGS_VERSION,
|
||||
usageStatisticsEnabled: false,
|
||||
};
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(getSettingsWarnings(settings)).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.stringContaining(
|
||||
"Legacy setting 'usageStatisticsEnabled' will be ignored",
|
||||
),
|
||||
]),
|
||||
);
|
||||
expect(getSettingsWarnings(settings)).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.stringContaining("'privacy.usageStatisticsEnabled'"),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should warn about unknown top-level keys in a v2 settings file', () => {
|
||||
(mockFsExistsSync as Mock).mockImplementation(
|
||||
(p: fs.PathLike) => p === USER_SETTINGS_PATH,
|
||||
);
|
||||
const userSettingsContent = {
|
||||
[SETTINGS_VERSION_KEY]: SETTINGS_VERSION,
|
||||
someUnknownKey: 'value',
|
||||
};
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(getSettingsWarnings(settings)).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.stringContaining(
|
||||
"Unknown setting 'someUnknownKey' will be ignored",
|
||||
),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not warn for valid v2 container keys', () => {
|
||||
(mockFsExistsSync as Mock).mockImplementation(
|
||||
(p: fs.PathLike) => p === USER_SETTINGS_PATH,
|
||||
);
|
||||
const userSettingsContent = {
|
||||
[SETTINGS_VERSION_KEY]: SETTINGS_VERSION,
|
||||
model: { name: 'qwen-coder' },
|
||||
};
|
||||
(fs.readFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const settings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(getSettingsWarnings(settings)).toEqual([]);
|
||||
});
|
||||
|
||||
it('should rewrite allowedTools to tools.allowed during migration', () => {
|
||||
(mockFsExistsSync as Mock).mockImplementation(
|
||||
(p: fs.PathLike) => p === USER_SETTINGS_PATH,
|
||||
@@ -2730,122 +2646,4 @@ describe('Settings Loading and Merging', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('migrateDeprecatedSettings', () => {
|
||||
let mockFsExistsSync: Mocked<typeof fs.existsSync>;
|
||||
let mockFsReadFileSync: Mocked<typeof fs.readFileSync>;
|
||||
let mockDisableExtension: Mocked<typeof disableExtension>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
|
||||
mockFsExistsSync = vi.mocked(fs.existsSync);
|
||||
mockFsReadFileSync = vi.mocked(fs.readFileSync);
|
||||
mockDisableExtension = vi.mocked(disableExtension);
|
||||
|
||||
(mockFsExistsSync as Mock).mockReturnValue(true);
|
||||
vi.mocked(isWorkspaceTrusted).mockReturnValue(true);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should migrate disabled extensions from user and workspace settings', () => {
|
||||
const userSettingsContent = {
|
||||
extensions: {
|
||||
disabled: ['user-ext-1', 'shared-ext'],
|
||||
},
|
||||
};
|
||||
const workspaceSettingsContent = {
|
||||
extensions: {
|
||||
disabled: ['workspace-ext-1', 'shared-ext'],
|
||||
},
|
||||
};
|
||||
|
||||
(mockFsReadFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
|
||||
|
||||
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
|
||||
|
||||
// Check user settings migration
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'user-ext-1',
|
||||
SettingScope.User,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'shared-ext',
|
||||
SettingScope.User,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
|
||||
// Check workspace settings migration
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'workspace-ext-1',
|
||||
SettingScope.Workspace,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
expect(mockDisableExtension).toHaveBeenCalledWith(
|
||||
'shared-ext',
|
||||
SettingScope.Workspace,
|
||||
MOCK_WORKSPACE_DIR,
|
||||
);
|
||||
|
||||
// Check that setValue was called to remove the deprecated setting
|
||||
expect(setValueSpy).toHaveBeenCalledWith(
|
||||
SettingScope.User,
|
||||
'extensions',
|
||||
{
|
||||
disabled: undefined,
|
||||
},
|
||||
);
|
||||
expect(setValueSpy).toHaveBeenCalledWith(
|
||||
SettingScope.Workspace,
|
||||
'extensions',
|
||||
{
|
||||
disabled: undefined,
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('should not do anything if there are no deprecated settings', () => {
|
||||
const userSettingsContent = {
|
||||
extensions: {
|
||||
enabled: ['user-ext-1'],
|
||||
},
|
||||
};
|
||||
const workspaceSettingsContent = {
|
||||
someOtherSetting: 'value',
|
||||
};
|
||||
|
||||
(mockFsReadFileSync as Mock).mockImplementation(
|
||||
(p: fs.PathOrFileDescriptor) => {
|
||||
if (p === USER_SETTINGS_PATH)
|
||||
return JSON.stringify(userSettingsContent);
|
||||
if (p === MOCK_WORKSPACE_SETTINGS_PATH)
|
||||
return JSON.stringify(workspaceSettingsContent);
|
||||
return '{}';
|
||||
},
|
||||
);
|
||||
|
||||
const loadedSettings = loadSettings(MOCK_WORKSPACE_DIR);
|
||||
const setValueSpy = vi.spyOn(loadedSettings, 'setValue');
|
||||
|
||||
migrateDeprecatedSettings(loadedSettings, MOCK_WORKSPACE_DIR);
|
||||
|
||||
expect(mockDisableExtension).not.toHaveBeenCalled();
|
||||
expect(setValueSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -30,7 +30,6 @@ import {
|
||||
import { resolveEnvVarsInObject } from '../utils/envVarResolver.js';
|
||||
import { customDeepMerge, type MergeableObject } from '../utils/deepMerge.js';
|
||||
import { updateSettingsFilePreservingFormat } from '../utils/commentJson.js';
|
||||
import { disableExtension } from './extension.js';
|
||||
|
||||
function getMergeStrategyForPath(path: string[]): MergeStrategy | undefined {
|
||||
let current: SettingDefinition | undefined = undefined;
|
||||
@@ -81,7 +80,6 @@ const MIGRATION_MAP: Record<string, string> = {
|
||||
excludeTools: 'tools.exclude',
|
||||
excludeMCPServers: 'mcp.excluded',
|
||||
excludedProjectEnvVars: 'advanced.excludedEnvVars',
|
||||
extensionManagement: 'experimental.extensionManagement',
|
||||
extensions: 'extensions',
|
||||
fileFiltering: 'context.fileFiltering',
|
||||
folderTrustFeature: 'security.folderTrust.featureEnabled',
|
||||
@@ -344,97 +342,6 @@ const KNOWN_V2_CONTAINERS = new Set(
|
||||
Object.values(MIGRATION_MAP).map((path) => path.split('.')[0]),
|
||||
);
|
||||
|
||||
function getSettingsFileKeyWarnings(
|
||||
settings: Record<string, unknown>,
|
||||
settingsFilePath: string,
|
||||
): string[] {
|
||||
const version = settings[SETTINGS_VERSION_KEY];
|
||||
if (typeof version !== 'number' || version < SETTINGS_VERSION) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const warnings: string[] = [];
|
||||
const ignoredLegacyKeys = new Set<string>();
|
||||
|
||||
// Ignored legacy keys (V1 top-level keys that moved to a nested V2 path).
|
||||
for (const [oldKey, newPath] of Object.entries(MIGRATION_MAP)) {
|
||||
if (oldKey === newPath) {
|
||||
continue;
|
||||
}
|
||||
if (!(oldKey in settings)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const oldValue = settings[oldKey];
|
||||
|
||||
// If this key is a V2 container (like 'model') and it's already an object,
|
||||
// it's likely already in V2 format. Don't warn.
|
||||
if (
|
||||
KNOWN_V2_CONTAINERS.has(oldKey) &&
|
||||
typeof oldValue === 'object' &&
|
||||
oldValue !== null &&
|
||||
!Array.isArray(oldValue)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
ignoredLegacyKeys.add(oldKey);
|
||||
warnings.push(
|
||||
`⚠️ Legacy setting '${oldKey}' will be ignored in ${settingsFilePath}. Please use '${newPath}' instead.`,
|
||||
);
|
||||
}
|
||||
|
||||
// Unknown top-level keys.
|
||||
const schemaKeys = new Set(Object.keys(getSettingsSchema()));
|
||||
for (const key of Object.keys(settings)) {
|
||||
if (key === SETTINGS_VERSION_KEY) {
|
||||
continue;
|
||||
}
|
||||
if (ignoredLegacyKeys.has(key)) {
|
||||
continue;
|
||||
}
|
||||
if (schemaKeys.has(key)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
warnings.push(
|
||||
`⚠️ Unknown setting '${key}' will be ignored in ${settingsFilePath}.`,
|
||||
);
|
||||
}
|
||||
|
||||
return warnings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Collects warnings for ignored legacy and unknown settings keys.
|
||||
*
|
||||
* For `$version: 2` settings files, we do not apply implicit migrations.
|
||||
* Instead, we surface actionable, de-duplicated warnings in the terminal UI.
|
||||
*/
|
||||
export function getSettingsWarnings(loadedSettings: LoadedSettings): string[] {
|
||||
const warningSet = new Set<string>();
|
||||
|
||||
for (const scope of [SettingScope.User, SettingScope.Workspace]) {
|
||||
const settingsFile = loadedSettings.forScope(scope);
|
||||
if (settingsFile.rawJson === undefined) {
|
||||
continue; // File not present / not loaded.
|
||||
}
|
||||
const settingsObject = settingsFile.originalSettings as unknown as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
|
||||
for (const warning of getSettingsFileKeyWarnings(
|
||||
settingsObject,
|
||||
settingsFile.path,
|
||||
)) {
|
||||
warningSet.add(warning);
|
||||
}
|
||||
}
|
||||
|
||||
return [...warningSet];
|
||||
}
|
||||
|
||||
export function migrateSettingsToV1(
|
||||
v2Settings: Record<string, unknown>,
|
||||
): Record<string, unknown> {
|
||||
@@ -903,31 +810,6 @@ export function loadSettings(
|
||||
);
|
||||
}
|
||||
|
||||
export function migrateDeprecatedSettings(
|
||||
loadedSettings: LoadedSettings,
|
||||
workspaceDir: string = process.cwd(),
|
||||
): void {
|
||||
const processScope = (scope: SettingScope) => {
|
||||
const settings = loadedSettings.forScope(scope).settings;
|
||||
if (settings.extensions?.disabled) {
|
||||
console.log(
|
||||
`Migrating deprecated extensions.disabled settings from ${scope} settings...`,
|
||||
);
|
||||
for (const extension of settings.extensions.disabled ?? []) {
|
||||
disableExtension(extension, scope, workspaceDir);
|
||||
}
|
||||
|
||||
const newExtensionsValue = { ...settings.extensions };
|
||||
newExtensionsValue.disabled = undefined;
|
||||
|
||||
loadedSettings.setValue(scope, 'extensions', newExtensionsValue);
|
||||
}
|
||||
};
|
||||
|
||||
processScope(SettingScope.User);
|
||||
processScope(SettingScope.Workspace);
|
||||
}
|
||||
|
||||
export function saveSettings(settingsFile: SettingsFile): void {
|
||||
try {
|
||||
// Ensure the directory exists
|
||||
|
||||
@@ -10,7 +10,6 @@ import type {
|
||||
TelemetrySettings,
|
||||
AuthType,
|
||||
ChatCompressionSettings,
|
||||
ModelProvidersConfig,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
ApprovalMode,
|
||||
@@ -103,19 +102,6 @@ const SETTINGS_SCHEMA = {
|
||||
mergeStrategy: MergeStrategy.SHALLOW_MERGE,
|
||||
},
|
||||
|
||||
// Model providers configuration grouped by authType
|
||||
modelProviders: {
|
||||
type: 'object',
|
||||
label: 'Model Providers',
|
||||
category: 'Model',
|
||||
requiresRestart: false,
|
||||
default: {} as ModelProvidersConfig,
|
||||
description:
|
||||
'Model providers configuration grouped by authType. Each authType contains an array of model configurations.',
|
||||
showInDialog: false,
|
||||
mergeStrategy: MergeStrategy.REPLACE,
|
||||
},
|
||||
|
||||
general: {
|
||||
type: 'object',
|
||||
label: 'General',
|
||||
@@ -216,7 +202,6 @@ const SETTINGS_SCHEMA = {
|
||||
{ value: 'en', label: 'English' },
|
||||
{ value: 'zh', label: '中文 (Chinese)' },
|
||||
{ value: 'ru', label: 'Русский (Russian)' },
|
||||
{ value: 'de', label: 'Deutsch (German)' },
|
||||
],
|
||||
},
|
||||
terminalBell: {
|
||||
@@ -1207,15 +1192,6 @@ const SETTINGS_SCHEMA = {
|
||||
description: 'Setting to enable experimental features',
|
||||
showInDialog: false,
|
||||
properties: {
|
||||
extensionManagement: {
|
||||
type: 'boolean',
|
||||
label: 'Extension Management',
|
||||
category: 'Experimental',
|
||||
requiresRestart: true,
|
||||
default: true,
|
||||
description: 'Enable extension management features.',
|
||||
showInDialog: false,
|
||||
},
|
||||
visionModelPreview: {
|
||||
type: 'boolean',
|
||||
label: 'Vision Model Preview',
|
||||
@@ -1238,39 +1214,6 @@ const SETTINGS_SCHEMA = {
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
extensions: {
|
||||
type: 'object',
|
||||
label: 'Extensions',
|
||||
category: 'Extensions',
|
||||
requiresRestart: true,
|
||||
default: {},
|
||||
description: 'Settings for extensions.',
|
||||
showInDialog: false,
|
||||
properties: {
|
||||
disabled: {
|
||||
type: 'array',
|
||||
label: 'Disabled Extensions',
|
||||
category: 'Extensions',
|
||||
requiresRestart: true,
|
||||
default: [] as string[],
|
||||
description: 'List of disabled extensions.',
|
||||
showInDialog: false,
|
||||
mergeStrategy: MergeStrategy.UNION,
|
||||
},
|
||||
workspacesWithMigrationNudge: {
|
||||
type: 'array',
|
||||
label: 'Workspaces with Migration Nudge',
|
||||
category: 'Extensions',
|
||||
requiresRestart: false,
|
||||
default: [] as string[],
|
||||
description:
|
||||
'List of workspaces for which the migration nudge has been shown.',
|
||||
showInDialog: false,
|
||||
mergeStrategy: MergeStrategy.UNION,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as const satisfies SettingsSchema;
|
||||
|
||||
export type SettingsSchemaType = typeof SETTINGS_SCHEMA;
|
||||
|
||||
@@ -45,9 +45,7 @@ export async function initializeApp(
|
||||
// Auto-detect and set LLM output language on first use
|
||||
initializeLlmOutputLanguage();
|
||||
|
||||
// Use authType from modelsConfig which respects CLI --auth-type argument
|
||||
// over settings.security.auth.selectedType
|
||||
const authType = config.modelsConfig.getCurrentAuthType();
|
||||
const authType = settings.merged.security?.auth?.selectedType;
|
||||
const authError = await performInitialAuth(config, authType);
|
||||
|
||||
// Fallback to user select when initial authentication fails
|
||||
@@ -61,7 +59,7 @@ export async function initializeApp(
|
||||
const themeError = validateTheme(settings);
|
||||
|
||||
const shouldOpenAuthDialog =
|
||||
!config.modelsConfig.wasAuthTypeExplicitlyProvided() || !!authError;
|
||||
settings.merged.security?.auth?.selectedType === undefined || !!authError;
|
||||
|
||||
if (config.getIdeMode()) {
|
||||
const ideClient = await IdeClient.getInstance();
|
||||
|
||||
@@ -87,15 +87,6 @@ vi.mock('./config/sandboxConfig.js', () => ({
|
||||
loadSandboxConfig: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('./core/initializer.js', () => ({
|
||||
initializeApp: vi.fn().mockResolvedValue({
|
||||
authError: null,
|
||||
themeError: null,
|
||||
shouldOpenAuthDialog: false,
|
||||
geminiMdFileCount: 0,
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('gemini.tsx main function', () => {
|
||||
let originalEnvGeminiSandbox: string | undefined;
|
||||
let originalEnvSandbox: string | undefined;
|
||||
@@ -271,7 +262,6 @@ describe('gemini.tsx main function', () => {
|
||||
);
|
||||
const { loadSettings } = await import('./config/settings.js');
|
||||
const cleanupModule = await import('./utils/cleanup.js');
|
||||
const extensionModule = await import('./config/extension.js');
|
||||
const validatorModule = await import('./validateNonInterActiveAuth.js');
|
||||
const streamJsonModule = await import('./nonInteractive/session.js');
|
||||
const initializerModule = await import('./core/initializer.js');
|
||||
@@ -284,11 +274,6 @@ describe('gemini.tsx main function', () => {
|
||||
vi.mocked(cleanupModule.registerCleanup).mockImplementation(() => {});
|
||||
const runExitCleanupMock = vi.mocked(cleanupModule.runExitCleanup);
|
||||
runExitCleanupMock.mockResolvedValue(undefined);
|
||||
vi.spyOn(extensionModule, 'loadExtensions').mockReturnValue([]);
|
||||
vi.spyOn(
|
||||
extensionModule.ExtensionStorage,
|
||||
'getUserExtensionsDir',
|
||||
).mockReturnValue('/tmp/extensions');
|
||||
vi.spyOn(initializerModule, 'initializeApp').mockResolvedValue({
|
||||
authError: null,
|
||||
themeError: null,
|
||||
@@ -371,6 +356,7 @@ describe('gemini.tsx main function', () => {
|
||||
expect(inputArg).toBe('hello stream');
|
||||
|
||||
expect(validateAuthSpy).toHaveBeenCalledWith(
|
||||
undefined,
|
||||
undefined,
|
||||
configStub,
|
||||
expect.any(Object),
|
||||
@@ -468,7 +454,6 @@ describe('gemini.tsx main function kitty protocol', () => {
|
||||
telemetryOutfile: undefined,
|
||||
allowedMcpServerNames: undefined,
|
||||
allowedTools: undefined,
|
||||
acp: undefined,
|
||||
experimentalAcp: undefined,
|
||||
experimentalSkills: undefined,
|
||||
extensions: undefined,
|
||||
@@ -648,37 +633,4 @@ describe('startInteractiveUI', () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
expect(checkForUpdates).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should not check for updates when update nag is disabled', async () => {
|
||||
const { checkForUpdates } = await import('./ui/utils/updateCheck.js');
|
||||
|
||||
const mockInitializationResult = {
|
||||
authError: null,
|
||||
themeError: null,
|
||||
shouldOpenAuthDialog: false,
|
||||
geminiMdFileCount: 0,
|
||||
};
|
||||
|
||||
const settingsWithUpdateNagDisabled = {
|
||||
merged: {
|
||||
general: {
|
||||
disableUpdateNag: true,
|
||||
},
|
||||
ui: {
|
||||
hideWindowTitle: false,
|
||||
},
|
||||
},
|
||||
} as LoadedSettings;
|
||||
|
||||
await startInteractiveUI(
|
||||
mockConfig,
|
||||
settingsWithUpdateNagDisabled,
|
||||
mockStartupWarnings,
|
||||
mockWorkspaceRoot,
|
||||
mockInitializationResult,
|
||||
);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
expect(checkForUpdates).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import type { Config, AuthType } from '@qwen-code/qwen-code-core';
|
||||
import { InputFormat, logUserPrompt } from '@qwen-code/qwen-code-core';
|
||||
import { render } from 'ink';
|
||||
import dns from 'node:dns';
|
||||
@@ -15,13 +15,8 @@ import React from 'react';
|
||||
import { validateAuthMethod } from './config/auth.js';
|
||||
import * as cliConfig from './config/config.js';
|
||||
import { loadCliConfig, parseArguments } from './config/config.js';
|
||||
import { ExtensionStorage, loadExtensions } from './config/extension.js';
|
||||
import type { DnsResolutionOrder, LoadedSettings } from './config/settings.js';
|
||||
import {
|
||||
getSettingsWarnings,
|
||||
loadSettings,
|
||||
migrateDeprecatedSettings,
|
||||
} from './config/settings.js';
|
||||
import { loadSettings } from './config/settings.js';
|
||||
import {
|
||||
initializeApp,
|
||||
type InitializationResult,
|
||||
@@ -107,7 +102,6 @@ function getNodeMemoryArgs(isDebugMode: boolean): string[] {
|
||||
return [];
|
||||
}
|
||||
|
||||
import { ExtensionEnablementManager } from './config/extensions/extensionEnablement.js';
|
||||
import { loadSandboxConfig } from './config/sandboxConfig.js';
|
||||
import { runAcpAgent } from './acp-integration/acpAgent.js';
|
||||
|
||||
@@ -187,18 +181,16 @@ export async function startInteractiveUI(
|
||||
},
|
||||
);
|
||||
|
||||
if (!settings.merged.general?.disableUpdateNag) {
|
||||
checkForUpdates()
|
||||
.then((info) => {
|
||||
handleAutoUpdate(info, settings, config.getProjectRoot());
|
||||
})
|
||||
.catch((err) => {
|
||||
// Silently ignore update check errors.
|
||||
if (config.getDebugMode()) {
|
||||
console.error('Update check failed:', err);
|
||||
}
|
||||
});
|
||||
}
|
||||
checkForUpdates()
|
||||
.then((info) => {
|
||||
handleAutoUpdate(info, settings, config.getProjectRoot());
|
||||
})
|
||||
.catch((err) => {
|
||||
// Silently ignore update check errors.
|
||||
if (config.getDebugMode()) {
|
||||
console.error('Update check failed:', err);
|
||||
}
|
||||
});
|
||||
|
||||
registerCleanup(() => instance.unmount());
|
||||
}
|
||||
@@ -206,10 +198,9 @@ export async function startInteractiveUI(
|
||||
export async function main() {
|
||||
setupUnhandledRejectionHandler();
|
||||
const settings = loadSettings();
|
||||
migrateDeprecatedSettings(settings);
|
||||
await cleanupCheckpoints();
|
||||
|
||||
let argv = await parseArguments(settings.merged);
|
||||
let argv = await parseArguments();
|
||||
|
||||
// Check for invalid input combinations early to prevent crashes
|
||||
if (argv.promptInteractive && !process.stdin.isTTY) {
|
||||
@@ -251,25 +242,27 @@ export async function main() {
|
||||
if (sandboxConfig) {
|
||||
const partialConfig = await loadCliConfig(
|
||||
settings.merged,
|
||||
[],
|
||||
new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
|
||||
argv,
|
||||
undefined,
|
||||
[],
|
||||
);
|
||||
|
||||
if (!settings.merged.security?.auth?.useExternal) {
|
||||
if (
|
||||
settings.merged.security?.auth?.selectedType &&
|
||||
!settings.merged.security?.auth?.useExternal
|
||||
) {
|
||||
// Validate authentication here because the sandbox will interfere with the Oauth2 web redirect.
|
||||
try {
|
||||
const authType = partialConfig.modelsConfig.getCurrentAuthType();
|
||||
// Fresh users may not have selected/persisted an authType yet.
|
||||
// In that case, defer auth prompting/selection to the main interactive flow.
|
||||
if (authType) {
|
||||
const err = validateAuthMethod(authType, partialConfig);
|
||||
if (err) {
|
||||
throw new Error(err);
|
||||
}
|
||||
|
||||
await partialConfig.refreshAuth(authType);
|
||||
const err = validateAuthMethod(
|
||||
settings.merged.security.auth.selectedType,
|
||||
);
|
||||
if (err) {
|
||||
throw new Error(err);
|
||||
}
|
||||
|
||||
await partialConfig.refreshAuth(
|
||||
settings.merged.security.auth.selectedType,
|
||||
);
|
||||
} catch (err) {
|
||||
console.error('Error authenticating:', err);
|
||||
process.exit(1);
|
||||
@@ -335,25 +328,21 @@ export async function main() {
|
||||
// to run Gemini CLI. It is now safe to perform expensive initialization that
|
||||
// may have side effects.
|
||||
{
|
||||
const extensionEnablementManager = new ExtensionEnablementManager(
|
||||
ExtensionStorage.getUserExtensionsDir(),
|
||||
argv.extensions,
|
||||
);
|
||||
const extensions = loadExtensions(extensionEnablementManager);
|
||||
const config = await loadCliConfig(
|
||||
settings.merged,
|
||||
extensions,
|
||||
extensionEnablementManager,
|
||||
argv,
|
||||
process.cwd(),
|
||||
argv.extensions,
|
||||
);
|
||||
|
||||
if (config.getListExtensions()) {
|
||||
console.log('Installed extensions:');
|
||||
for (const extension of extensions) {
|
||||
console.log(`- ${extension.config.name}`);
|
||||
}
|
||||
process.exit(0);
|
||||
}
|
||||
// FIXME: list extensions after the config initialize
|
||||
// if (config.getListExtensions()) {
|
||||
// console.log('Installed extensions:');
|
||||
// for (const extension of extensions) {
|
||||
// console.log(`- ${extension.config.name}`);
|
||||
// }
|
||||
// process.exit(0);
|
||||
// }
|
||||
|
||||
// Setup unified ConsolePatcher based on interactive mode
|
||||
const isInteractive = config.isInteractive();
|
||||
@@ -399,20 +388,17 @@ export async function main() {
|
||||
}
|
||||
|
||||
if (config.getExperimentalZedIntegration()) {
|
||||
return runAcpAgent(config, settings, extensions, argv);
|
||||
return runAcpAgent(config, settings, argv);
|
||||
}
|
||||
|
||||
let input = config.getQuestion();
|
||||
const startupWarnings = [
|
||||
...new Set([
|
||||
...(await getStartupWarnings()),
|
||||
...(await getUserStartupWarnings({
|
||||
workspaceRoot: process.cwd(),
|
||||
useRipgrep: settings.merged.tools?.useRipgrep ?? true,
|
||||
useBuiltinRipgrep: settings.merged.tools?.useBuiltinRipgrep ?? true,
|
||||
})),
|
||||
...getSettingsWarnings(settings),
|
||||
]),
|
||||
...(await getStartupWarnings()),
|
||||
...(await getUserStartupWarnings({
|
||||
workspaceRoot: process.cwd(),
|
||||
useRipgrep: settings.merged.tools?.useRipgrep ?? true,
|
||||
useBuiltinRipgrep: settings.merged.tools?.useBuiltinRipgrep ?? true,
|
||||
})),
|
||||
];
|
||||
|
||||
// Render UI, passing necessary config values. Check that there is no command line question.
|
||||
@@ -445,6 +431,8 @@ export async function main() {
|
||||
}
|
||||
|
||||
const nonInteractiveConfig = await validateNonInteractiveAuth(
|
||||
(argv.authType as AuthType) ||
|
||||
settings.merged.security?.auth?.selectedType,
|
||||
settings.merged.security?.auth?.useExternal,
|
||||
config,
|
||||
settings,
|
||||
File diff suppressed because it is too large
@@ -89,9 +89,6 @@ export default {
|
||||
'No tools available': 'No tools available',
|
||||
'View or change the approval mode for tool usage':
|
||||
'View or change the approval mode for tool usage',
|
||||
'Invalid approval mode "{{arg}}". Valid modes: {{modes}}':
|
||||
'Invalid approval mode "{{arg}}". Valid modes: {{modes}}',
|
||||
'Approval mode set to "{{mode}}"': 'Approval mode set to "{{mode}}"',
|
||||
'View or change the language setting': 'View or change the language setting',
|
||||
'change the theme': 'change the theme',
|
||||
'Select Theme': 'Select Theme',
|
||||
@@ -770,21 +767,6 @@ export default {
|
||||
'Authentication timed out. Please try again.',
|
||||
'Waiting for auth... (Press ESC or CTRL+C to cancel)':
|
||||
'Waiting for auth... (Press ESC or CTRL+C to cancel)',
|
||||
'Missing API key for OpenAI-compatible auth. Set settings.security.auth.apiKey, or set the {{envKeyHint}} environment variable.':
|
||||
'Missing API key for OpenAI-compatible auth. Set settings.security.auth.apiKey, or set the {{envKeyHint}} environment variable.',
|
||||
'{{envKeyHint}} environment variable not found.':
|
||||
'{{envKeyHint}} environment variable not found.',
|
||||
'{{envKeyHint}} environment variable not found. Please set it in your .env file or environment variables.':
|
||||
'{{envKeyHint}} environment variable not found. Please set it in your .env file or environment variables.',
|
||||
'{{envKeyHint}} environment variable not found (or set settings.security.auth.apiKey). Please set it in your .env file or environment variables.':
|
||||
'{{envKeyHint}} environment variable not found (or set settings.security.auth.apiKey). Please set it in your .env file or environment variables.',
|
||||
'Missing API key for OpenAI-compatible auth. Set the {{envKeyHint}} environment variable.':
|
||||
'Missing API key for OpenAI-compatible auth. Set the {{envKeyHint}} environment variable.',
|
||||
'Anthropic provider missing required baseUrl in modelProviders[].baseUrl.':
|
||||
'Anthropic provider missing required baseUrl in modelProviders[].baseUrl.',
|
||||
'ANTHROPIC_BASE_URL environment variable not found.':
|
||||
'ANTHROPIC_BASE_URL environment variable not found.',
|
||||
'Invalid auth method selected.': 'Invalid auth method selected.',
|
||||
'Failed to authenticate. Message: {{message}}':
|
||||
'Failed to authenticate. Message: {{message}}',
|
||||
'Authenticated successfully with {{authType}} credentials.':
|
||||
@@ -806,15 +788,6 @@ export default {
|
||||
// ============================================================================
|
||||
'Select Model': 'Select Model',
|
||||
'(Press Esc to close)': '(Press Esc to close)',
|
||||
'Current (effective) configuration': 'Current (effective) configuration',
|
||||
AuthType: 'AuthType',
|
||||
'API Key': 'API Key',
|
||||
unset: 'unset',
|
||||
'(default)': '(default)',
|
||||
'(set)': '(set)',
|
||||
'(not set)': '(not set)',
|
||||
"Failed to switch model to '{{modelId}}'.\n\n{{error}}":
|
||||
"Failed to switch model to '{{modelId}}'.\n\n{{error}}",
|
||||
'The latest Qwen Coder model from Alibaba Cloud ModelStudio (version: qwen3-coder-plus-2025-09-23)':
|
||||
'The latest Qwen Coder model from Alibaba Cloud ModelStudio (version: qwen3-coder-plus-2025-09-23)',
|
||||
'The latest Qwen Vision model from Alibaba Cloud ModelStudio (version: qwen3-vl-plus-2025-09-23)':
|
||||
@@ -1064,6 +1037,7 @@ export default {
|
||||
'Applying percussive maintenance...',
|
||||
'Searching for the correct USB orientation...',
|
||||
'Ensuring the magic smoke stays inside the wires...',
|
||||
'Rewriting in Rust for no particular reason...',
|
||||
'Trying to exit Vim...',
|
||||
'Spinning up the hamster wheel...',
|
||||
"That's not a bug, it's an undocumented feature...",
|
||||
|
||||
@@ -89,10 +89,6 @@ export default {
|
||||
'No tools available': 'Нет доступных инструментов',
|
||||
'View or change the approval mode for tool usage':
|
||||
'Просмотр или изменение режима подтверждения для использования инструментов',
|
||||
'Invalid approval mode "{{arg}}". Valid modes: {{modes}}':
|
||||
'Недопустимый режим подтверждения "{{arg}}". Допустимые режимы: {{modes}}',
|
||||
'Approval mode set to "{{mode}}"':
|
||||
'Режим подтверждения установлен на "{{mode}}"',
|
||||
'View or change the language setting':
|
||||
'Просмотр или изменение настроек языка',
|
||||
'change the theme': 'Изменение темы',
|
||||
@@ -786,21 +782,6 @@ export default {
|
||||
'Время ожидания авторизации истекло. Пожалуйста, попробуйте снова.',
|
||||
'Waiting for auth... (Press ESC or CTRL+C to cancel)':
|
||||
'Ожидание авторизации... (Нажмите ESC или CTRL+C для отмены)',
|
||||
'Missing API key for OpenAI-compatible auth. Set settings.security.auth.apiKey, or set the {{envKeyHint}} environment variable.':
|
||||
'Отсутствует API-ключ для аутентификации, совместимой с OpenAI. Укажите settings.security.auth.apiKey или переменную окружения {{envKeyHint}}.',
|
||||
'{{envKeyHint}} environment variable not found.':
|
||||
'Переменная окружения {{envKeyHint}} не найдена.',
|
||||
'{{envKeyHint}} environment variable not found. Please set it in your .env file or environment variables.':
|
||||
'Переменная окружения {{envKeyHint}} не найдена. Укажите её в файле .env или среди системных переменных.',
|
||||
'{{envKeyHint}} environment variable not found (or set settings.security.auth.apiKey). Please set it in your .env file or environment variables.':
|
||||
'Переменная окружения {{envKeyHint}} не найдена (или установите settings.security.auth.apiKey). Укажите её в файле .env или среди системных переменных.',
|
||||
'Missing API key for OpenAI-compatible auth. Set the {{envKeyHint}} environment variable.':
|
||||
'Отсутствует API-ключ для аутентификации, совместимой с OpenAI. Установите переменную окружения {{envKeyHint}}.',
|
||||
'Anthropic provider missing required baseUrl in modelProviders[].baseUrl.':
|
||||
'У провайдера Anthropic отсутствует обязательный baseUrl в modelProviders[].baseUrl.',
|
||||
'ANTHROPIC_BASE_URL environment variable not found.':
|
||||
'Переменная окружения ANTHROPIC_BASE_URL не найдена.',
|
||||
'Invalid auth method selected.': 'Выбран недопустимый метод авторизации.',
|
||||
'Failed to authenticate. Message: {{message}}':
|
||||
'Не удалось авторизоваться. Сообщение: {{message}}',
|
||||
'Authenticated successfully with {{authType}} credentials.':
|
||||
@@ -822,15 +803,6 @@ export default {
|
||||
// ============================================================================
|
||||
'Select Model': 'Выбрать модель',
|
||||
'(Press Esc to close)': '(Нажмите Esc для закрытия)',
|
||||
'Current (effective) configuration': 'Текущая (фактическая) конфигурация',
|
||||
AuthType: 'Тип авторизации',
|
||||
'API Key': 'API-ключ',
|
||||
unset: 'не задано',
|
||||
'(default)': '(по умолчанию)',
|
||||
'(set)': '(установлено)',
|
||||
'(not set)': '(не задано)',
|
||||
"Failed to switch model to '{{modelId}}'.\n\n{{error}}":
|
||||
"Не удалось переключиться на модель '{{modelId}}'.\n\n{{error}}",
|
||||
'The latest Qwen Coder model from Alibaba Cloud ModelStudio (version: qwen3-coder-plus-2025-09-23)':
|
||||
'Последняя модель Qwen Coder от Alibaba Cloud ModelStudio (версия: qwen3-coder-plus-2025-09-23)',
|
||||
'The latest Qwen Vision model from Alibaba Cloud ModelStudio (version: qwen3-vl-plus-2025-09-23)':
|
||||
@@ -1084,6 +1056,7 @@ export default {
|
||||
'Провожу настройку методом тыка...',
|
||||
'Ищем, какой стороной вставлять флешку...',
|
||||
'Следим, чтобы волшебный дым не вышел из проводов...',
|
||||
'Переписываем всё на Rust без особой причины...',
|
||||
'Пытаемся выйти из Vim...',
|
||||
'Раскручиваем колесо для хомяка...',
|
||||
'Это не баг, а фича...',
|
||||
|
||||
@@ -88,9 +88,6 @@ export default {
|
||||
'No tools available': '没有可用工具',
|
||||
'View or change the approval mode for tool usage':
|
||||
'查看或更改工具使用的审批模式',
|
||||
'Invalid approval mode "{{arg}}". Valid modes: {{modes}}':
|
||||
'无效的审批模式 "{{arg}}"。有效模式:{{modes}}',
|
||||
'Approval mode set to "{{mode}}"': '审批模式已设置为 "{{mode}}"',
|
||||
'View or change the language setting': '查看或更改语言设置',
|
||||
'change the theme': '更改主题',
|
||||
'Select Theme': '选择主题',
|
||||
@@ -728,21 +725,6 @@ export default {
|
||||
'Authentication timed out. Please try again.': '认证超时。请重试。',
|
||||
'Waiting for auth... (Press ESC or CTRL+C to cancel)':
|
||||
'正在等待认证...(按 ESC 或 CTRL+C 取消)',
|
||||
'Missing API key for OpenAI-compatible auth. Set settings.security.auth.apiKey, or set the {{envKeyHint}} environment variable.':
|
||||
'缺少 OpenAI 兼容认证的 API 密钥。请设置 settings.security.auth.apiKey 或设置 {{envKeyHint}} 环境变量。',
|
||||
'{{envKeyHint}} environment variable not found.':
|
||||
'未找到 {{envKeyHint}} 环境变量。',
|
||||
'{{envKeyHint}} environment variable not found. Please set it in your .env file or environment variables.':
|
||||
'未找到 {{envKeyHint}} 环境变量。请在 .env 文件或系统环境变量中进行设置。',
|
||||
'{{envKeyHint}} environment variable not found (or set settings.security.auth.apiKey). Please set it in your .env file or environment variables.':
|
||||
'未找到 {{envKeyHint}} 环境变量(或设置 settings.security.auth.apiKey)。请在 .env 文件或系统环境变量中进行设置。',
|
||||
'Missing API key for OpenAI-compatible auth. Set the {{envKeyHint}} environment variable.':
|
||||
'缺少 OpenAI 兼容认证的 API 密钥。请设置 {{envKeyHint}} 环境变量。',
|
||||
'Anthropic provider missing required baseUrl in modelProviders[].baseUrl.':
|
||||
'Anthropic 提供商缺少必需的 baseUrl,请在 modelProviders[].baseUrl 中配置。',
|
||||
'ANTHROPIC_BASE_URL environment variable not found.':
|
||||
'未找到 ANTHROPIC_BASE_URL 环境变量。',
|
||||
'Invalid auth method selected.': '选择了无效的认证方式。',
|
||||
'Failed to authenticate. Message: {{message}}': '认证失败。消息:{{message}}',
|
||||
'Authenticated successfully with {{authType}} credentials.':
|
||||
'使用 {{authType}} 凭据成功认证。',
|
||||
@@ -762,15 +744,6 @@ export default {
|
||||
// ============================================================================
|
||||
'Select Model': '选择模型',
|
||||
'(Press Esc to close)': '(按 Esc 关闭)',
|
||||
'Current (effective) configuration': '当前(实际生效)配置',
|
||||
AuthType: '认证方式',
|
||||
'API Key': 'API 密钥',
|
||||
unset: '未设置',
|
||||
'(default)': '(默认)',
|
||||
'(set)': '(已设置)',
|
||||
'(not set)': '(未设置)',
|
||||
"Failed to switch model to '{{modelId}}'.\n\n{{error}}":
|
||||
"无法切换到模型 '{{modelId}}'.\n\n{{error}}",
|
||||
'The latest Qwen Coder model from Alibaba Cloud ModelStudio (version: qwen3-coder-plus-2025-09-23)':
|
||||
'来自阿里云 ModelStudio 的最新 Qwen Coder 模型(版本:qwen3-coder-plus-2025-09-23)',
|
||||
'The latest Qwen Vision model from Alibaba Cloud ModelStudio (version: qwen3-vl-plus-2025-09-23)':
|
||||
|
||||
@@ -630,67 +630,6 @@ describe('BaseJsonOutputAdapter', () => {
|
||||
|
||||
expect(state.blocks).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should preserve whitespace in thinking content', () => {
|
||||
const state = adapter.exposeCreateMessageState();
|
||||
adapter.startAssistantMessage();
|
||||
|
||||
adapter.exposeAppendThinking(
|
||||
state,
|
||||
'',
|
||||
'The user just said "Hello"',
|
||||
null,
|
||||
);
|
||||
|
||||
expect(state.blocks).toHaveLength(1);
|
||||
expect(state.blocks[0]).toMatchObject({
|
||||
type: 'thinking',
|
||||
thinking: 'The user just said "Hello"',
|
||||
});
|
||||
// Verify spaces are preserved
|
||||
const block = state.blocks[0] as { thinking: string };
|
||||
expect(block.thinking).toContain('user just');
|
||||
expect(block.thinking).not.toContain('userjust');
|
||||
});
|
||||
|
||||
it('should preserve whitespace when appending multiple thinking fragments', () => {
|
||||
const state = adapter.exposeCreateMessageState();
|
||||
adapter.startAssistantMessage();
|
||||
|
||||
// Simulate streaming thinking content in fragments
|
||||
adapter.exposeAppendThinking(state, '', 'The user just', null);
|
||||
adapter.exposeAppendThinking(state, '', ' said "Hello"', null);
|
||||
adapter.exposeAppendThinking(
|
||||
state,
|
||||
'',
|
||||
'. This is a simple greeting',
|
||||
null,
|
||||
);
|
||||
|
||||
expect(state.blocks).toHaveLength(1);
|
||||
const block = state.blocks[0] as { thinking: string };
|
||||
// Verify the complete text with all spaces preserved
|
||||
expect(block.thinking).toBe(
|
||||
'The user just said "Hello". This is a simple greeting',
|
||||
);
|
||||
// Verify specific space preservation
|
||||
expect(block.thinking).toContain('user just ');
|
||||
expect(block.thinking).toContain(' said');
|
||||
expect(block.thinking).toContain('". This');
|
||||
expect(block.thinking).not.toContain('userjust');
|
||||
expect(block.thinking).not.toContain('justsaid');
|
||||
});
|
||||
|
||||
it('should preserve leading and trailing whitespace in description', () => {
|
||||
const state = adapter.exposeCreateMessageState();
|
||||
adapter.startAssistantMessage();
|
||||
|
||||
adapter.exposeAppendThinking(state, '', ' content with spaces ', null);
|
||||
|
||||
expect(state.blocks).toHaveLength(1);
|
||||
const block = state.blocks[0] as { thinking: string };
|
||||
expect(block.thinking).toBe(' content with spaces ');
|
||||
});
|
||||
});
|
||||
|
||||
describe('appendToolUse', () => {
|
||||
|
||||
@@ -816,18 +816,9 @@ export abstract class BaseJsonOutputAdapter {
|
||||
parentToolUseId?: string | null,
|
||||
): void {
|
||||
const actualParentToolUseId = parentToolUseId ?? null;
|
||||
|
||||
// Build fragment without trimming to preserve whitespace in streaming content
|
||||
// Only filter out null/undefined/empty values
|
||||
const parts: string[] = [];
|
||||
if (subject && subject.length > 0) {
|
||||
parts.push(subject);
|
||||
}
|
||||
if (description && description.length > 0) {
|
||||
parts.push(description);
|
||||
}
|
||||
|
||||
const fragment = parts.join(': ');
|
||||
const fragment = [subject?.trim(), description?.trim()]
|
||||
.filter((value) => value && value.length > 0)
|
||||
.join(': ');
|
||||
if (!fragment) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -323,68 +323,6 @@ describe('StreamJsonOutputAdapter', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should preserve whitespace in thinking content (issue #1356)', () => {
|
||||
adapter.processEvent({
|
||||
type: GeminiEventType.Thought,
|
||||
value: {
|
||||
subject: '',
|
||||
description: 'The user just said "Hello"',
|
||||
},
|
||||
});
|
||||
|
||||
const message = adapter.finalizeAssistantMessage();
|
||||
expect(message.message.content).toHaveLength(1);
|
||||
const block = message.message.content[0] as {
|
||||
type: string;
|
||||
thinking: string;
|
||||
};
|
||||
expect(block.type).toBe('thinking');
|
||||
expect(block.thinking).toBe('The user just said "Hello"');
|
||||
// Verify spaces are preserved
|
||||
expect(block.thinking).toContain('user just');
|
||||
expect(block.thinking).not.toContain('userjust');
|
||||
});
|
||||
|
||||
it('should preserve whitespace when streaming multiple thinking fragments (issue #1356)', () => {
|
||||
// Simulate streaming thinking content in multiple events
|
||||
adapter.processEvent({
|
||||
type: GeminiEventType.Thought,
|
||||
value: {
|
||||
subject: '',
|
||||
description: 'The user just',
|
||||
},
|
||||
});
|
||||
adapter.processEvent({
|
||||
type: GeminiEventType.Thought,
|
||||
value: {
|
||||
subject: '',
|
||||
description: ' said "Hello"',
|
||||
},
|
||||
});
|
||||
adapter.processEvent({
|
||||
type: GeminiEventType.Thought,
|
||||
value: {
|
||||
subject: '',
|
||||
description: '. This is a simple greeting',
|
||||
},
|
||||
});
|
||||
|
||||
const message = adapter.finalizeAssistantMessage();
|
||||
expect(message.message.content).toHaveLength(1);
|
||||
const block = message.message.content[0] as {
|
||||
type: string;
|
||||
thinking: string;
|
||||
};
|
||||
expect(block.thinking).toBe(
|
||||
'The user just said "Hello". This is a simple greeting',
|
||||
);
|
||||
// Verify specific spaces are preserved
|
||||
expect(block.thinking).toContain('user just ');
|
||||
expect(block.thinking).toContain(' said');
|
||||
expect(block.thinking).not.toContain('userjust');
|
||||
expect(block.thinking).not.toContain('justsaid');
|
||||
});
|
||||
|
||||
it('should append tool use from ToolCallRequest events', () => {
|
||||
adapter.processEvent({
|
||||
type: GeminiEventType.ToolCallRequest,
|
||||
|
||||
@@ -298,9 +298,7 @@ describe('runNonInteractive', () => {
|
||||
mockConfig,
|
||||
expect.objectContaining({ name: 'testTool' }),
|
||||
expect.any(AbortSignal),
|
||||
expect.objectContaining({
|
||||
outputUpdateHandler: expect.any(Function),
|
||||
}),
|
||||
undefined,
|
||||
);
|
||||
// Verify first call has isContinuation: false
|
||||
expect(mockGeminiClient.sendMessageStream).toHaveBeenNthCalledWith(
|
||||
@@ -773,52 +771,6 @@ describe('runNonInteractive', () => {
    );
  });

  it('should handle API errors in text mode and exit with error code', async () => {
    (mockConfig.getOutputFormat as Mock).mockReturnValue(OutputFormat.TEXT);
    setupMetricsMock();

    // Simulate an API error event (like 401 unauthorized)
    const apiErrorEvent: ServerGeminiStreamEvent = {
      type: GeminiEventType.Error,
      value: {
        error: {
          message: '401 Incorrect API key provided',
          status: 401,
        },
      },
    };

    mockGeminiClient.sendMessageStream.mockReturnValue(
      createStreamFromEvents([apiErrorEvent]),
    );

    let thrownError: Error | null = null;
    try {
      await runNonInteractive(
        mockConfig,
        mockSettings,
        'Test input',
        'prompt-id-api-error',
      );
      // Should not reach here
      expect.fail('Expected error to be thrown');
    } catch (error) {
      thrownError = error as Error;
    }

    // Should throw with the API error message
    expect(thrownError).toBeTruthy();
    expect(thrownError?.message).toContain('401');
    expect(thrownError?.message).toContain('Incorrect API key provided');

    // Verify error was written to stderr
    expect(processStderrSpy).toHaveBeenCalled();
    const stderrCalls = processStderrSpy.mock.calls;
    const errorOutput = stderrCalls.map((call) => call[0]).join('');
    expect(errorOutput).toContain('401');
    expect(errorOutput).toContain('Incorrect API key provided');
  });

  it('should handle FatalInputError with custom exit code in JSON format', async () => {
    (mockConfig.getOutputFormat as Mock).mockReturnValue(OutputFormat.JSON);
    setupMetricsMock();
@@ -1825,84 +1777,4 @@ describe('runNonInteractive', () => {
      { isContinuation: false },
    );
  });

  it('should print tool output to console in text mode (non-Task tools)', async () => {
    // Test that tool output is printed to stdout in text mode
    const toolCallEvent: ServerGeminiStreamEvent = {
      type: GeminiEventType.ToolCallRequest,
      value: {
        callId: 'tool-1',
        name: 'run_in_terminal',
        args: { command: 'npm outdated' },
        isClientInitiated: false,
        prompt_id: 'prompt-id-tool-output',
      },
    };

    // Mock tool execution with outputUpdateHandler being called
    mockCoreExecuteToolCall.mockImplementation(
      async (_config, _request, _signal, options) => {
        // Simulate tool calling outputUpdateHandler with output chunks
        if (options?.outputUpdateHandler) {
          options.outputUpdateHandler('tool-1', 'Package outdated\n');
          options.outputUpdateHandler('tool-1', 'npm@1.0.0 -> npm@2.0.0\n');
        }
        return {
          responseParts: [
            {
              functionResponse: {
                id: 'tool-1',
                name: 'run_in_terminal',
                response: {
                  output: 'Package outdated\nnpm@1.0.0 -> npm@2.0.0',
                },
              },
            },
          ],
        };
      },
    );

    const firstCallEvents: ServerGeminiStreamEvent[] = [
      toolCallEvent,
      {
        type: GeminiEventType.Finished,
        value: { reason: undefined, usageMetadata: { totalTokenCount: 5 } },
      },
    ];

    const secondCallEvents: ServerGeminiStreamEvent[] = [
      { type: GeminiEventType.Content, value: 'Dependencies checked' },
      {
        type: GeminiEventType.Finished,
        value: { reason: undefined, usageMetadata: { totalTokenCount: 3 } },
      },
    ];

    mockGeminiClient.sendMessageStream
      .mockReturnValueOnce(createStreamFromEvents(firstCallEvents))
      .mockReturnValueOnce(createStreamFromEvents(secondCallEvents));

    await runNonInteractive(
      mockConfig,
      mockSettings,
      'Check dependencies',
      'prompt-id-tool-output',
    );

    // Verify that executeToolCall was called with outputUpdateHandler
    expect(mockCoreExecuteToolCall).toHaveBeenCalledWith(
      mockConfig,
      expect.objectContaining({ name: 'run_in_terminal' }),
      expect.any(AbortSignal),
      expect.objectContaining({
        outputUpdateHandler: expect.any(Function),
      }),
    );

    // Verify tool output was written to stdout
    expect(processStdoutSpy).toHaveBeenCalledWith('Package outdated\n');
    expect(processStdoutSpy).toHaveBeenCalledWith('npm@1.0.0 -> npm@2.0.0\n');
    expect(processStdoutSpy).toHaveBeenCalledWith('Dependencies checked');
  });
});
@@ -4,11 +4,7 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import type {
  Config,
  ToolCallRequestInfo,
  ToolResultDisplay,
} from '@qwen-code/qwen-code-core';
import type { Config, ToolCallRequestInfo } from '@qwen-code/qwen-code-core';
import { isSlashCommand } from './ui/utils/commandUtils.js';
import type { LoadedSettings } from './config/settings.js';
import {
@@ -312,8 +308,6 @@ export async function runNonInteractive(
        config.getContentGeneratorConfig()?.authType,
      );
      process.stderr.write(`${errorText}\n`);
      // Throw error to exit with non-zero code
      throw new Error(errorText);
    }
  }
}
@@ -339,7 +333,7 @@ export async function runNonInteractive(
    ? options.controlService.permission.getToolCallUpdateCallback()
    : undefined;

  // Create output handler for Task tool (for subagent execution)
  // Only pass outputUpdateHandler for Task tool
  const isTaskTool = finalRequestInfo.name === 'task';
  const taskToolProgress = isTaskTool
    ? createTaskToolProgressHandler(
@@ -349,41 +343,20 @@ export async function runNonInteractive(
      )
    : undefined;
  const taskToolProgressHandler = taskToolProgress?.handler;

  // Create output handler for non-Task tools in text mode (for console output)
  const nonTaskOutputHandler =
    !isTaskTool && !adapter
      ? (callId: string, outputChunk: ToolResultDisplay) => {
          // Print tool output to console in text mode
          if (typeof outputChunk === 'string') {
            process.stdout.write(outputChunk);
          } else if (
            outputChunk &&
            typeof outputChunk === 'object' &&
            'ansiOutput' in outputChunk
          ) {
            // Handle ANSI output - just print as string for now
            process.stdout.write(String(outputChunk.ansiOutput));
          }
        }
      : undefined;

  // Combine output handlers
  const outputUpdateHandler =
    taskToolProgressHandler || nonTaskOutputHandler;

  const toolResponse = await executeToolCall(
    config,
    finalRequestInfo,
    abortController.signal,
    outputUpdateHandler || toolCallUpdateCallback
    isTaskTool && taskToolProgressHandler
      ? {
          ...(outputUpdateHandler && { outputUpdateHandler }),
          ...(toolCallUpdateCallback && {
            onToolCallsUpdate: toolCallUpdateCallback,
          }),
          outputUpdateHandler: taskToolProgressHandler,
          onToolCallsUpdate: toolCallUpdateCallback,
        }
      : undefined,
      : toolCallUpdateCallback
        ? {
            onToolCallsUpdate: toolCallUpdateCallback,
          }
        : undefined,
  );

  // Note: In JSON mode, subagent messages are automatically added to the main
327 packages/cli/src/services/FileCommandLoader-extension.test.ts (new file)
@@ -0,0 +1,327 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { FileCommandLoader } from './FileCommandLoader.js';
import type { Config } from '@qwen-code/qwen-code-core';
import { Storage } from '@qwen-code/qwen-code-core';

describe('FileCommandLoader - Extension Commands Support', () => {
  let tempDir: string;
  let mockConfig: Partial<Config>;

  beforeEach(async () => {
    tempDir = await fs.promises.mkdtemp(
      path.join(os.tmpdir(), 'file-command-loader-ext-test-'),
    );

    mockConfig = {
      getFolderTrustFeature: () => false,
      getFolderTrust: () => true,
      getProjectRoot: () => tempDir,
      storage: new Storage(tempDir),
      getExtensions: () => [],
    };
  });

  afterEach(async () => {
    await fs.promises.rm(tempDir, { recursive: true, force: true });
  });

  it('should load commands from extension with config.commands path', async () => {
    // Setup extension structure
    const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'test-ext');
    const customCommandsDir = path.join(extensionDir, 'custom-cmds');
    await fs.promises.mkdir(customCommandsDir, { recursive: true });

    // Create extension config with custom commands path
    const extensionConfig = {
      name: 'test-ext',
      version: '1.0.0',
      commands: 'custom-cmds',
    };
    await fs.promises.writeFile(
      path.join(extensionDir, 'qwen-extension.json'),
      JSON.stringify(extensionConfig),
    );

    // Create a test command in custom directory
    const commandContent =
      '---\ndescription: Test command from extension\n---\nDo something';
    await fs.promises.writeFile(
      path.join(customCommandsDir, 'test.md'),
      commandContent,
    );

    // Mock config to return the extension
    mockConfig.getExtensions = () => [
      {
        id: 'test-ext',
        config: extensionConfig,
        name: 'test-ext',
        version: '1.0.0',
        isActive: true,
        path: extensionDir,
        contextFiles: [],
      },
    ];

    const loader = new FileCommandLoader(mockConfig as Config);
    const commands = await loader.loadCommands(new AbortController().signal);

    expect(commands).toHaveLength(1);
    expect(commands[0].name).toBe('test-ext:test');
    expect(commands[0].description).toBe(
      '[test-ext] Test command from extension',
    );
  });

  it('should load commands from extension with multiple commands paths', async () => {
    // Setup extension structure
    const extensionDir = path.join(tempDir, '.qwen', 'extensions', 'multi-ext');
    const cmdsDir1 = path.join(extensionDir, 'commands1');
    const cmdsDir2 = path.join(extensionDir, 'commands2');
    await fs.promises.mkdir(cmdsDir1, { recursive: true });
    await fs.promises.mkdir(cmdsDir2, { recursive: true });

    // Create extension config with multiple commands paths
    const extensionConfig = {
      name: 'multi-ext',
      version: '1.0.0',
      commands: ['commands1', 'commands2'],
    };
    await fs.promises.writeFile(
      path.join(extensionDir, 'qwen-extension.json'),
      JSON.stringify(extensionConfig),
    );

    // Create test commands in both directories
    await fs.promises.writeFile(
      path.join(cmdsDir1, 'cmd1.md'),
      '---\n---\nCommand 1',
    );
    await fs.promises.writeFile(
      path.join(cmdsDir2, 'cmd2.md'),
      '---\n---\nCommand 2',
    );

    // Mock config to return the extension
    mockConfig.getExtensions = () => [
      {
        id: 'multi-ext',
        config: extensionConfig,
        contextFiles: [],
        name: 'multi-ext',
        version: '1.0.0',
        isActive: true,
        path: extensionDir,
      },
    ];

    const loader = new FileCommandLoader(mockConfig as Config);
    const commands = await loader.loadCommands(new AbortController().signal);

    expect(commands).toHaveLength(2);
    const commandNames = commands.map((c) => c.name).sort();
    expect(commandNames).toEqual(['multi-ext:cmd1', 'multi-ext:cmd2']);
  });

  it('should fallback to default "commands" directory when config.commands not specified', async () => {
    // Setup extension structure with default commands directory
    const extensionDir = path.join(
      tempDir,
      '.qwen',
      'extensions',
      'default-ext',
    );
    const defaultCommandsDir = path.join(extensionDir, 'commands');
    await fs.promises.mkdir(defaultCommandsDir, { recursive: true });

    // Create extension config without commands field
    const extensionConfig = {
      name: 'default-ext',
      version: '1.0.0',
    };
    await fs.promises.writeFile(
      path.join(extensionDir, 'qwen-extension.json'),
      JSON.stringify(extensionConfig),
    );

    // Create a test command in default directory
    await fs.promises.writeFile(
      path.join(defaultCommandsDir, 'default.md'),
      '---\n---\nDefault command',
    );

    // Mock config to return the extension
    mockConfig.getExtensions = () => [
      {
        id: 'default-ext',
        config: extensionConfig,
        contextFiles: [],
        name: 'default-ext',
        version: '1.0.0',
        isActive: true,
        path: extensionDir,
      },
    ];

    const loader = new FileCommandLoader(mockConfig as Config);
    const commands = await loader.loadCommands(new AbortController().signal);

    expect(commands).toHaveLength(1);
    expect(commands[0].name).toBe('default-ext:default');
  });

  it('should handle extension without commands directory gracefully', async () => {
    // Setup extension structure without commands directory
    const extensionDir = path.join(
      tempDir,
      '.qwen',
      'extensions',
      'no-cmds-ext',
    );
    await fs.promises.mkdir(extensionDir, { recursive: true });

    // Create extension config
    const extensionConfig = {
      name: 'no-cmds-ext',
      version: '1.0.0',
    };
    await fs.promises.writeFile(
      path.join(extensionDir, 'qwen-extension.json'),
      JSON.stringify(extensionConfig),
    );

    // Mock config to return the extension
    mockConfig.getExtensions = () => [
      {
        id: 'no-cmds-ext',
        config: extensionConfig,
        contextFiles: [],
        name: 'no-cmds-ext',
        version: '1.0.0',
        isActive: true,
        path: extensionDir,
      },
    ];

    const loader = new FileCommandLoader(mockConfig as Config);
    const commands = await loader.loadCommands(new AbortController().signal);

    // Should not throw and return empty array
    expect(commands).toHaveLength(0);
  });

  it('should prefix extension commands with extension name', async () => {
    // Setup extension
    const extensionDir = path.join(
      tempDir,
      '.qwen',
      'extensions',
      'prefix-ext',
    );
    const commandsDir = path.join(extensionDir, 'commands');
    await fs.promises.mkdir(commandsDir, { recursive: true });

    const extensionConfig = {
      name: 'prefix-ext',
      version: '1.0.0',
    };
    await fs.promises.writeFile(
      path.join(extensionDir, 'qwen-extension.json'),
      JSON.stringify(extensionConfig),
    );

    await fs.promises.writeFile(
      path.join(commandsDir, 'mycommand.md'),
      '---\n---\nMy command',
    );

    mockConfig.getExtensions = () => [
      {
        id: 'prefix-ext',
        config: extensionConfig,
        contextFiles: [],
        name: 'prefix-ext',
        version: '1.0.0',
        isActive: true,
        path: extensionDir,
      },
    ];

    const loader = new FileCommandLoader(mockConfig as Config);
    const commands = await loader.loadCommands(new AbortController().signal);

    expect(commands).toHaveLength(1);
    expect(commands[0].name).toBe('prefix-ext:mycommand');
  });

  it('should load commands from multiple extensions in alphabetical order', async () => {
    // Setup two extensions
    const ext1Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-b');
    const ext2Dir = path.join(tempDir, '.qwen', 'extensions', 'ext-a');

    await fs.promises.mkdir(path.join(ext1Dir, 'commands'), {
      recursive: true,
    });
    await fs.promises.mkdir(path.join(ext2Dir, 'commands'), {
      recursive: true,
    });

    // Extension B
    await fs.promises.writeFile(
      path.join(ext1Dir, 'qwen-extension.json'),
      JSON.stringify({ name: 'ext-b', version: '1.0.0' }),
    );
    await fs.promises.writeFile(
      path.join(ext1Dir, 'commands', 'cmd.md'),
      '---\n---\nCommand B',
    );

    // Extension A
    await fs.promises.writeFile(
      path.join(ext2Dir, 'qwen-extension.json'),
      JSON.stringify({ name: 'ext-a', version: '1.0.0' }),
    );
    await fs.promises.writeFile(
      path.join(ext2Dir, 'commands', 'cmd.md'),
      '---\n---\nCommand A',
    );

    mockConfig.getExtensions = () => [
      {
        id: 'ext-b',
        config: { name: 'ext-b', version: '1.0.0' },
        contextFiles: [],
        name: 'ext-b',
        version: '1.0.0',
        isActive: true,
        path: ext1Dir,
      },
      {
        id: 'ext-a',
        config: { name: 'ext-a', version: '1.0.0' },
        contextFiles: [],
        name: 'ext-a',
        version: '1.0.0',
        isActive: true,
        path: ext2Dir,
      },
    ];

    const loader = new FileCommandLoader(mockConfig as Config);
    const commands = await loader.loadCommands(new AbortController().signal);

    expect(commands).toHaveLength(2);
    // Extensions are sorted alphabetically, so ext-a comes before ext-b
    expect(commands[0].name).toBe('ext-a:cmd');
    expect(commands[1].name).toBe('ext-b:cmd');
  });
});
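The tests above exercise the extension layout this change introduces. The following is a minimal sketch of what such an extension looks like on disk; the extension and command names here are illustrative, not shipped examples.

```ts
// Hypothetical layout exercised by the tests above:
//   .qwen/extensions/my-ext/qwen-extension.json
//   .qwen/extensions/my-ext/custom-cmds/hello.md
//
// `commands` may be a single path or an array of paths; relative paths are
// resolved against the extension root. Omitting it falls back to 'commands/'.
const extensionConfig = {
  name: 'my-ext',
  version: '1.0.0',
  commands: ['custom-cmds'],
};

// custom-cmds/hello.md — Markdown command with YAML frontmatter.
const helloCommand = `---
description: Say hello
---
Greet the user politely.`;
// Once loaded, this command is exposed as `/my-ext:hello`.
```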
117 packages/cli/src/services/FileCommandLoader-markdown.test.ts (new file)
@@ -0,0 +1,117 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import { FileCommandLoader } from './FileCommandLoader.js';

describe('FileCommandLoader - Markdown support', () => {
  let tempDir: string;

  beforeAll(async () => {
    // Create a temporary directory for test commands
    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-md-test-'));
  });

  afterAll(async () => {
    // Clean up
    await fs.rm(tempDir, { recursive: true, force: true });
  });

  it('should load markdown commands with frontmatter', async () => {
    // Create a test markdown command file
    const mdContent = `---
description: Test markdown command
---

This is a test prompt from markdown.`;

    const commandPath = path.join(tempDir, 'test-command.md');
    await fs.writeFile(commandPath, mdContent, 'utf-8');

    // Create loader with temp dir as command source
    const loader = new FileCommandLoader(null);

    // Mock the getCommandDirectories to return our temp dir
    const originalMethod = loader['getCommandDirectories'];
    loader['getCommandDirectories'] = () => [{ path: tempDir }];

    try {
      const commands = await loader.loadCommands(new AbortController().signal);

      expect(commands).toHaveLength(1);
      expect(commands[0].name).toBe('test-command');
      expect(commands[0].description).toBe('Test markdown command');
    } finally {
      // Restore original method
      loader['getCommandDirectories'] = originalMethod;
    }
  });

  it('should load markdown commands without frontmatter', async () => {
    // Create a test markdown command file without frontmatter
    const mdContent = 'This is a simple prompt without frontmatter.';

    const commandPath = path.join(tempDir, 'simple-command.md');
    await fs.writeFile(commandPath, mdContent, 'utf-8');

    const loader = new FileCommandLoader(null);
    const originalMethod = loader['getCommandDirectories'];
    loader['getCommandDirectories'] = () => [{ path: tempDir }];

    try {
      const commands = await loader.loadCommands(new AbortController().signal);

      const simpleCommand = commands.find(
        (cmd) => cmd.name === 'simple-command',
      );
      expect(simpleCommand).toBeDefined();
      expect(simpleCommand?.description).toContain('Custom command from');
    } finally {
      loader['getCommandDirectories'] = originalMethod;
    }
  });

  it('should load both toml and markdown commands', async () => {
    // Create both TOML and Markdown files
    const tomlContent = `prompt = "TOML prompt"
description = "TOML command"`;

    const mdContent = `---
description: Markdown command
---

Markdown prompt`;

    await fs.writeFile(
      path.join(tempDir, 'toml-cmd.toml'),
      tomlContent,
      'utf-8',
    );
    await fs.writeFile(path.join(tempDir, 'md-cmd.md'), mdContent, 'utf-8');

    const loader = new FileCommandLoader(null);
    const originalMethod = loader['getCommandDirectories'];
    loader['getCommandDirectories'] = () => [{ path: tempDir }];

    try {
      const commands = await loader.loadCommands(new AbortController().signal);

      const tomlCommand = commands.find((cmd) => cmd.name === 'toml-cmd');
      const mdCommand = commands.find((cmd) => cmd.name === 'md-cmd');

      expect(tomlCommand).toBeDefined();
      expect(tomlCommand?.description).toBe('TOML command');

      expect(mdCommand).toBeDefined();
      expect(mdCommand?.description).toBe('Markdown command');
    } finally {
      loader['getCommandDirectories'] = originalMethod;
    }
  });
});
@@ -568,9 +568,9 @@ describe('FileCommandLoader', () => {

    expect(commands).toHaveLength(3);
    const commandNames = commands.map((cmd) => cmd.name);
    expect(commandNames).toEqual(['user', 'project', 'ext']);
    expect(commandNames).toEqual(['user', 'project', 'test-ext:ext']);

    const extCommand = commands.find((cmd) => cmd.name === 'ext');
    const extCommand = commands.find((cmd) => cmd.name === 'test-ext:ext');
    expect(extCommand?.extensionName).toBe('test-ext');
    expect(extCommand?.description).toMatch(/^\[test-ext\]/);
  });
@@ -656,14 +656,14 @@ describe('FileCommandLoader', () => {
      expect(result1.content).toEqual([{ text: 'Project deploy command' }]);
    }

    expect(commands[2].name).toBe('deploy');
    expect(commands[2].name).toBe('test-ext:deploy');
    expect(commands[2].extensionName).toBe('test-ext');
    expect(commands[2].description).toMatch(/^\[test-ext\]/);
    const result2 = await commands[2].action?.(
      createMockCommandContext({
        invocation: {
          raw: '/deploy',
          name: 'deploy',
          raw: '/test-ext:deploy',
          name: 'test-ext:deploy',
          args: '',
        },
      }),
@@ -729,7 +729,7 @@ describe('FileCommandLoader', () => {
    const commands = await loader.loadCommands(signal);

    expect(commands).toHaveLength(1);
    expect(commands[0].name).toBe('active');
    expect(commands[0].name).toBe('active-ext:active');
    expect(commands[0].extensionName).toBe('active-ext');
    expect(commands[0].description).toMatch(/^\[active-ext\]/);
  });
@@ -803,17 +803,17 @@ describe('FileCommandLoader', () => {
    expect(commands).toHaveLength(3);

    const commandNames = commands.map((cmd) => cmd.name).sort();
    expect(commandNames).toEqual(['b:c', 'b:d:e', 'simple']);
    expect(commandNames).toEqual(['a:b:c', 'a:b:d:e', 'a:simple']);

    const nestedCmd = commands.find((cmd) => cmd.name === 'b:c');
    const nestedCmd = commands.find((cmd) => cmd.name === 'a:b:c');
    expect(nestedCmd?.extensionName).toBe('a');
    expect(nestedCmd?.description).toMatch(/^\[a\]/);
    expect(nestedCmd).toBeDefined();
    const result = await nestedCmd!.action?.(
      createMockCommandContext({
        invocation: {
          raw: '/b:c',
          name: 'b:c',
          raw: '/a:b:c',
          name: 'a:b:c',
          args: '',
        },
      }),
@@ -5,34 +5,23 @@
 */

import { promises as fs } from 'node:fs';
import * as fsSync from 'node:fs';
import path from 'node:path';
import toml from '@iarna/toml';
import { glob } from 'glob';
import { z } from 'zod';
import type { Config } from '@qwen-code/qwen-code-core';
import { Storage } from '@qwen-code/qwen-code-core';
import { EXTENSIONS_CONFIG_FILENAME, Storage } from '@qwen-code/qwen-code-core';
import type { ICommandLoader } from './types.js';
import type {
  CommandContext,
  SlashCommand,
  SlashCommandActionReturn,
} from '../ui/commands/types.js';
import { CommandKind } from '../ui/commands/types.js';
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
import type {
  IPromptProcessor,
  PromptPipelineContent,
} from './prompt-processors/types.js';
import {
  SHORTHAND_ARGS_PLACEHOLDER,
  SHELL_INJECTION_TRIGGER,
  AT_FILE_INJECTION_TRIGGER,
} from './prompt-processors/types.js';
  parseMarkdownCommand,
  MarkdownCommandDefSchema,
} from './markdown-command-parser.js';
import {
  ConfirmationRequiredError,
  ShellProcessor,
} from './prompt-processors/shellProcessor.js';
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';
  createSlashCommandFromDefinition,
  type CommandDefinition,
} from './command-factory.js';
import type { SlashCommand } from '../ui/commands/types.js';

interface CommandDirectory {
  path: string;
@@ -96,7 +85,12 @@ export class FileCommandLoader implements ICommandLoader {
    const commandDirs = this.getCommandDirectories();
    for (const dirInfo of commandDirs) {
      try {
        const files = await glob('**/*.toml', {
        // Scan both .toml and .md files
        const tomlFiles = await glob('**/*.toml', {
          ...globOptions,
          cwd: dirInfo.path,
        });
        const mdFiles = await glob('**/*.md', {
          ...globOptions,
          cwd: dirInfo.path,
        });
@@ -105,18 +99,28 @@ export class FileCommandLoader implements ICommandLoader {
          return [];
        }

        const commandPromises = files.map((file) =>
          this.parseAndAdaptFile(
        // Process TOML files
        const tomlCommandPromises = tomlFiles.map((file) =>
          this.parseAndAdaptTomlFile(
            path.join(dirInfo.path, file),
            dirInfo.path,
            dirInfo.extensionName,
          ),
        );

        const commands = (await Promise.all(commandPromises)).filter(
          (cmd): cmd is SlashCommand => cmd !== null,
        // Process Markdown files
        const mdCommandPromises = mdFiles.map((file) =>
          this.parseAndAdaptMarkdownFile(
            path.join(dirInfo.path, file),
            dirInfo.path,
            dirInfo.extensionName,
          ),
        );

        const commands = (
          await Promise.all([...tomlCommandPromises, ...mdCommandPromises])
        ).filter((cmd): cmd is SlashCommand => cmd !== null);

        // Add all commands without deduplication
        allCommands.push(...commands);
      } catch (error) {
@@ -159,17 +163,73 @@ export class FileCommandLoader implements ICommandLoader {
        .filter((ext) => ext.isActive)
        .sort((a, b) => a.name.localeCompare(b.name)); // Sort alphabetically for deterministic loading

      const extensionCommandDirs = activeExtensions.map((ext) => ({
        path: path.join(ext.path, 'commands'),
        extensionName: ext.name,
      }));
      // Collect command directories from each extension
      for (const ext of activeExtensions) {
        // Get commands paths from extension config
        const commandsPaths = this.getExtensionCommandsPaths(ext);

      dirs.push(...extensionCommandDirs);
        for (const cmdPath of commandsPaths) {
          dirs.push({
            path: cmdPath,
            extensionName: ext.name,
          });
        }
      }
    }

    return dirs;
  }

  /**
   * Get commands paths from an extension.
   * Returns paths from config.commands if specified, otherwise defaults to 'commands' directory.
   */
  private getExtensionCommandsPaths(ext: {
    path: string;
    name: string;
  }): string[] {
    // Try to get extension config
    try {
      const configPath = path.join(ext.path, EXTENSIONS_CONFIG_FILENAME);
      if (fsSync.existsSync(configPath)) {
        const configContent = fsSync.readFileSync(configPath, 'utf-8');
        const config = JSON.parse(configContent);

        if (config.commands) {
          const commandsArray = Array.isArray(config.commands)
            ? config.commands
            : [config.commands];

          return commandsArray
            .map((cmdPath: string) =>
              path.isAbsolute(cmdPath) ? cmdPath : path.join(ext.path, cmdPath),
            )
            .filter((cmdPath: string) => {
              try {
                return fsSync.existsSync(cmdPath);
              } catch {
                return false;
              }
            });
        }
      }
    } catch (error) {
      console.warn(`Failed to read extension config for ${ext.name}:`, error);
    }

    // Default fallback: use 'commands' directory
    const defaultPath = path.join(ext.path, 'commands');
    try {
      if (fsSync.existsSync(defaultPath)) {
        return [defaultPath];
      }
    } catch {
      // Ignore
    }

    return [];
  }

  /**
   * Parses a single .toml file and transforms it into a SlashCommand object.
   * @param filePath The absolute path to the .toml file.
@@ -177,7 +237,7 @@ export class FileCommandLoader implements ICommandLoader {
   * @param extensionName Optional extension name to prefix commands with.
   * @returns A promise resolving to a SlashCommand, or null if the file is invalid.
   */
  private async parseAndAdaptFile(
  private async parseAndAdaptTomlFile(
    filePath: string,
    baseDir: string,
    extensionName?: string,
@@ -216,104 +276,79 @@ export class FileCommandLoader implements ICommandLoader {

    const validDef = validationResult.data;

    const relativePathWithExt = path.relative(baseDir, filePath);
    const relativePath = relativePathWithExt.substring(
      0,
      relativePathWithExt.length - 5, // length of '.toml'
    );
    const baseCommandName = relativePath
      .split(path.sep)
      // Sanitize each path segment to prevent ambiguity. Since ':' is our
      // namespace separator, we replace any literal colons in filenames
      // with underscores to avoid naming conflicts.
      .map((segment) => segment.replaceAll(':', '_'))
      .join(':');

    // Add extension name tag for extension commands
    const defaultDescription = `Custom command from ${path.basename(filePath)}`;
    let description = validDef.description || defaultDescription;
    if (extensionName) {
      description = `[${extensionName}] ${description}`;
    }

    const processors: IPromptProcessor[] = [];
    const usesArgs = validDef.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
    const usesShellInjection = validDef.prompt.includes(
      SHELL_INJECTION_TRIGGER,
    );
    const usesAtFileInjection = validDef.prompt.includes(
      AT_FILE_INJECTION_TRIGGER,
    );

    // 1. @-File Injection (Security First).
    // This runs first to ensure we're not executing shell commands that
    // could dynamically generate malicious @-paths.
    if (usesAtFileInjection) {
      processors.push(new AtFileProcessor(baseCommandName));
    }

    // 2. Argument and Shell Injection.
    // This runs after file content has been safely injected.
    if (usesShellInjection || usesArgs) {
      processors.push(new ShellProcessor(baseCommandName));
    }

    // 3. Default Argument Handling.
    // Appends the raw invocation if no explicit {{args}} are used.
    if (!usesArgs) {
      processors.push(new DefaultArgumentProcessor());
    }

    return {
      name: baseCommandName,
      description,
      kind: CommandKind.FILE,
    // Use factory to create command
    return createSlashCommandFromDefinition(
      filePath,
      baseDir,
      validDef,
      extensionName,
      action: async (
        context: CommandContext,
        _args: string,
      ): Promise<SlashCommandActionReturn> => {
        if (!context.invocation) {
          console.error(
            `[FileCommandLoader] Critical error: Command '${baseCommandName}' was executed without invocation context.`,
          );
          return {
            type: 'submit_prompt',
            content: [{ text: validDef.prompt }], // Fallback to unprocessed prompt
          };
        }
      '.toml',
    );
  }

        try {
          let processedContent: PromptPipelineContent = [
            { text: validDef.prompt },
          ];
          for (const processor of processors) {
            processedContent = await processor.process(
              processedContent,
              context,
            );
          }
  /**
   * Parses a single .md file and transforms it into a SlashCommand object.
   * @param filePath The absolute path to the .md file.
   * @param baseDir The root command directory for name calculation.
   * @param extensionName Optional extension name to prefix commands with.
   * @returns A promise resolving to a SlashCommand, or null if the file is invalid.
   */
  private async parseAndAdaptMarkdownFile(
    filePath: string,
    baseDir: string,
    extensionName?: string,
  ): Promise<SlashCommand | null> {
    let fileContent: string;
    try {
      fileContent = await fs.readFile(filePath, 'utf-8');
    } catch (error: unknown) {
      console.error(
        `[FileCommandLoader] Failed to read file ${filePath}:`,
        error instanceof Error ? error.message : String(error),
      );
      return null;
    }

          return {
            type: 'submit_prompt',
            content: processedContent,
          };
        } catch (e) {
          // Check if it's our specific error type
          if (e instanceof ConfirmationRequiredError) {
            // Halt and request confirmation from the UI layer.
            return {
              type: 'confirm_shell_commands',
              commandsToConfirm: e.commandsToConfirm,
              originalInvocation: {
                raw: context.invocation.raw,
              },
            };
          }
          // Re-throw other errors to be handled by the global error handler.
          throw e;
        }
      },
    let parsed: ReturnType<typeof parseMarkdownCommand>;
    try {
      parsed = parseMarkdownCommand(fileContent);
    } catch (error: unknown) {
      console.error(
        `[FileCommandLoader] Failed to parse Markdown file ${filePath}:`,
        error instanceof Error ? error.message : String(error),
      );
      return null;
    }

    const validationResult = MarkdownCommandDefSchema.safeParse(parsed);

    if (!validationResult.success) {
      console.error(
        `[FileCommandLoader] Skipping invalid command file: ${filePath}. Validation errors:`,
        validationResult.error.flatten(),
      );
      return null;
    }

    const validDef = validationResult.data;

    // Convert to CommandDefinition format
    const definition: CommandDefinition = {
      prompt: validDef.prompt,
      description:
        validDef.frontmatter?.description &&
        typeof validDef.frontmatter.description === 'string'
          ? validDef.frontmatter.description
          : undefined,
    };

    // Use factory to create command
    return createSlashCommandFromDefinition(
      filePath,
      baseDir,
      definition,
      extensionName,
      '.md',
    );
  }
}
159 packages/cli/src/services/command-factory.ts (new file)
@@ -0,0 +1,159 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * This file contains helper functions for FileCommandLoader to create SlashCommand
 * objects from parsed command definitions (TOML or Markdown).
 */

import path from 'node:path';
import type {
  CommandContext,
  SlashCommand,
  SlashCommandActionReturn,
} from '../ui/commands/types.js';
import { CommandKind } from '../ui/commands/types.js';
import { DefaultArgumentProcessor } from './prompt-processors/argumentProcessor.js';
import type {
  IPromptProcessor,
  PromptPipelineContent,
} from './prompt-processors/types.js';
import {
  SHORTHAND_ARGS_PLACEHOLDER,
  SHELL_INJECTION_TRIGGER,
  AT_FILE_INJECTION_TRIGGER,
} from './prompt-processors/types.js';
import {
  ConfirmationRequiredError,
  ShellProcessor,
} from './prompt-processors/shellProcessor.js';
import { AtFileProcessor } from './prompt-processors/atFileProcessor.js';

export interface CommandDefinition {
  prompt: string;
  description?: string;
}

/**
 * Creates a SlashCommand from a parsed command definition.
 * This function is used by both TOML and Markdown command loaders.
 *
 * @param filePath The absolute path to the command file
 * @param baseDir The root command directory for name calculation
 * @param definition The parsed command definition (prompt and optional description)
 * @param extensionName Optional extension name to prefix commands with
 * @param fileExtension The file extension (e.g., '.toml' or '.md')
 * @returns A SlashCommand object
 */
export function createSlashCommandFromDefinition(
  filePath: string,
  baseDir: string,
  definition: CommandDefinition,
  extensionName: string | undefined,
  fileExtension: string,
): SlashCommand {
  const relativePathWithExt = path.relative(baseDir, filePath);
  const relativePath = relativePathWithExt.substring(
    0,
    relativePathWithExt.length - fileExtension.length,
  );
  const baseCommandName = relativePath
    .split(path.sep)
    // Sanitize each path segment to prevent ambiguity. Since ':' is our
    // namespace separator, we replace any literal colons in filenames
    // with underscores to avoid naming conflicts.
    .map((segment) => segment.replaceAll(':', '_'))
    .join(':');

  // Prefix command name with extension name if provided
  const commandName = extensionName
    ? `${extensionName}:${baseCommandName}`
    : baseCommandName;

  // Add extension name tag for extension commands
  const defaultDescription = `Custom command from ${path.basename(filePath)}`;
  let description = definition.description || defaultDescription;
  if (extensionName) {
    description = `[${extensionName}] ${description}`;
  }

  const processors: IPromptProcessor[] = [];
  const usesArgs = definition.prompt.includes(SHORTHAND_ARGS_PLACEHOLDER);
  const usesShellInjection = definition.prompt.includes(
    SHELL_INJECTION_TRIGGER,
  );
  const usesAtFileInjection = definition.prompt.includes(
    AT_FILE_INJECTION_TRIGGER,
  );

  // 1. @-File Injection (Security First).
  // This runs first to ensure we're not executing shell commands that
  // could dynamically generate malicious @-paths.
  if (usesAtFileInjection) {
    processors.push(new AtFileProcessor(baseCommandName));
  }

  // 2. Argument and Shell Injection.
  // This runs after file content has been safely injected.
  if (usesShellInjection || usesArgs) {
    processors.push(new ShellProcessor(baseCommandName));
  }

  // 3. Default Argument Handling.
  // Appends the raw invocation if no explicit {{args}} are used.
  if (!usesArgs) {
    processors.push(new DefaultArgumentProcessor());
  }

  return {
    name: commandName,
    description,
    kind: CommandKind.FILE,
    extensionName,
    action: async (
      context: CommandContext,
      _args: string,
    ): Promise<SlashCommandActionReturn> => {
      if (!context.invocation) {
        console.error(
          `[FileCommandLoader] Critical error: Command '${commandName}' was executed without invocation context.`,
        );
        return {
          type: 'submit_prompt',
          content: [{ text: definition.prompt }], // Fallback to unprocessed prompt
        };
      }

      try {
        let processedContent: PromptPipelineContent = [
          { text: definition.prompt },
        ];
        for (const processor of processors) {
          processedContent = await processor.process(processedContent, context);
        }

        return {
          type: 'submit_prompt',
          content: processedContent,
        };
      } catch (e) {
        // Check if it's our specific error type
        if (e instanceof ConfirmationRequiredError) {
          // Halt and request confirmation from the UI layer.
          return {
            type: 'confirm_shell_commands',
            commandsToConfirm: e.commandsToConfirm,
            originalInvocation: {
              raw: context.invocation.raw,
            },
          };
        }
        // Re-throw other errors to be handled by the global error handler.
        throw e;
      }
    },
  };
}
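A minimal usage sketch of the factory above, showing roughly how the loader is expected to call it. The file paths and the definition are illustrative, not taken from the repository.

```ts
// Sketch only: assumed paths and definition for illustration.
import { createSlashCommandFromDefinition } from './command-factory.js';

const command = createSlashCommandFromDefinition(
  '/home/user/.qwen/commands/git/review.md', // filePath (hypothetical)
  '/home/user/.qwen/commands',               // baseDir
  { prompt: 'Review the current diff. {{args}}', description: 'Review a diff' },
  undefined,                                  // extensionName (none for user commands)
  '.md',                                      // fileExtension used for name stripping
);
// command.name === 'git:review'; with an extensionName of 'my-ext' it would be
// 'my-ext:git:review', and the description would gain the '[my-ext]' prefix.
```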
253 packages/cli/src/services/command-migration-tool.test.ts (new file)
@@ -0,0 +1,253 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { promises as fs } from 'node:fs';
import path from 'node:path';
import os from 'node:os';
import {
  detectTomlCommands,
  migrateTomlCommands,
  generateMigrationPrompt,
} from './command-migration-tool.js';

describe('command-migration-tool', () => {
  let tempDir: string;

  beforeEach(async () => {
    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'qwen-migration-test-'));
  });

  afterEach(async () => {
    await fs.rm(tempDir, { recursive: true, force: true });
  });

  describe('detectTomlCommands', () => {
    it('should detect TOML files in directory', async () => {
      // Create some TOML files
      await fs.writeFile(
        path.join(tempDir, 'cmd1.toml'),
        'prompt = "test"',
        'utf-8',
      );
      await fs.writeFile(
        path.join(tempDir, 'cmd2.toml'),
        'prompt = "test"',
        'utf-8',
      );

      const tomlFiles = await detectTomlCommands(tempDir);

      expect(tomlFiles).toHaveLength(2);
      expect(tomlFiles).toContain('cmd1.toml');
      expect(tomlFiles).toContain('cmd2.toml');
    });

    it('should detect TOML files in subdirectories', async () => {
      const subdir = path.join(tempDir, 'subdir');
      await fs.mkdir(subdir);
      await fs.writeFile(
        path.join(subdir, 'nested.toml'),
        'prompt = "test"',
        'utf-8',
      );

      const tomlFiles = await detectTomlCommands(tempDir);

      expect(tomlFiles).toContain('subdir/nested.toml');
    });

    it('should return empty array for non-existent directory', async () => {
      const nonExistent = path.join(tempDir, 'does-not-exist');

      const tomlFiles = await detectTomlCommands(nonExistent);

      expect(tomlFiles).toEqual([]);
    });

    it('should not detect non-TOML files', async () => {
      await fs.writeFile(path.join(tempDir, 'file.txt'), 'text', 'utf-8');
      await fs.writeFile(path.join(tempDir, 'file.md'), 'markdown', 'utf-8');

      const tomlFiles = await detectTomlCommands(tempDir);

      expect(tomlFiles).toHaveLength(0);
    });
  });

  describe('migrateTomlCommands', () => {
    it('should migrate TOML file to Markdown', async () => {
      const tomlContent = `prompt = "Test prompt"
description = "Test description"`;

      await fs.writeFile(path.join(tempDir, 'test.toml'), tomlContent, 'utf-8');

      const result = await migrateTomlCommands({
        commandDir: tempDir,
        createBackup: true,
        deleteOriginal: false,
      });

      expect(result.success).toBe(true);
      expect(result.convertedFiles).toContain('test.toml');
      expect(result.failedFiles).toHaveLength(0);

      // Check Markdown file was created
      const mdPath = path.join(tempDir, 'test.md');
      const mdContent = await fs.readFile(mdPath, 'utf-8');
      expect(mdContent).toContain('description: Test description');
      expect(mdContent).toContain('Test prompt');

      // Check backup was created (original renamed to .toml.backup)
      const backupPath = path.join(tempDir, 'test.toml.backup');
      const backupExists = await fs
        .access(backupPath)
        .then(() => true)
        .catch(() => false);
      expect(backupExists).toBe(true);

      // Original .toml file should not exist (renamed to .backup)
      const tomlExists = await fs
        .access(path.join(tempDir, 'test.toml'))
        .then(() => true)
        .catch(() => false);
      expect(tomlExists).toBe(false);
    });

    it('should delete original TOML when deleteOriginal is true', async () => {
      await fs.writeFile(
        path.join(tempDir, 'delete-me.toml'),
        'prompt = "Test"',
        'utf-8',
      );

      await migrateTomlCommands({
        commandDir: tempDir,
        createBackup: false,
        deleteOriginal: true,
      });

      // Original should be deleted
      const tomlExists = await fs
        .access(path.join(tempDir, 'delete-me.toml'))
        .then(() => true)
        .catch(() => false);
      expect(tomlExists).toBe(false);

      // Markdown should exist
      const mdExists = await fs
        .access(path.join(tempDir, 'delete-me.md'))
        .then(() => true)
        .catch(() => false);
      expect(mdExists).toBe(true);

      // Backup should not exist (createBackup was false)
      const backupExists = await fs
        .access(path.join(tempDir, 'delete-me.toml.backup'))
        .then(() => true)
        .catch(() => false);
      expect(backupExists).toBe(false);
    });

    it('should fail if Markdown file already exists', async () => {
      await fs.writeFile(
        path.join(tempDir, 'existing.toml'),
        'prompt = "Test"',
        'utf-8',
      );
      await fs.writeFile(
        path.join(tempDir, 'existing.md'),
        'Already exists',
        'utf-8',
      );

      const result = await migrateTomlCommands({
        commandDir: tempDir,
        createBackup: false,
      });

      expect(result.success).toBe(false);
      expect(result.failedFiles).toHaveLength(1);
      expect(result.failedFiles[0].file).toBe('existing.toml');
      expect(result.failedFiles[0].error).toContain('already exists');
    });

    it('should handle migration without backup', async () => {
      await fs.writeFile(
        path.join(tempDir, 'no-backup.toml'),
        'prompt = "Test"',
        'utf-8',
      );

      const result = await migrateTomlCommands({
        commandDir: tempDir,
        createBackup: false,
        deleteOriginal: false,
      });

      expect(result.success).toBe(true);

      // Original TOML file should still exist (no backup, no delete)
      const tomlExists = await fs
        .access(path.join(tempDir, 'no-backup.toml'))
        .then(() => true)
        .catch(() => false);
      expect(tomlExists).toBe(true);

      // Backup should not exist
      const backupExists = await fs
        .access(path.join(tempDir, 'no-backup.toml.backup'))
        .then(() => true)
        .catch(() => false);
      expect(backupExists).toBe(false);
    });

    it('should return success with empty results for no TOML files', async () => {
      const result = await migrateTomlCommands({
        commandDir: tempDir,
      });

      expect(result.success).toBe(true);
      expect(result.convertedFiles).toHaveLength(0);
      expect(result.failedFiles).toHaveLength(0);
    });
  });

  describe('generateMigrationPrompt', () => {
    it('should generate prompt for few files', () => {
      const files = ['cmd1.toml', 'cmd2.toml'];

      const prompt = generateMigrationPrompt(files);

      expect(prompt).toContain('Found 2 command files');
      expect(prompt).toContain('cmd1.toml');
      expect(prompt).toContain('cmd2.toml');
      expect(prompt).toContain('qwen-code migrate-commands');
    });

    it('should truncate file list for many files', () => {
      const files = Array.from({ length: 10 }, (_, i) => `cmd${i}.toml`);

      const prompt = generateMigrationPrompt(files);

      expect(prompt).toContain('Found 10 command files');
      expect(prompt).toContain('... and 7 more');
    });

    it('should return empty string for no files', () => {
      const prompt = generateMigrationPrompt([]);

      expect(prompt).toBe('');
    });

    it('should use singular form for single file', () => {
      const prompt = generateMigrationPrompt(['single.toml']);

      expect(prompt).toContain('Found 1 command file');
      // Don't check for plural since "files" appears in other parts of the message
    });
  });
});
169 packages/cli/src/services/command-migration-tool.ts (new file)
@@ -0,0 +1,169 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * Tool for migrating TOML commands to Markdown format.
 */

import { promises as fs } from 'node:fs';
import path from 'node:path';
import { glob } from 'glob';
import { convertTomlToMarkdown } from '@qwen-code/qwen-code-core';

export interface MigrationResult {
  success: boolean;
  convertedFiles: string[];
  failedFiles: Array<{ file: string; error: string }>;
}

export interface MigrationOptions {
  /** Directory containing command files */
  commandDir: string;
  /** Whether to create backups (default: true) */
  createBackup?: boolean;
  /** Whether to delete original TOML files after migration (default: false) */
  deleteOriginal?: boolean;
}

/**
 * Scans a directory for TOML command files.
 * @param commandDir Directory to scan
 * @returns Array of TOML file paths (relative to commandDir)
 */
export async function detectTomlCommands(
  commandDir: string,
): Promise<string[]> {
  try {
    await fs.access(commandDir);
  } catch {
    // Directory doesn't exist
    return [];
  }

  const tomlFiles = await glob('**/*.toml', {
    cwd: commandDir,
    nodir: true,
    dot: false,
  });

  return tomlFiles;
}

/**
 * Migrates TOML command files to Markdown format.
 * @param options Migration options
 * @returns Migration result with details
 */
export async function migrateTomlCommands(
  options: MigrationOptions,
): Promise<MigrationResult> {
  const { commandDir, createBackup = true, deleteOriginal = false } = options;

  const result: MigrationResult = {
    success: true,
    convertedFiles: [],
    failedFiles: [],
  };

  // Detect TOML files
  const tomlFiles = await detectTomlCommands(commandDir);

  if (tomlFiles.length === 0) {
    return result;
  }

  // Process each TOML file
  for (const relativeFile of tomlFiles) {
    const tomlPath = path.join(commandDir, relativeFile);

    try {
      // Read TOML file
      const tomlContent = await fs.readFile(tomlPath, 'utf-8');

      // Convert to Markdown
      const markdownContent = convertTomlToMarkdown(tomlContent);

      // Generate Markdown file path (same location, .md extension)
      const markdownPath = tomlPath.replace(/\.toml$/, '.md');

      // Check if Markdown file already exists
      try {
        await fs.access(markdownPath);
        throw new Error(
          `Markdown file already exists: ${path.basename(markdownPath)}`,
        );
      } catch (error) {
        if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
          throw error;
        }
        // File doesn't exist, continue
      }

      // Write Markdown file
      await fs.writeFile(markdownPath, markdownContent, 'utf-8');

      // Backup original if requested (rename to .toml.backup)
      if (createBackup) {
        const backupPath = `${tomlPath}.backup`;
        await fs.rename(tomlPath, backupPath);
      } else if (deleteOriginal) {
        // Delete original if requested and no backup
        await fs.unlink(tomlPath);
      }

      result.convertedFiles.push(relativeFile);
    } catch (error) {
      result.success = false;
      result.failedFiles.push({
        file: relativeFile,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }

  return result;
}

/**
 * Generates a migration report message.
 * @param tomlFiles List of TOML files found
 * @returns Human-readable migration prompt message
 */
export function generateMigrationPrompt(tomlFiles: string[]): string {
  if (tomlFiles.length === 0) {
    return '';
  }

  const count = tomlFiles.length;
  const fileList =
    tomlFiles.length <= 5
      ? tomlFiles.map((f) => `  - ${f}`).join('\n')
      : `  - ${tomlFiles.slice(0, 3).join('\n  - ')}\n  - ... and ${tomlFiles.length - 3} more`;

  return `
⚠️ TOML Command Format Deprecation Notice

Found ${count} command file${count > 1 ? 's' : ''} in TOML format:
${fileList}

The TOML format for commands is being deprecated in favor of Markdown format.
Markdown format is more readable and easier to edit.

You can migrate these files automatically using:
  qwen-code migrate-commands

Or manually convert each file:
  - TOML: prompt = "..." / description = "..."
  - Markdown: YAML frontmatter + content

The migration tool will:
✓ Convert TOML files to Markdown
✓ Create backups of original files
✓ Preserve all command functionality

TOML format will continue to work for now, but migration is recommended.
`.trim();
}
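A minimal sketch of the migration flow defined above, under the assumption of a hypothetical user commands directory; the path is illustrative only.

```ts
// Sketch only: wire detectTomlCommands / migrateTomlCommands together.
import {
  detectTomlCommands,
  migrateTomlCommands,
  generateMigrationPrompt,
} from './command-migration-tool.js';

const commandDir = '/home/user/.qwen/commands'; // hypothetical location
const tomlFiles = await detectTomlCommands(commandDir);
if (tomlFiles.length > 0) {
  // Show the deprecation notice, then convert with backups enabled.
  console.log(generateMigrationPrompt(tomlFiles));
  const result = await migrateTomlCommands({
    commandDir,
    createBackup: true,   // originals renamed to *.toml.backup
    deleteOriginal: false,
  });
  console.log(`Converted ${result.convertedFiles.length} file(s).`);
}
```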
144 packages/cli/src/services/markdown-command-parser.test.ts (new file)
@@ -0,0 +1,144 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
parseMarkdownCommand,
|
||||
MarkdownCommandDefSchema,
|
||||
} from './markdown-command-parser.js';
|
||||
|
||||
describe('parseMarkdownCommand', () => {
|
||||
it('should parse markdown with YAML frontmatter', () => {
|
||||
const content = `---
|
||||
description: Test command
|
||||
---
|
||||
|
||||
This is the prompt content.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result).toEqual({
|
||||
frontmatter: {
|
||||
description: 'Test command',
|
||||
},
|
||||
prompt: 'This is the prompt content.',
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse markdown without frontmatter', () => {
|
||||
const content = 'This is just a prompt without frontmatter.';
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result).toEqual({
|
||||
prompt: 'This is just a prompt without frontmatter.',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multi-line prompts', () => {
|
||||
const content = `---
|
||||
description: Multi-line test
|
||||
---
|
||||
|
||||
First line of prompt.
|
||||
Second line of prompt.
|
||||
Third line of prompt.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.prompt).toBe(
|
||||
'First line of prompt.\nSecond line of prompt.\nThird line of prompt.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should trim whitespace from prompt', () => {
|
||||
const content = `---
|
||||
description: Whitespace test
|
||||
---
|
||||
|
||||
Prompt with leading and trailing spaces
|
||||
`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.prompt).toBe('Prompt with leading and trailing spaces');
|
||||
});
|
||||
|
||||
it('should handle empty frontmatter', () => {
|
||||
const content = `---
|
||||
---
|
||||
|
||||
Prompt content after empty frontmatter.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
// Empty YAML frontmatter returns undefined, not {}
|
||||
expect(result.frontmatter).toBeUndefined();
|
||||
expect(result.prompt).toBe('Prompt content after empty frontmatter.');
|
||||
});
|
||||
|
||||
it('should handle invalid YAML frontmatter gracefully', () => {
|
||||
// The YAML parser we use is quite tolerant, so most "invalid" YAML
|
||||
// actually parses successfully. This test verifies that behavior.
|
||||
const content = `---
|
||||
description: test
|
||||
---
|
||||
|
||||
Prompt content.`;
|
||||
|
||||
const result = parseMarkdownCommand(content);
|
||||
|
||||
expect(result.frontmatter).toBeDefined();
|
||||
expect(result.prompt).toBe('Prompt content.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('MarkdownCommandDefSchema', () => {
|
||||
it('should validate valid markdown command def', () => {
|
||||
const validDef = {
|
||||
frontmatter: {
|
||||
description: 'Test description',
|
||||
},
|
||||
prompt: 'Test prompt',
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(validDef);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate markdown command def without frontmatter', () => {
|
||||
const validDef = {
|
||||
prompt: 'Test prompt',
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(validDef);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject command def without prompt', () => {
|
||||
const invalidDef = {
|
||||
frontmatter: {
|
||||
description: 'Test description',
|
||||
},
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject command def with non-string prompt', () => {
|
||||
const invalidDef = {
|
||||
prompt: 123,
|
||||
};
|
||||
|
||||
const result = MarkdownCommandDefSchema.safeParse(invalidDef);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
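These tests use Vitest (imported above). Assuming the repository's standard test setup, the file could be run in isolation with something like: npx vitest run packages/cli/src/services/markdown-command-parser.test.ts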
|
||||
64
packages/cli/src/services/markdown-command-parser.ts
Normal file
@@ -0,0 +1,64 @@
/**
 * @license
 * Copyright 2025 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { z } from 'zod';
import { parse as parseYaml } from '@qwen-code/qwen-code-core';

/**
 * Defines the Zod schema for a Markdown command definition file.
 * The frontmatter contains optional metadata, and the body is the prompt.
 */
export const MarkdownCommandDefSchema = z.object({
  frontmatter: z
    .object({
      description: z.string().optional(),
    })
    .optional(),
  prompt: z.string({
    required_error: 'The prompt content is required.',
    invalid_type_error: 'The prompt content must be a string.',
  }),
});

export type MarkdownCommandDef = z.infer<typeof MarkdownCommandDefSchema>;

/**
 * Parses a Markdown command file with optional YAML frontmatter.
 * @param content The file content
 * @returns Parsed command definition with frontmatter and prompt
 */
export function parseMarkdownCommand(content: string): MarkdownCommandDef {
  // Match YAML frontmatter pattern: ---\n...\n---\n
  // Allow empty frontmatter: ---\n---\n
  // Use (?:[\s\S]*?) to make the frontmatter content optional
  const frontmatterRegex = /^---\n([\s\S]*?)---\n([\s\S]*)$/;
  const match = content.match(frontmatterRegex);

  if (!match) {
    // No frontmatter, entire content is the prompt
    return {
      prompt: content.trim(),
    };
  }

  const [, frontmatterYaml, body] = match;

  // Parse YAML frontmatter if not empty
  let frontmatter: Record<string, unknown> | undefined;
  if (frontmatterYaml.trim()) {
    try {
      frontmatter = parseYaml(frontmatterYaml) as Record<string, unknown>;
    } catch (error) {
      throw new Error(
        `Failed to parse YAML frontmatter: ${error instanceof Error ? error.message : String(error)}`,
      );
    }
  }

  return {
    frontmatter,
    prompt: body.trim(),
  };
}
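For reference, a minimal TypeScript usage sketch of the parser above; the sample command text is hypothetical and not part of this change:

import { parseMarkdownCommand, MarkdownCommandDefSchema } from './markdown-command-parser.js';

// Hypothetical command file content, e.g. read from a user's commands directory.
const fileContent = [
  '---',
  'description: Review the current diff',
  '---',
  '',
  'Review the staged changes and point out likely bugs.',
].join('\n');

const def = parseMarkdownCommand(fileContent);
// def.frontmatter?.description === 'Review the current diff'
// def.prompt === 'Review the staged changes and point out likely bugs.'

// The same shape can be validated the way a loader might validate untrusted input:
// MarkdownCommandDefSchema.safeParse(def).success === true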
||||
@@ -72,7 +72,6 @@ describe('ShellProcessor', () => {
|
||||
getApprovalMode: vi.fn().mockReturnValue(ApprovalMode.DEFAULT),
|
||||
getShouldUseNodePtyShell: vi.fn().mockReturnValue(false),
|
||||
getShellExecutionConfig: vi.fn().mockReturnValue({}),
|
||||
getAllowedTools: vi.fn().mockReturnValue([]),
|
||||
};
|
||||
|
||||
context = createMockCommandContext({
|
||||
@@ -197,35 +196,6 @@ describe('ShellProcessor', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should NOT throw ConfirmationRequiredError when a command matches allowedTools', async () => {
|
||||
const processor = new ShellProcessor('test-command');
|
||||
const prompt: PromptPipelineContent = createPromptPipelineContent(
|
||||
'Do something dangerous: !{rm -rf /}',
|
||||
);
|
||||
mockCheckCommandPermissions.mockReturnValue({
|
||||
allAllowed: false,
|
||||
disallowedCommands: ['rm -rf /'],
|
||||
});
|
||||
(mockConfig.getAllowedTools as Mock).mockReturnValue([
|
||||
'ShellTool(rm -rf /)',
|
||||
]);
|
||||
mockShellExecute.mockReturnValue({
|
||||
result: Promise.resolve({ ...SUCCESS_RESULT, output: 'deleted' }),
|
||||
});
|
||||
|
||||
const result = await processor.process(prompt, context);
|
||||
|
||||
expect(mockShellExecute).toHaveBeenCalledWith(
|
||||
'rm -rf /',
|
||||
expect.any(String),
|
||||
expect.any(Function),
|
||||
expect.any(Object),
|
||||
false,
|
||||
expect.any(Object),
|
||||
);
|
||||
expect(result).toEqual([{ text: 'Do something dangerous: deleted' }]);
|
||||
});
|
||||
|
||||
it('should NOT throw ConfirmationRequiredError if a command is not allowed but approval mode is YOLO', async () => {
|
||||
const processor = new ShellProcessor('test-command');
|
||||
const prompt: PromptPipelineContent = createPromptPipelineContent(
|
||||
|
||||
@@ -7,13 +7,11 @@
|
||||
import {
|
||||
ApprovalMode,
|
||||
checkCommandPermissions,
|
||||
doesToolInvocationMatch,
|
||||
escapeShellArg,
|
||||
getShellConfiguration,
|
||||
ShellExecutionService,
|
||||
flatMapTextParts,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import type { AnyToolInvocation } from '@qwen-code/qwen-code-core';
|
||||
|
||||
import type { CommandContext } from '../../ui/commands/types.js';
|
||||
import type { IPromptProcessor, PromptPipelineContent } from './types.js';
|
||||
@@ -126,15 +124,6 @@ export class ShellProcessor implements IPromptProcessor {
|
||||
// Security check on the final, escaped command string.
|
||||
const { allAllowed, disallowedCommands, blockReason, isHardDenial } =
|
||||
checkCommandPermissions(command, config, sessionShellAllowlist);
|
||||
const allowedTools = config.getAllowedTools() || [];
|
||||
const invocation = {
|
||||
params: { command },
|
||||
} as AnyToolInvocation;
|
||||
const isAllowedBySettings = doesToolInvocationMatch(
|
||||
'run_shell_command',
|
||||
invocation,
|
||||
allowedTools,
|
||||
);
|
||||
|
||||
if (!allAllowed) {
|
||||
if (isHardDenial) {
|
||||
@@ -143,17 +132,10 @@ export class ShellProcessor implements IPromptProcessor {
|
||||
);
|
||||
}
|
||||
|
||||
// If the command is allowed by settings, skip confirmation.
|
||||
if (isAllowedBySettings) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// If not a hard denial, respect YOLO mode and auto-approve.
|
||||
if (config.getApprovalMode() === ApprovalMode.YOLO) {
|
||||
continue;
|
||||
if (config.getApprovalMode() !== ApprovalMode.YOLO) {
|
||||
disallowedCommands.forEach((uc) => commandsToConfirm.add(uc));
|
||||
}
|
||||
|
||||
disallowedCommands.forEach((uc) => commandsToConfirm.add(uc));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
5
packages/cli/src/services/test-commands/example.md
Normal file
@@ -0,0 +1,5 @@
---
description: Example markdown command
---

This is an example prompt from a markdown file.
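Cross-checking this fixture against parseMarkdownCommand above, the expected parse result would be roughly (illustrative, not an assertion added by this change):

// {
//   frontmatter: { description: 'Example markdown command' },
//   prompt: 'This is an example prompt from a markdown file.',
// }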
@@ -1,49 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
EXTENSIONS_CONFIG_FILENAME,
|
||||
INSTALL_METADATA_FILENAME,
|
||||
} from '../config/extension.js';
|
||||
import {
|
||||
type MCPServerConfig,
|
||||
type ExtensionInstallMetadata,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
|
||||
export function createExtension({
|
||||
extensionsDir = 'extensions-dir',
|
||||
name = 'my-extension',
|
||||
version = '1.0.0',
|
||||
addContextFile = false,
|
||||
contextFileName = undefined as string | undefined,
|
||||
mcpServers = {} as Record<string, MCPServerConfig>,
|
||||
installMetadata = undefined as ExtensionInstallMetadata | undefined,
|
||||
} = {}): string {
|
||||
const extDir = path.join(extensionsDir, name);
|
||||
fs.mkdirSync(extDir, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(extDir, EXTENSIONS_CONFIG_FILENAME),
|
||||
JSON.stringify({ name, version, contextFileName, mcpServers }),
|
||||
);
|
||||
|
||||
if (addContextFile) {
|
||||
fs.writeFileSync(path.join(extDir, 'QWEN.md'), 'context');
|
||||
}
|
||||
|
||||
if (contextFileName) {
|
||||
fs.writeFileSync(path.join(extDir, contextFileName), 'context');
|
||||
}
|
||||
|
||||
if (installMetadata) {
|
||||
fs.writeFileSync(
|
||||
path.join(extDir, INSTALL_METADATA_FILENAME),
|
||||
JSON.stringify(installMetadata),
|
||||
);
|
||||
}
|
||||
return extDir;
|
||||
}
|
||||
@@ -6,12 +6,10 @@
|
||||
|
||||
import { render } from 'ink-testing-library';
|
||||
import type React from 'react';
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import { LoadedSettings } from '../config/settings.js';
|
||||
import { KeypressProvider } from '../ui/contexts/KeypressContext.js';
|
||||
import { SettingsContext } from '../ui/contexts/SettingsContext.js';
|
||||
import { ShellFocusContext } from '../ui/contexts/ShellFocusContext.js';
|
||||
import { ConfigContext } from '../ui/contexts/ConfigContext.js';
|
||||
|
||||
const mockSettings = new LoadedSettings(
|
||||
{ path: '', settings: {}, originalSettings: {} },
|
||||
@@ -24,24 +22,14 @@ const mockSettings = new LoadedSettings(
|
||||
|
||||
export const renderWithProviders = (
|
||||
component: React.ReactElement,
|
||||
{
|
||||
shellFocus = true,
|
||||
settings = mockSettings,
|
||||
config = undefined,
|
||||
}: {
|
||||
shellFocus?: boolean;
|
||||
settings?: LoadedSettings;
|
||||
config?: Config;
|
||||
} = {},
|
||||
{ shellFocus = true, settings = mockSettings } = {},
|
||||
): ReturnType<typeof render> =>
|
||||
render(
|
||||
<SettingsContext.Provider value={settings}>
|
||||
<ConfigContext.Provider value={config}>
|
||||
<ShellFocusContext.Provider value={shellFocus}>
|
||||
<KeypressProvider kittyProtocolEnabled={true}>
|
||||
{component}
|
||||
</KeypressProvider>
|
||||
</ShellFocusContext.Provider>
|
||||
</ConfigContext.Provider>
|
||||
<ShellFocusContext.Provider value={shellFocus}>
|
||||
<KeypressProvider kittyProtocolEnabled={true}>
|
||||
{component}
|
||||
</KeypressProvider>
|
||||
</ShellFocusContext.Provider>
|
||||
</SettingsContext.Provider>,
|
||||
);
|
||||
|
||||
@@ -76,7 +76,6 @@ vi.mock('./hooks/useFolderTrust.js');
|
||||
vi.mock('./hooks/useIdeTrustListener.js');
|
||||
vi.mock('./hooks/useMessageQueue.js');
|
||||
vi.mock('./hooks/useAutoAcceptIndicator.js');
|
||||
vi.mock('./hooks/useWorkspaceMigration.js');
|
||||
vi.mock('./hooks/useGitBranchName.js');
|
||||
vi.mock('./contexts/VimModeContext.js');
|
||||
vi.mock('./contexts/SessionContext.js');
|
||||
@@ -103,7 +102,6 @@ import { useFolderTrust } from './hooks/useFolderTrust.js';
|
||||
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
|
||||
import { useMessageQueue } from './hooks/useMessageQueue.js';
|
||||
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
|
||||
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
|
||||
import { useGitBranchName } from './hooks/useGitBranchName.js';
|
||||
import { useVimMode } from './contexts/VimModeContext.js';
|
||||
import { useSessionStats } from './contexts/SessionContext.js';
|
||||
@@ -134,7 +132,6 @@ describe('AppContainer State Management', () => {
|
||||
const mockedUseIdeTrustListener = useIdeTrustListener as Mock;
|
||||
const mockedUseMessageQueue = useMessageQueue as Mock;
|
||||
const mockedUseAutoAcceptIndicator = useAutoAcceptIndicator as Mock;
|
||||
const mockedUseWorkspaceMigration = useWorkspaceMigration as Mock;
|
||||
const mockedUseGitBranchName = useGitBranchName as Mock;
|
||||
const mockedUseVimMode = useVimMode as Mock;
|
||||
const mockedUseSessionStats = useSessionStats as Mock;
|
||||
@@ -239,12 +236,6 @@ describe('AppContainer State Management', () => {
|
||||
getQueuedMessagesText: vi.fn().mockReturnValue(''),
|
||||
});
|
||||
mockedUseAutoAcceptIndicator.mockReturnValue(false);
|
||||
mockedUseWorkspaceMigration.mockReturnValue({
|
||||
showWorkspaceMigrationDialog: false,
|
||||
workspaceExtensions: [],
|
||||
onWorkspaceMigrationDialogOpen: vi.fn(),
|
||||
onWorkspaceMigrationDialogClose: vi.fn(),
|
||||
});
|
||||
mockedUseGitBranchName.mockReturnValue('main');
|
||||
mockedUseVimMode.mockReturnValue({
|
||||
isVimEnabled: false,
|
||||
|
||||
@@ -32,11 +32,13 @@ import {
|
||||
type Config,
|
||||
type IdeInfo,
|
||||
type IdeContext,
|
||||
DEFAULT_GEMINI_FLASH_MODEL,
|
||||
IdeClient,
|
||||
ideContextStore,
|
||||
getErrorMessage,
|
||||
getAllGeminiMdFilenames,
|
||||
ShellExecutionService,
|
||||
Storage,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { buildResumedHistoryItems } from './utils/resumeHistoryUtils.js';
|
||||
import { validateAuthMethod } from '../config/auth.js';
|
||||
@@ -75,6 +77,9 @@ import { useLoadingIndicator } from './hooks/useLoadingIndicator.js';
|
||||
import { useFolderTrust } from './hooks/useFolderTrust.js';
|
||||
import { useIdeTrustListener } from './hooks/useIdeTrustListener.js';
|
||||
import { type IdeIntegrationNudgeResult } from './IdeIntegrationNudge.js';
|
||||
import { type CommandMigrationNudgeResult } from './CommandFormatMigrationNudge.js';
|
||||
import { useCommandMigration } from './hooks/useCommandMigration.js';
|
||||
import { migrateTomlCommands } from '../services/command-migration-tool.js';
|
||||
import { appEvents, AppEvent } from '../utils/events.js';
|
||||
import { type UpdateObject } from './utils/updateCheck.js';
|
||||
import { setUpdateHandler } from '../utils/handleAutoUpdate.js';
|
||||
@@ -82,10 +87,12 @@ import { ConsolePatcher } from './utils/ConsolePatcher.js';
|
||||
import { registerCleanup, runExitCleanup } from '../utils/cleanup.js';
|
||||
import { useMessageQueue } from './hooks/useMessageQueue.js';
|
||||
import { useAutoAcceptIndicator } from './hooks/useAutoAcceptIndicator.js';
|
||||
import { useWorkspaceMigration } from './hooks/useWorkspaceMigration.js';
|
||||
import { useSessionStats } from './contexts/SessionContext.js';
|
||||
import { useGitBranchName } from './hooks/useGitBranchName.js';
|
||||
import { useExtensionUpdates } from './hooks/useExtensionUpdates.js';
|
||||
import {
|
||||
useExtensionUpdates,
|
||||
useConfirmUpdateRequests,
|
||||
} from './hooks/useExtensionUpdates.js';
|
||||
import { ShellFocusContext } from './contexts/ShellFocusContext.js';
|
||||
import { t } from '../i18n/index.js';
|
||||
import { useWelcomeBack } from './hooks/useWelcomeBack.js';
|
||||
@@ -96,6 +103,7 @@ import { processVisionSwitchOutcome } from './hooks/useVisionAutoSwitch.js';
|
||||
import { useSubagentCreateDialog } from './hooks/useSubagentCreateDialog.js';
|
||||
import { useAgentsManagerDialog } from './hooks/useAgentsManagerDialog.js';
|
||||
import { useAttentionNotifications } from './hooks/useAttentionNotifications.js';
|
||||
import { requestConsentInteractive } from '../commands/extensions/consent.js';
|
||||
|
||||
const CTRL_EXIT_PROMPT_DURATION_MS = 1000;
|
||||
|
||||
@@ -156,15 +164,21 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
config.isTrustedFolder(),
|
||||
);
|
||||
|
||||
const extensions = config.getExtensions();
|
||||
const extensionManager = config.getExtensionManager();
|
||||
|
||||
extensionManager.setRequestConsent((description) =>
|
||||
requestConsentInteractive(description, addConfirmUpdateExtensionRequest),
|
||||
);
|
||||
|
||||
const { addConfirmUpdateExtensionRequest, confirmUpdateExtensionRequests } =
|
||||
useConfirmUpdateRequests();
|
||||
|
||||
const {
|
||||
extensionsUpdateState,
|
||||
extensionsUpdateStateInternal,
|
||||
dispatchExtensionStateUpdate,
|
||||
confirmUpdateExtensionRequests,
|
||||
addConfirmUpdateExtensionRequest,
|
||||
} = useExtensionUpdates(
|
||||
extensions,
|
||||
extensionManager,
|
||||
historyManager.addItem,
|
||||
config.getWorkingDir(),
|
||||
);
|
||||
@@ -179,10 +193,15 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
[],
|
||||
);
|
||||
|
||||
// Helper to determine the current model (polled, since Config has no model-change event).
|
||||
const getCurrentModel = useCallback(() => config.getModel(), [config]);
|
||||
// Helper to determine the effective model, considering the fallback state.
|
||||
const getEffectiveModel = useCallback(() => {
|
||||
if (config.isInFallbackMode()) {
|
||||
return DEFAULT_GEMINI_FLASH_MODEL;
|
||||
}
|
||||
return config.getModel();
|
||||
}, [config]);
|
||||
|
||||
const [currentModel, setCurrentModel] = useState(getCurrentModel());
|
||||
const [currentModel, setCurrentModel] = useState(getEffectiveModel());
|
||||
|
||||
const [isConfigInitialized, setConfigInitialized] = useState(false);
|
||||
|
||||
@@ -235,12 +254,12 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
[historyManager.addItem],
|
||||
);
|
||||
|
||||
// Watch for model changes (e.g., user switches model via /model)
|
||||
// Watch for model changes (e.g., from Flash fallback)
|
||||
useEffect(() => {
|
||||
const checkModelChange = () => {
|
||||
const model = getCurrentModel();
|
||||
if (model !== currentModel) {
|
||||
setCurrentModel(model);
|
||||
const effectiveModel = getEffectiveModel();
|
||||
if (effectiveModel !== currentModel) {
|
||||
setCurrentModel(effectiveModel);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -248,7 +267,7 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
const interval = setInterval(checkModelChange, 1000); // Check every second
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, [config, currentModel, getCurrentModel]);
|
||||
}, [config, currentModel, getEffectiveModel]);
|
||||
|
||||
const {
|
||||
consoleMessages,
|
||||
@@ -370,36 +389,37 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
// Check for enforced auth type mismatch
|
||||
useEffect(() => {
|
||||
// Check for initialization error first
|
||||
const currentAuthType = config.modelsConfig.getCurrentAuthType();
|
||||
|
||||
if (
|
||||
settings.merged.security?.auth?.enforcedType &&
|
||||
currentAuthType &&
|
||||
settings.merged.security?.auth.enforcedType !== currentAuthType
|
||||
settings.merged.security?.auth.selectedType &&
|
||||
settings.merged.security?.auth.enforcedType !==
|
||||
settings.merged.security?.auth.selectedType
|
||||
) {
|
||||
onAuthError(
|
||||
t(
|
||||
'Authentication is enforced to be {{enforcedType}}, but you are currently using {{currentType}}.',
|
||||
{
|
||||
enforcedType: String(settings.merged.security?.auth.enforcedType),
|
||||
currentType: String(currentAuthType),
|
||||
enforcedType: settings.merged.security?.auth.enforcedType,
|
||||
currentType: settings.merged.security?.auth.selectedType,
|
||||
},
|
||||
),
|
||||
);
|
||||
} else if (!settings.merged.security?.auth?.useExternal) {
|
||||
// If no authType is selected yet, allow the auth UI flow to prompt the user.
|
||||
// Only validate credentials once a concrete authType exists.
|
||||
if (currentAuthType) {
|
||||
const error = validateAuthMethod(currentAuthType, config);
|
||||
if (error) {
|
||||
onAuthError(error);
|
||||
}
|
||||
} else if (
|
||||
settings.merged.security?.auth?.selectedType &&
|
||||
!settings.merged.security?.auth?.useExternal
|
||||
) {
|
||||
const error = validateAuthMethod(
|
||||
settings.merged.security.auth.selectedType,
|
||||
);
|
||||
if (error) {
|
||||
onAuthError(error);
|
||||
}
|
||||
}
|
||||
}, [
|
||||
settings.merged.security?.auth?.selectedType,
|
||||
settings.merged.security?.auth?.enforcedType,
|
||||
settings.merged.security?.auth?.useExternal,
|
||||
config,
|
||||
onAuthError,
|
||||
]);
|
||||
|
||||
@@ -429,13 +449,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
remount: refreshStatic,
|
||||
});
|
||||
|
||||
const {
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
} = useWorkspaceMigration(settings);
|
||||
|
||||
const { toggleVimEnabled } = useVimMode();
|
||||
|
||||
const {
|
||||
@@ -571,11 +584,11 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
: [],
|
||||
config.getDebugMode(),
|
||||
config.getFileService(),
|
||||
settings.merged,
|
||||
config.getExtensionContextFilePaths(),
|
||||
config.isTrustedFolder(),
|
||||
settings.merged.context?.importFormat || 'tree', // Use setting or default to 'tree'
|
||||
config.getFileFilteringOptions(),
|
||||
config.getDiscoveryMaxDirs(),
|
||||
);
|
||||
|
||||
config.setUserMemory(memoryContent);
|
||||
@@ -838,6 +851,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
!idePromptAnswered,
|
||||
);
|
||||
|
||||
// Command migration nudge
|
||||
const {
|
||||
showMigrationNudge: shouldShowCommandMigrationNudge,
|
||||
tomlFiles: commandMigrationTomlFiles,
|
||||
setShowMigrationNudge: setShowCommandMigrationNudge,
|
||||
} = useCommandMigration(settings, config.storage);
|
||||
|
||||
const [showErrorDetails, setShowErrorDetails] = useState<boolean>(false);
|
||||
const [showToolDescriptions, setShowToolDescriptions] =
|
||||
useState<boolean>(false);
|
||||
@@ -918,12 +938,7 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
const handleIdePromptComplete = useCallback(
|
||||
(result: IdeIntegrationNudgeResult) => {
|
||||
if (result.userSelection === 'yes') {
|
||||
// Check whether the extension has been pre-installed
|
||||
if (result.isExtensionPreInstalled) {
|
||||
handleSlashCommand('/ide enable');
|
||||
} else {
|
||||
handleSlashCommand('/ide install');
|
||||
}
|
||||
handleSlashCommand('/ide install');
|
||||
settings.setValue(SettingScope.User, 'ide.hasSeenNudge', true);
|
||||
} else if (result.userSelection === 'dismiss') {
|
||||
settings.setValue(SettingScope.User, 'ide.hasSeenNudge', true);
|
||||
@@ -933,6 +948,92 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
[handleSlashCommand, settings],
|
||||
);
|
||||
|
||||
const handleCommandMigrationComplete = useCallback(
|
||||
async (result: CommandMigrationNudgeResult) => {
|
||||
setShowCommandMigrationNudge(false);
|
||||
|
||||
if (result.userSelection === 'yes') {
|
||||
// Perform migration for both workspace and user levels
|
||||
try {
|
||||
const results = [];
|
||||
|
||||
// Migrate workspace commands
|
||||
const workspaceCommandsDir = config.storage.getProjectCommandsDir();
|
||||
const workspaceResult = await migrateTomlCommands({
|
||||
commandDir: workspaceCommandsDir,
|
||||
createBackup: true,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
if (
|
||||
workspaceResult.convertedFiles.length > 0 ||
|
||||
workspaceResult.failedFiles.length > 0
|
||||
) {
|
||||
results.push({ level: 'workspace', result: workspaceResult });
|
||||
}
|
||||
|
||||
// Migrate user commands
|
||||
const userCommandsDir = Storage.getUserCommandsDir();
|
||||
const userResult = await migrateTomlCommands({
|
||||
commandDir: userCommandsDir,
|
||||
createBackup: true,
|
||||
deleteOriginal: false,
|
||||
});
|
||||
if (
|
||||
userResult.convertedFiles.length > 0 ||
|
||||
userResult.failedFiles.length > 0
|
||||
) {
|
||||
results.push({ level: 'user', result: userResult });
|
||||
}
|
||||
|
||||
// Report results
|
||||
for (const { level, result: migrationResult } of results) {
|
||||
if (
|
||||
migrationResult.success &&
|
||||
migrationResult.convertedFiles.length > 0
|
||||
) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: `[${level}] Successfully migrated ${migrationResult.convertedFiles.length} command file${migrationResult.convertedFiles.length > 1 ? 's' : ''} to Markdown format. Original files backed up as .toml.backup`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
|
||||
if (migrationResult.failedFiles.length > 0) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: `[${level}] Failed to migrate ${migrationResult.failedFiles.length} file${migrationResult.failedFiles.length > 1 ? 's' : ''}:\n${migrationResult.failedFiles.map((f) => ` • ${f.file}: ${f.error}`).join('\n')}`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (results.length === 0) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'No TOML files found to migrate.',
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
historyManager.addItem(
|
||||
{
|
||||
type: MessageType.ERROR,
|
||||
text: `❌ Migration failed: ${getErrorMessage(error)}`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
[historyManager, setShowCommandMigrationNudge, config.storage],
|
||||
);
|
||||
|
||||
const { elapsedTime, currentLoadingPhrase } = useLoadingIndicator(
|
||||
streamingState,
|
||||
settings.merged.ui?.customWittyPhrases,
|
||||
@@ -1175,8 +1276,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
|
||||
const dialogsVisible =
|
||||
showWelcomeBackDialog ||
|
||||
showWorkspaceMigrationDialog ||
|
||||
shouldShowIdePrompt ||
|
||||
shouldShowCommandMigrationNudge ||
|
||||
isFolderTrustDialogOpen ||
|
||||
!!shellConfirmationRequest ||
|
||||
!!confirmationRequest ||
|
||||
@@ -1242,6 +1343,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
suggestionsWidth,
|
||||
isInputActive,
|
||||
shouldShowIdePrompt,
|
||||
shouldShowCommandMigrationNudge,
|
||||
commandMigrationTomlFiles,
|
||||
isFolderTrustDialogOpen: isFolderTrustDialogOpen ?? false,
|
||||
isTrustedFolder,
|
||||
constrainHeight,
|
||||
@@ -1258,8 +1361,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
historyRemountKey,
|
||||
messageQueue,
|
||||
showAutoAcceptIndicator,
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
currentModel,
|
||||
contextFileNames,
|
||||
errorCount,
|
||||
@@ -1331,6 +1432,8 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
suggestionsWidth,
|
||||
isInputActive,
|
||||
shouldShowIdePrompt,
|
||||
shouldShowCommandMigrationNudge,
|
||||
commandMigrationTomlFiles,
|
||||
isFolderTrustDialogOpen,
|
||||
isTrustedFolder,
|
||||
constrainHeight,
|
||||
@@ -1347,8 +1450,6 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
historyRemountKey,
|
||||
messageQueue,
|
||||
showAutoAcceptIndicator,
|
||||
showWorkspaceMigrationDialog,
|
||||
workspaceExtensions,
|
||||
contextFileNames,
|
||||
errorCount,
|
||||
availableTerminalHeight,
|
||||
@@ -1402,14 +1503,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
setShellModeActive,
|
||||
vimHandleInput,
|
||||
handleIdePromptComplete,
|
||||
handleCommandMigrationComplete,
|
||||
handleFolderTrustSelect,
|
||||
setConstrainHeight,
|
||||
onEscapePromptChange: handleEscapePromptChange,
|
||||
refreshStatic,
|
||||
handleFinalSubmit,
|
||||
handleClearScreen,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
// Vision switch dialog
|
||||
handleVisionSwitchSelect,
|
||||
// Welcome back dialog
|
||||
@@ -1439,14 +1539,13 @@ export const AppContainer = (props: AppContainerProps) => {
|
||||
setShellModeActive,
|
||||
vimHandleInput,
|
||||
handleIdePromptComplete,
|
||||
handleCommandMigrationComplete,
|
||||
handleFolderTrustSelect,
|
||||
setConstrainHeight,
|
||||
handleEscapePromptChange,
|
||||
refreshStatic,
|
||||
handleFinalSubmit,
|
||||
handleClearScreen,
|
||||
onWorkspaceMigrationDialogOpen,
|
||||
onWorkspaceMigrationDialogClose,
|
||||
handleVisionSwitchSelect,
|
||||
handleWelcomeBackSelection,
|
||||
handleWelcomeBackClose,
|
||||
|
||||
90
packages/cli/src/ui/CommandFormatMigrationNudge.tsx
Normal file
@@ -0,0 +1,90 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { Box, Text } from 'ink';
|
||||
import type { RadioSelectItem } from './components/shared/RadioButtonSelect.js';
|
||||
import { RadioButtonSelect } from './components/shared/RadioButtonSelect.js';
|
||||
import { useKeypress } from './hooks/useKeypress.js';
|
||||
import { theme } from './semantic-colors.js';
|
||||
|
||||
export type CommandMigrationNudgeResult = {
|
||||
userSelection: 'yes' | 'no';
|
||||
};
|
||||
|
||||
interface CommandFormatMigrationNudgeProps {
|
||||
tomlFiles: string[];
|
||||
onComplete: (result: CommandMigrationNudgeResult) => void;
|
||||
}
|
||||
|
||||
export function CommandFormatMigrationNudge({
|
||||
tomlFiles,
|
||||
onComplete,
|
||||
}: CommandFormatMigrationNudgeProps) {
|
||||
useKeypress(
|
||||
(key) => {
|
||||
if (key.name === 'escape') {
|
||||
onComplete({
|
||||
userSelection: 'no',
|
||||
});
|
||||
}
|
||||
},
|
||||
{ isActive: true },
|
||||
);
|
||||
|
||||
const OPTIONS: Array<RadioSelectItem<CommandMigrationNudgeResult>> = [
|
||||
{
|
||||
label: 'Yes',
|
||||
value: {
|
||||
userSelection: 'yes',
|
||||
},
|
||||
key: 'Yes',
|
||||
},
|
||||
{
|
||||
label: 'No (esc)',
|
||||
value: {
|
||||
userSelection: 'no',
|
||||
},
|
||||
key: 'No (esc)',
|
||||
},
|
||||
];
|
||||
|
||||
const count = tomlFiles.length;
|
||||
const fileList =
|
||||
count <= 3
|
||||
? tomlFiles.map((f) => ` • ${f}`).join('\n')
|
||||
: ` • ${tomlFiles.slice(0, 2).join('\n • ')}\n • ... and ${count - 2} more`;
|
||||
|
||||
return (
|
||||
<Box
|
||||
flexDirection="column"
|
||||
borderStyle="round"
|
||||
borderColor={theme.status.warning}
|
||||
padding={1}
|
||||
width="100%"
|
||||
marginLeft={1}
|
||||
>
|
||||
<Box marginBottom={1} flexDirection="column">
|
||||
<Text>
|
||||
<Text color={theme.status.warning}>{'⚠️ '}</Text>
|
||||
<Text bold>Command Format Migration</Text>
|
||||
</Text>
|
||||
<Text color={theme.text.secondary}>
|
||||
{`Found ${count} TOML command file${count > 1 ? 's' : ''}:`}
|
||||
</Text>
|
||||
<Text color={theme.text.secondary}>{fileList}</Text>
|
||||
<Text>{''}</Text>
|
||||
<Text color={theme.text.secondary}>
|
||||
The TOML format is deprecated. Would you like to migrate them to
|
||||
Markdown format?
|
||||
</Text>
|
||||
<Text color={theme.text.secondary}>
|
||||
(Backups will be created and original files will be preserved)
|
||||
</Text>
|
||||
</Box>
|
||||
<RadioButtonSelect items={OPTIONS} onSelect={onComplete} />
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -38,7 +38,6 @@ export function IdeIntegrationNudge({
|
||||
);
|
||||
|
||||
const { displayName: ideName } = ide;
|
||||
const isInSandbox = !!process.env['SANDBOX'];
|
||||
// Assume extension is already installed if the env variables are set.
|
||||
const isExtensionPreInstalled =
|
||||
!!process.env['QWEN_CODE_IDE_SERVER_PORT'] &&
|
||||
@@ -71,15 +70,13 @@ export function IdeIntegrationNudge({
|
||||
},
|
||||
];
|
||||
|
||||
const installText = isInSandbox
|
||||
? `Note: In sandbox environments, IDE integration requires manual setup on the host system. If you select Yes, you'll receive instructions on how to set this up.`
|
||||
: isExtensionPreInstalled
|
||||
? `If you select Yes, the CLI will connect to your ${
|
||||
ideName ?? 'editor'
|
||||
} and have access to your open files and display diffs directly.`
|
||||
: `If you select Yes, we'll install an extension that allows the CLI to access your open files and display diffs directly in ${
|
||||
ideName ?? 'your editor'
|
||||
}.`;
|
||||
const installText = isExtensionPreInstalled
|
||||
? `If you select Yes, the CLI will have access to your open files and display diffs directly in ${
|
||||
ideName ?? 'your editor'
|
||||
}.`
|
||||
: `If you select Yes, we'll install an extension that allows the CLI to access your open files and display diffs directly in ${
|
||||
ideName ?? 'your editor'
|
||||
}.`;
|
||||
|
||||
return (
|
||||
<Box
|
||||
|
||||
@@ -6,8 +6,7 @@
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { AuthDialog } from './AuthDialog.js';
|
||||
import { LoadedSettings } from '../../config/settings.js';
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import { LoadedSettings, SettingScope } from '../../config/settings.js';
|
||||
import { AuthType } from '@qwen-code/qwen-code-core';
|
||||
import { renderWithProviders } from '../../test-utils/render.js';
|
||||
import { UIStateContext } from '../contexts/UIStateContext.js';
|
||||
@@ -44,24 +43,17 @@ const renderAuthDialog = (
|
||||
settings: LoadedSettings,
|
||||
uiStateOverrides: Partial<UIState> = {},
|
||||
uiActionsOverrides: Partial<UIActions> = {},
|
||||
configAuthType: AuthType | undefined = undefined,
|
||||
configApiKey: string | undefined = undefined,
|
||||
) => {
|
||||
const uiState = createMockUIState(uiStateOverrides);
|
||||
const uiActions = createMockUIActions(uiActionsOverrides);
|
||||
|
||||
const mockConfig = {
|
||||
getAuthType: vi.fn(() => configAuthType),
|
||||
getContentGeneratorConfig: vi.fn(() => ({ apiKey: configApiKey })),
|
||||
} as unknown as Config;
|
||||
|
||||
return renderWithProviders(
|
||||
<UIStateContext.Provider value={uiState}>
|
||||
<UIActionsContext.Provider value={uiActions}>
|
||||
<AuthDialog />
|
||||
</UIActionsContext.Provider>
|
||||
</UIStateContext.Provider>,
|
||||
{ settings, config: mockConfig },
|
||||
{ settings },
|
||||
);
|
||||
};
|
||||
|
||||
@@ -429,7 +421,6 @@ describe('AuthDialog', () => {
|
||||
settings,
|
||||
{},
|
||||
{ handleAuthSelect },
|
||||
undefined, // config.getAuthType() returns undefined
|
||||
);
|
||||
await wait();
|
||||
|
||||
@@ -484,7 +475,6 @@ describe('AuthDialog', () => {
|
||||
settings,
|
||||
{ authError: 'Initial error' },
|
||||
{ handleAuthSelect },
|
||||
undefined, // config.getAuthType() returns undefined
|
||||
);
|
||||
await wait();
|
||||
|
||||
@@ -538,7 +528,6 @@ describe('AuthDialog', () => {
|
||||
settings,
|
||||
{},
|
||||
{ handleAuthSelect },
|
||||
AuthType.USE_OPENAI, // config.getAuthType() returns USE_OPENAI
|
||||
);
|
||||
await wait();
|
||||
|
||||
@@ -547,7 +536,7 @@ describe('AuthDialog', () => {
|
||||
await wait();
|
||||
|
||||
// Should call handleAuthSelect with undefined to exit
|
||||
expect(handleAuthSelect).toHaveBeenCalledWith(undefined);
|
||||
expect(handleAuthSelect).toHaveBeenCalledWith(undefined, SettingScope.User);
|
||||
unmount();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,12 +8,13 @@ import type React from 'react';
|
||||
import { useState } from 'react';
|
||||
import { AuthType } from '@qwen-code/qwen-code-core';
|
||||
import { Box, Text } from 'ink';
|
||||
import { SettingScope } from '../../config/settings.js';
|
||||
import { Colors } from '../colors.js';
|
||||
import { useKeypress } from '../hooks/useKeypress.js';
|
||||
import { RadioButtonSelect } from '../components/shared/RadioButtonSelect.js';
|
||||
import { useUIState } from '../contexts/UIStateContext.js';
|
||||
import { useUIActions } from '../contexts/UIActionsContext.js';
|
||||
import { useConfig } from '../contexts/ConfigContext.js';
|
||||
import { useSettings } from '../contexts/SettingsContext.js';
|
||||
import { t } from '../../i18n/index.js';
|
||||
|
||||
function parseDefaultAuthType(
|
||||
@@ -31,7 +32,7 @@ function parseDefaultAuthType(
|
||||
export function AuthDialog(): React.JSX.Element {
|
||||
const { pendingAuthType, authError } = useUIState();
|
||||
const { handleAuthSelect: onAuthSelect } = useUIActions();
|
||||
const config = useConfig();
|
||||
const settings = useSettings();
|
||||
|
||||
const [errorMessage, setErrorMessage] = useState<string | null>(null);
|
||||
const [selectedIndex, setSelectedIndex] = useState<number | null>(null);
|
||||
@@ -57,10 +58,9 @@ export function AuthDialog(): React.JSX.Element {
|
||||
return item.value === pendingAuthType;
|
||||
}
|
||||
|
||||
// Priority 2: config.getAuthType() - the source of truth
|
||||
const currentAuthType = config.getAuthType();
|
||||
if (currentAuthType) {
|
||||
return item.value === currentAuthType;
|
||||
// Priority 2: settings.merged.security?.auth?.selectedType
|
||||
if (settings.merged.security?.auth?.selectedType) {
|
||||
return item.value === settings.merged.security?.auth?.selectedType;
|
||||
}
|
||||
|
||||
// Priority 3: QWEN_DEFAULT_AUTH_TYPE env var
|
||||
@@ -76,7 +76,7 @@ export function AuthDialog(): React.JSX.Element {
|
||||
}),
|
||||
);
|
||||
|
||||
const hasApiKey = Boolean(config.getContentGeneratorConfig()?.apiKey);
|
||||
const hasApiKey = Boolean(settings.merged.security?.auth?.apiKey);
|
||||
const currentSelectedAuthType =
|
||||
selectedIndex !== null
|
||||
? items[selectedIndex]?.value
|
||||
@@ -84,7 +84,7 @@ export function AuthDialog(): React.JSX.Element {
|
||||
|
||||
const handleAuthSelect = async (authMethod: AuthType) => {
|
||||
setErrorMessage(null);
|
||||
await onAuthSelect(authMethod);
|
||||
await onAuthSelect(authMethod, SettingScope.User);
|
||||
};
|
||||
|
||||
const handleHighlight = (authMethod: AuthType) => {
|
||||
@@ -100,7 +100,7 @@ export function AuthDialog(): React.JSX.Element {
|
||||
if (errorMessage) {
|
||||
return;
|
||||
}
|
||||
if (config.getAuthType() === undefined) {
|
||||
if (settings.merged.security?.auth?.selectedType === undefined) {
|
||||
// Prevent exiting if no auth method is set
|
||||
setErrorMessage(
|
||||
t(
|
||||
@@ -109,7 +109,7 @@ export function AuthDialog(): React.JSX.Element {
|
||||
);
|
||||
return;
|
||||
}
|
||||
onAuthSelect(undefined);
|
||||
onAuthSelect(undefined, SettingScope.User);
|
||||
}
|
||||
},
|
||||
{ isActive: true },
|
||||
|
||||
@@ -4,16 +4,16 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import type { Config, ModelProvidersConfig } from '@qwen-code/qwen-code-core';
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
AuthEvent,
|
||||
AuthType,
|
||||
clearCachedCredentialFile,
|
||||
getErrorMessage,
|
||||
logAuth,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import type { LoadedSettings } from '../../config/settings.js';
|
||||
import { getPersistScopeForModelSelection } from '../../config/modelProvidersScope.js';
|
||||
import type { LoadedSettings, SettingScope } from '../../config/settings.js';
|
||||
import type { OpenAICredentials } from '../components/OpenAIKeyPrompt.js';
|
||||
import { useQwenAuth } from '../hooks/useQwenAuth.js';
|
||||
import { AuthState, MessageType } from '../types.js';
|
||||
@@ -27,7 +27,8 @@ export const useAuthCommand = (
|
||||
config: Config,
|
||||
addItem: (item: Omit<HistoryItem, 'id'>, timestamp: number) => void,
|
||||
) => {
|
||||
const unAuthenticated = config.getAuthType() === undefined;
|
||||
const unAuthenticated =
|
||||
settings.merged.security?.auth?.selectedType === undefined;
|
||||
|
||||
const [authState, setAuthState] = useState<AuthState>(
|
||||
unAuthenticated ? AuthState.Updating : AuthState.Unauthenticated,
|
||||
@@ -80,35 +81,35 @@ export const useAuthCommand = (
|
||||
);
|
||||
|
||||
const handleAuthSuccess = useCallback(
|
||||
async (authType: AuthType, credentials?: OpenAICredentials) => {
|
||||
async (
|
||||
authType: AuthType,
|
||||
scope: SettingScope,
|
||||
credentials?: OpenAICredentials,
|
||||
) => {
|
||||
try {
|
||||
const authTypeScope = getPersistScopeForModelSelection(settings);
|
||||
settings.setValue(
|
||||
authTypeScope,
|
||||
'security.auth.selectedType',
|
||||
authType,
|
||||
);
|
||||
settings.setValue(scope, 'security.auth.selectedType', authType);
|
||||
|
||||
// Only update credentials if not switching to QWEN_OAUTH,
|
||||
// so that OpenAI credentials are preserved when switching to QWEN_OAUTH.
|
||||
if (authType !== AuthType.QWEN_OAUTH && credentials) {
|
||||
if (credentials?.apiKey != null) {
|
||||
settings.setValue(
|
||||
authTypeScope,
|
||||
scope,
|
||||
'security.auth.apiKey',
|
||||
credentials.apiKey,
|
||||
);
|
||||
}
|
||||
if (credentials?.baseUrl != null) {
|
||||
settings.setValue(
|
||||
authTypeScope,
|
||||
scope,
|
||||
'security.auth.baseUrl',
|
||||
credentials.baseUrl,
|
||||
);
|
||||
}
|
||||
if (credentials?.model != null) {
|
||||
settings.setValue(authTypeScope, 'model.name', credentials.model);
|
||||
settings.setValue(scope, 'model.name', credentials.model);
|
||||
}
|
||||
await clearCachedCredentialFile();
|
||||
}
|
||||
} catch (error) {
|
||||
handleAuthFailure(error);
|
||||
@@ -140,10 +141,14 @@ export const useAuthCommand = (
|
||||
);
|
||||
|
||||
const performAuth = useCallback(
|
||||
async (authType: AuthType, credentials?: OpenAICredentials) => {
|
||||
async (
|
||||
authType: AuthType,
|
||||
scope: SettingScope,
|
||||
credentials?: OpenAICredentials,
|
||||
) => {
|
||||
try {
|
||||
await config.refreshAuth(authType);
|
||||
handleAuthSuccess(authType, credentials);
|
||||
handleAuthSuccess(authType, scope, credentials);
|
||||
} catch (e) {
|
||||
handleAuthFailure(e);
|
||||
}
|
||||
@@ -151,51 +156,18 @@ export const useAuthCommand = (
|
||||
[config, handleAuthSuccess, handleAuthFailure],
|
||||
);
|
||||
|
||||
const isProviderManagedModel = useCallback(
|
||||
(authType: AuthType, modelId: string | undefined) => {
|
||||
if (!modelId) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const modelProviders = settings.merged.modelProviders as
|
||||
| ModelProvidersConfig
|
||||
| undefined;
|
||||
if (!modelProviders) {
|
||||
return false;
|
||||
}
|
||||
const providerModels = modelProviders[authType];
|
||||
if (!Array.isArray(providerModels)) {
|
||||
return false;
|
||||
}
|
||||
return providerModels.some(
|
||||
(providerModel) => providerModel.id === modelId,
|
||||
);
|
||||
},
|
||||
[settings],
|
||||
);
|
||||
|
||||
const handleAuthSelect = useCallback(
|
||||
async (authType: AuthType | undefined, credentials?: OpenAICredentials) => {
|
||||
async (
|
||||
authType: AuthType | undefined,
|
||||
scope: SettingScope,
|
||||
credentials?: OpenAICredentials,
|
||||
) => {
|
||||
if (!authType) {
|
||||
setIsAuthDialogOpen(false);
|
||||
setAuthError(null);
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
authType === AuthType.USE_OPENAI &&
|
||||
credentials?.model &&
|
||||
isProviderManagedModel(authType, credentials.model)
|
||||
) {
|
||||
onAuthError(
|
||||
t(
|
||||
'Model "{{modelName}}" is managed via settings.modelProviders. Please complete the fields in settings, or use another model id.',
|
||||
{ modelName: credentials.model },
|
||||
),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
setPendingAuthType(authType);
|
||||
setAuthError(null);
|
||||
setIsAuthDialogOpen(false);
|
||||
@@ -208,14 +180,14 @@ export const useAuthCommand = (
|
||||
baseUrl: credentials.baseUrl,
|
||||
model: credentials.model,
|
||||
});
|
||||
await performAuth(authType, credentials);
|
||||
await performAuth(authType, scope, credentials);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
await performAuth(authType);
|
||||
await performAuth(authType, scope);
|
||||
},
|
||||
[config, performAuth, isProviderManagedModel, onAuthError],
|
||||
[config, performAuth],
|
||||
);
|
||||
|
||||
const openAuthDialog = useCallback(() => {
|
||||
|
||||
@@ -4,28 +4,31 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { approvalModeCommand } from './approvalModeCommand.js';
|
||||
import {
|
||||
type CommandContext,
|
||||
CommandKind,
|
||||
type OpenDialogActionReturn,
|
||||
type MessageActionReturn,
|
||||
} from './types.js';
|
||||
import { createMockCommandContext } from '../../test-utils/mockCommandContext.js';
|
||||
import type { LoadedSettings } from '../../config/settings.js';
|
||||
|
||||
describe('approvalModeCommand', () => {
|
||||
let mockContext: CommandContext;
|
||||
let mockSetApprovalMode: ReturnType<typeof vi.fn>;
|
||||
|
||||
beforeEach(() => {
|
||||
mockSetApprovalMode = vi.fn();
|
||||
mockContext = createMockCommandContext({
|
||||
services: {
|
||||
config: {
|
||||
getApprovalMode: () => 'default',
|
||||
setApprovalMode: mockSetApprovalMode,
|
||||
setApprovalMode: () => {},
|
||||
},
|
||||
settings: {
|
||||
merged: {},
|
||||
setValue: () => {},
|
||||
forScope: () => ({}),
|
||||
} as unknown as LoadedSettings,
|
||||
},
|
||||
});
|
||||
});
|
||||
@@ -38,7 +41,7 @@ describe('approvalModeCommand', () => {
|
||||
expect(approvalModeCommand.kind).toBe(CommandKind.BUILT_IN);
|
||||
});
|
||||
|
||||
it('should open approval mode dialog when invoked without arguments', async () => {
|
||||
it('should open approval mode dialog when invoked', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
'',
|
||||
@@ -48,123 +51,16 @@ describe('approvalModeCommand', () => {
|
||||
expect(result.dialog).toBe('approval-mode');
|
||||
});
|
||||
|
||||
it('should open approval mode dialog when invoked with whitespace only', async () => {
|
||||
it('should open approval mode dialog with arguments (ignored)', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
' ',
|
||||
'some arguments',
|
||||
)) as OpenDialogActionReturn;
|
||||
|
||||
expect(result.type).toBe('dialog');
|
||||
expect(result.dialog).toBe('approval-mode');
|
||||
});
|
||||
|
||||
describe('direct mode setting (session-only)', () => {
|
||||
it('should set approval mode to "plan" when argument is "plan"', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
'plan',
|
||||
)) as MessageActionReturn;
|
||||
|
||||
expect(result.type).toBe('message');
|
||||
expect(result.messageType).toBe('info');
|
||||
expect(result.content).toContain('plan');
|
||||
expect(mockSetApprovalMode).toHaveBeenCalledWith('plan');
|
||||
});
|
||||
|
||||
it('should set approval mode to "yolo" when argument is "yolo"', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
'yolo',
|
||||
)) as MessageActionReturn;
|
||||
|
||||
expect(result.type).toBe('message');
|
||||
expect(result.messageType).toBe('info');
|
||||
expect(result.content).toContain('yolo');
|
||||
expect(mockSetApprovalMode).toHaveBeenCalledWith('yolo');
|
||||
});
|
||||
|
||||
it('should set approval mode to "auto-edit" when argument is "auto-edit"', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
'auto-edit',
|
||||
)) as MessageActionReturn;
|
||||
|
||||
expect(result.type).toBe('message');
|
||||
expect(result.messageType).toBe('info');
|
||||
expect(result.content).toContain('auto-edit');
|
||||
expect(mockSetApprovalMode).toHaveBeenCalledWith('auto-edit');
|
||||
});
|
||||
|
||||
it('should set approval mode to "default" when argument is "default"', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
'default',
|
||||
)) as MessageActionReturn;
|
||||
|
||||
expect(result.type).toBe('message');
|
||||
expect(result.messageType).toBe('info');
|
||||
expect(result.content).toContain('default');
|
||||
expect(mockSetApprovalMode).toHaveBeenCalledWith('default');
|
||||
});
|
||||
|
||||
it('should be case-insensitive for mode argument', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
'YOLO',
|
||||
)) as MessageActionReturn;
|
||||
|
||||
expect(result.type).toBe('message');
|
||||
expect(result.messageType).toBe('info');
|
||||
expect(mockSetApprovalMode).toHaveBeenCalledWith('yolo');
|
||||
});
|
||||
|
||||
it('should handle argument with leading/trailing whitespace', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
' plan ',
|
||||
)) as MessageActionReturn;
|
||||
|
||||
expect(result.type).toBe('message');
|
||||
expect(result.messageType).toBe('info');
|
||||
expect(mockSetApprovalMode).toHaveBeenCalledWith('plan');
|
||||
});
|
||||
});
|
||||
|
||||
describe('invalid mode argument', () => {
|
||||
it('should return error for invalid mode', async () => {
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
'invalid-mode',
|
||||
)) as MessageActionReturn;
|
||||
|
||||
expect(result.type).toBe('message');
|
||||
expect(result.messageType).toBe('error');
|
||||
expect(result.content).toContain('invalid-mode');
|
||||
expect(result.content).toContain('plan');
|
||||
expect(result.content).toContain('yolo');
|
||||
expect(mockSetApprovalMode).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('untrusted folder handling', () => {
|
||||
it('should return error when setApprovalMode throws (e.g., untrusted folder)', async () => {
|
||||
const errorMessage =
|
||||
'Cannot enable privileged approval modes in an untrusted folder.';
|
||||
mockSetApprovalMode.mockImplementation(() => {
|
||||
throw new Error(errorMessage);
|
||||
});
|
||||
|
||||
const result = (await approvalModeCommand.action?.(
|
||||
mockContext,
|
||||
'yolo',
|
||||
)) as MessageActionReturn;
|
||||
|
||||
expect(result.type).toBe('message');
|
||||
expect(result.messageType).toBe('error');
|
||||
expect(result.content).toBe(errorMessage);
|
||||
});
|
||||
});
|
||||
|
||||
it('should not have subcommands', () => {
|
||||
expect(approvalModeCommand.subCommands).toBeUndefined();
|
||||
});
|
||||
|
||||
@@ -8,25 +8,9 @@ import type {
|
||||
SlashCommand,
|
||||
CommandContext,
|
||||
OpenDialogActionReturn,
|
||||
MessageActionReturn,
|
||||
} from './types.js';
|
||||
import { CommandKind } from './types.js';
|
||||
import { t } from '../../i18n/index.js';
|
||||
import type { ApprovalMode } from '@qwen-code/qwen-code-core';
|
||||
import { APPROVAL_MODES } from '@qwen-code/qwen-code-core';
|
||||
|
||||
/**
|
||||
* Parses the argument string and returns the corresponding ApprovalMode if valid.
|
||||
* Returns undefined if the argument is empty or not a valid mode.
|
||||
*/
|
||||
function parseApprovalModeArg(arg: string): ApprovalMode | undefined {
|
||||
const trimmed = arg.trim().toLowerCase();
|
||||
if (!trimmed) {
|
||||
return undefined;
|
||||
}
|
||||
// Match against valid approval modes (case-insensitive)
|
||||
return APPROVAL_MODES.find((mode) => mode.toLowerCase() === trimmed);
|
||||
}
|
||||
|
||||
export const approvalModeCommand: SlashCommand = {
|
||||
name: 'approval-mode',
|
||||
@@ -35,49 +19,10 @@ export const approvalModeCommand: SlashCommand = {
|
||||
},
|
||||
kind: CommandKind.BUILT_IN,
|
||||
action: async (
|
||||
context: CommandContext,
|
||||
args: string,
|
||||
): Promise<OpenDialogActionReturn | MessageActionReturn> => {
|
||||
const mode = parseApprovalModeArg(args);
|
||||
|
||||
// If no argument provided, open the dialog
|
||||
if (!args.trim()) {
|
||||
return {
|
||||
type: 'dialog',
|
||||
dialog: 'approval-mode',
|
||||
};
|
||||
}
|
||||
|
||||
// If invalid argument, return error message with valid options
|
||||
if (!mode) {
|
||||
return {
|
||||
type: 'message',
|
||||
messageType: 'error',
|
||||
content: t('Invalid approval mode "{{arg}}". Valid modes: {{modes}}', {
|
||||
arg: args.trim(),
|
||||
modes: APPROVAL_MODES.join(', '),
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
// Set the mode for current session only (not persisted)
|
||||
const { config } = context.services;
|
||||
if (config) {
|
||||
try {
|
||||
config.setApprovalMode(mode);
|
||||
} catch (e) {
|
||||
return {
|
||||
type: 'message',
|
||||
messageType: 'error',
|
||||
content: (e as Error).message,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'message',
|
||||
messageType: 'info',
|
||||
content: t('Approval mode set to "{{mode}}"', { mode }),
|
||||
};
|
||||
},
|
||||
_context: CommandContext,
|
||||
_args: string,
|
||||
): Promise<OpenDialogActionReturn> => ({
|
||||
type: 'dialog',
|
||||
dialog: 'approval-mode',
|
||||
}),
|
||||
};
|
||||
|
||||
@@ -4,13 +4,6 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { requestConsentInteractive } from '../../config/extension.js';
|
||||
import {
|
||||
updateAllUpdatableExtensions,
|
||||
type ExtensionUpdateInfo,
|
||||
updateExtension,
|
||||
checkForAllExtensionUpdates,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { getErrorMessage } from '../../utils/errors.js';
|
||||
import { ExtensionUpdateState } from '../state/extensions.js';
|
||||
import { MessageType } from '../types.js';
|
||||
@@ -20,8 +13,34 @@ import {
|
||||
CommandKind,
|
||||
} from './types.js';
|
||||
import { t } from '../../i18n/index.js';
|
||||
import type { ExtensionUpdateInfo } from '@qwen-code/qwen-code-core';
|
||||
|
||||
function showMessageIfNoExtensions(
|
||||
context: CommandContext,
|
||||
extensions: unknown[],
|
||||
): boolean {
|
||||
if (extensions.length === 0) {
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: 'No extensions installed. Run `/extensions explore` to check out the gallery.',
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function listAction(context: CommandContext) {
|
||||
const extensions = context.services.config
|
||||
? context.services.config.getExtensions()
|
||||
: [];
|
||||
|
||||
if (showMessageIfNoExtensions(context, extensions)) {
|
||||
return;
|
||||
}
|
||||
|
||||
context.ui.addItem(
|
||||
{
|
||||
type: MessageType.EXTENSIONS_LIST,
|
||||
@@ -34,7 +53,6 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
const updateArgs = args.split(' ').filter((value) => value.length > 0);
|
||||
const all = updateArgs.length === 1 && updateArgs[0] === '--all';
|
||||
const names = all ? undefined : updateArgs;
|
||||
let updateInfos: ExtensionUpdateInfo[] = [];
|
||||
|
||||
if (!all && names?.length === 0) {
|
||||
context.ui.addItem(
|
||||
@@ -47,29 +65,40 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
return;
|
||||
}
|
||||
|
||||
let updateInfos: ExtensionUpdateInfo[] = [];
|
||||
|
||||
const extensionManager = context.services.config!.getExtensionManager();
|
||||
const extensions = context.services.config
|
||||
? context.services.config.getExtensions()
|
||||
: [];
|
||||
|
||||
if (showMessageIfNoExtensions(context, extensions)) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
try {
|
||||
await checkForAllExtensionUpdates(
|
||||
context.services.config!.getExtensions(),
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
|
||||
await extensionManager.checkForAllExtensionUpdates((extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
context.ui.dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
|
||||
|
||||
context.ui.setPendingItem({
|
||||
type: MessageType.EXTENSIONS_LIST,
|
||||
});
|
||||
if (all) {
|
||||
updateInfos = await updateAllUpdatableExtensions(
|
||||
context.services.config!.getWorkingDir(),
|
||||
// We don't have the ability to prompt for consent yet in this flow.
|
||||
(description) =>
|
||||
requestConsentInteractive(
|
||||
description,
|
||||
context.ui.addConfirmUpdateExtensionRequest,
|
||||
),
|
||||
context.services.config!.getExtensions(),
|
||||
updateInfos = await extensionManager.updateAllUpdatableExtensions(
|
||||
context.ui.extensionsUpdateState,
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
(extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
} else if (names?.length) {
|
||||
const workingDir = context.services.config!.getWorkingDir();
|
||||
const extensions = context.services.config!.getExtensions();
|
||||
for (const name of names) {
|
||||
const extension = extensions.find(
|
||||
@@ -85,17 +114,15 @@ async function updateAction(context: CommandContext, args: string) {
|
||||
);
|
||||
continue;
|
||||
}
|
||||
const updateInfo = await updateExtension(
|
||||
const updateInfo = await extensionManager.updateExtension(
|
||||
extension,
|
||||
workingDir,
|
||||
(description) =>
|
||||
requestConsentInteractive(
|
||||
description,
|
||||
context.ui.addConfirmUpdateExtensionRequest,
|
||||
),
|
||||
context.ui.extensionsUpdateState.get(extension.name)?.status ??
|
||||
ExtensionUpdateState.UNKNOWN,
|
||||
context.ui.dispatchExtensionStateUpdate,
|
||||
(extensionName, state) =>
|
||||
context.ui.dispatchExtensionStateUpdate({
|
||||
type: 'SET_STATE',
|
||||
payload: { name: extensionName, state },
|
||||
}),
|
||||
);
|
||||
if (updateInfo) updateInfos.push(updateInfo);
|
||||
}
|
||||
|
||||
@@ -191,23 +191,11 @@ export const ideCommand = async (): Promise<SlashCommand> => {
kind: CommandKind.BUILT_IN,
action: async (context) => {
const installer = getIdeInstaller(currentIDE);
const isSandBox = !!process.env['SANDBOX'];
if (isSandBox) {
context.ui.addItem(
{
type: 'info',
text: `IDE integration needs to be installed on the host. If you have already installed it, you can directly connect the ide`,
},
Date.now(),
);
return;
}
if (!installer) {
const ideName = ideClient.getDetectedIdeDisplayName();
context.ui.addItem(
{
type: 'error',
text: `Automatic installation is not supported for ${ideName}. Please install the '${QWEN_CODE_COMPANION_EXTENSION_NAME}' extension manually from the marketplace.`,
text: `No installer is available for ${ideClient.getDetectedIdeDisplayName()}. Please install the '${QWEN_CODE_COMPANION_EXTENSION_NAME}' extension manually from the marketplace.`,
},
Date.now(),
);

@@ -11,14 +11,9 @@ import type { SlashCommand, type CommandContext } from './types.js';
|
||||
import { createMockCommandContext } from '../../test-utils/mockCommandContext.js';
|
||||
import { MessageType } from '../types.js';
|
||||
import type { LoadedSettings } from '../../config/settings.js';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import {
|
||||
getErrorMessage,
|
||||
loadServerHierarchicalMemory,
|
||||
QWEN_DIR,
|
||||
setGeminiMdFilename,
|
||||
type FileDiscoveryService,
|
||||
type LoadServerHierarchicalMemoryResponse,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
@@ -36,18 +31,7 @@ vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('node:fs/promises', () => {
|
||||
const readFile = vi.fn();
|
||||
return {
|
||||
readFile,
|
||||
default: {
|
||||
readFile,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
const mockLoadServerHierarchicalMemory = loadServerHierarchicalMemory as Mock;
|
||||
const mockReadFile = readFile as unknown as Mock;
|
||||
|
||||
describe('memoryCommand', () => {
|
||||
let mockContext: CommandContext;
|
||||
@@ -68,10 +52,6 @@ describe('memoryCommand', () => {
|
||||
let mockGetGeminiMdFileCount: Mock;
|
||||
|
||||
beforeEach(() => {
|
||||
setGeminiMdFilename('QWEN.md');
|
||||
mockReadFile.mockReset();
|
||||
vi.restoreAllMocks();
|
||||
|
||||
showCommand = getSubCommand('show');
|
||||
|
||||
mockGetUserMemory = vi.fn();
|
||||
@@ -122,52 +102,6 @@ describe('memoryCommand', () => {
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should show project memory from the configured context file', async () => {
|
||||
const projectCommand = showCommand.subCommands?.find(
|
||||
(cmd) => cmd.name === '--project',
|
||||
);
|
||||
if (!projectCommand?.action) throw new Error('Command has no action');
|
||||
|
||||
setGeminiMdFilename('AGENTS.md');
|
||||
vi.spyOn(process, 'cwd').mockReturnValue('/test/project');
|
||||
mockReadFile.mockResolvedValue('project memory');
|
||||
|
||||
await projectCommand.action(mockContext, '');
|
||||
|
||||
const expectedProjectPath = path.join('/test/project', 'AGENTS.md');
|
||||
expect(mockReadFile).toHaveBeenCalledWith(expectedProjectPath, 'utf-8');
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: expect.stringContaining(expectedProjectPath),
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
|
||||
it('should show global memory from the configured context file', async () => {
|
||||
const globalCommand = showCommand.subCommands?.find(
|
||||
(cmd) => cmd.name === '--global',
|
||||
);
|
||||
if (!globalCommand?.action) throw new Error('Command has no action');
|
||||
|
||||
setGeminiMdFilename('AGENTS.md');
|
||||
vi.spyOn(os, 'homedir').mockReturnValue('/home/user');
|
||||
mockReadFile.mockResolvedValue('global memory');
|
||||
|
||||
await globalCommand.action(mockContext, '');
|
||||
|
||||
const expectedGlobalPath = path.join('/home/user', QWEN_DIR, 'AGENTS.md');
|
||||
expect(mockReadFile).toHaveBeenCalledWith(expectedGlobalPath, 'utf-8');
|
||||
expect(mockContext.ui.addItem).toHaveBeenCalledWith(
|
||||
{
|
||||
type: MessageType.INFO,
|
||||
text: expect.stringContaining('Global memory content'),
|
||||
},
|
||||
expect.any(Number),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('/memory add', () => {
|
||||
|
||||
@@ -6,13 +6,12 @@

import {
getErrorMessage,
getCurrentGeminiMdFilename,
loadServerHierarchicalMemory,
QWEN_DIR,
} from '@qwen-code/qwen-code-core';
import path from 'node:path';
import os from 'node:os';
import fs from 'node:fs/promises';
import os from 'os';
import fs from 'fs/promises';
import { MessageType } from '../types.js';
import type { SlashCommand, SlashCommandActionReturn } from './types.js';
import { CommandKind } from './types.js';
@@ -57,12 +56,7 @@ export const memoryCommand: SlashCommand = {
kind: CommandKind.BUILT_IN,
action: async (context) => {
try {
const workingDir =
context.services.config?.getWorkingDir?.() ?? process.cwd();
const projectMemoryPath = path.join(
workingDir,
getCurrentGeminiMdFilename(),
);
const projectMemoryPath = path.join(process.cwd(), 'QWEN.md');
const memoryContent = await fs.readFile(
projectMemoryPath,
'utf-8',
@@ -110,7 +104,7 @@ export const memoryCommand: SlashCommand = {
const globalMemoryPath = path.join(
os.homedir(),
QWEN_DIR,
getCurrentGeminiMdFilename(),
'QWEN.md',
);
const globalMemoryContent = await fs.readFile(
globalMemoryPath,

@@ -13,6 +13,12 @@ import {
|
||||
type ContentGeneratorConfig,
|
||||
type Config,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import * as availableModelsModule from '../models/availableModels.js';
|
||||
|
||||
// Mock the availableModels module
|
||||
vi.mock('../models/availableModels.js', () => ({
|
||||
getAvailableModelsForAuthType: vi.fn(),
|
||||
}));
|
||||
|
||||
// Helper function to create a mock config
|
||||
function createMockConfig(
|
||||
@@ -25,6 +31,9 @@ function createMockConfig(
|
||||
|
||||
describe('modelCommand', () => {
|
||||
let mockContext: CommandContext;
|
||||
const mockGetAvailableModelsForAuthType = vi.mocked(
|
||||
availableModelsModule.getAvailableModelsForAuthType,
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
mockContext = createMockCommandContext();
|
||||
@@ -78,6 +87,10 @@ describe('modelCommand', () => {
|
||||
});
|
||||
|
||||
it('should return dialog action for QWEN_OAUTH auth type', async () => {
|
||||
mockGetAvailableModelsForAuthType.mockReturnValue([
|
||||
{ id: 'qwen3-coder-plus', label: 'qwen3-coder-plus' },
|
||||
]);
|
||||
|
||||
const mockConfig = createMockConfig({
|
||||
model: 'test-model',
|
||||
authType: AuthType.QWEN_OAUTH,
|
||||
@@ -92,7 +105,11 @@ describe('modelCommand', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should return dialog action for USE_OPENAI auth type', async () => {
|
||||
it('should return dialog action for USE_OPENAI auth type when model is available', async () => {
|
||||
mockGetAvailableModelsForAuthType.mockReturnValue([
|
||||
{ id: 'gpt-4', label: 'gpt-4' },
|
||||
]);
|
||||
|
||||
const mockConfig = createMockConfig({
|
||||
model: 'test-model',
|
||||
authType: AuthType.USE_OPENAI,
|
||||
@@ -107,7 +124,28 @@ describe('modelCommand', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should return dialog action for unsupported auth types', async () => {
|
||||
it('should return error for USE_OPENAI auth type when no model is available', async () => {
|
||||
mockGetAvailableModelsForAuthType.mockReturnValue([]);
|
||||
|
||||
const mockConfig = createMockConfig({
|
||||
model: 'test-model',
|
||||
authType: AuthType.USE_OPENAI,
|
||||
});
|
||||
mockContext.services.config = mockConfig as Config;
|
||||
|
||||
const result = await modelCommand.action!(mockContext, '');
|
||||
|
||||
expect(result).toEqual({
|
||||
type: 'message',
|
||||
messageType: 'error',
|
||||
content:
|
||||
'No models available for the current authentication type (openai).',
|
||||
});
|
||||
});
|
||||
|
||||
it('should return error for unsupported auth types', async () => {
|
||||
mockGetAvailableModelsForAuthType.mockReturnValue([]);
|
||||
|
||||
const mockConfig = createMockConfig({
|
||||
model: 'test-model',
|
||||
authType: 'UNSUPPORTED_AUTH_TYPE' as AuthType,
|
||||
@@ -117,8 +155,10 @@ describe('modelCommand', () => {
|
||||
const result = await modelCommand.action!(mockContext, '');
|
||||
|
||||
expect(result).toEqual({
|
||||
type: 'dialog',
|
||||
dialog: 'model',
|
||||
type: 'message',
|
||||
messageType: 'error',
|
||||
content:
|
||||
'No models available for the current authentication type (UNSUPPORTED_AUTH_TYPE).',
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ import type {
|
||||
MessageActionReturn,
|
||||
} from './types.js';
|
||||
import { CommandKind } from './types.js';
|
||||
import { getAvailableModelsForAuthType } from '../models/availableModels.js';
|
||||
import { t } from '../../i18n/index.js';
|
||||
|
||||
export const modelCommand: SlashCommand = {
|
||||
@@ -29,7 +30,7 @@ export const modelCommand: SlashCommand = {
|
||||
return {
|
||||
type: 'message',
|
||||
messageType: 'error',
|
||||
content: t('Configuration not available.'),
|
||||
content: 'Configuration not available.',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -51,6 +52,22 @@ export const modelCommand: SlashCommand = {
|
||||
};
|
||||
}
|
||||
|
||||
const availableModels = getAvailableModelsForAuthType(authType);
|
||||
|
||||
if (availableModels.length === 0) {
|
||||
return {
|
||||
type: 'message',
|
||||
messageType: 'error',
|
||||
content: t(
|
||||
'No models available for the current authentication type ({{authType}}).',
|
||||
{
|
||||
authType,
|
||||
},
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
// Trigger model selection dialog
|
||||
return {
|
||||
type: 'dialog',
|
||||
dialog: 'model',
|
||||
|
||||
@@ -54,7 +54,7 @@ export function ApprovalModeDialog({
}: ApprovalModeDialogProps): React.JSX.Element {
// Start with User scope by default
const [selectedScope, setSelectedScope] = useState<SettingScope>(
SettingScope.Workspace,
SettingScope.User,
);

// Track the currently highlighted approval mode

@@ -6,6 +6,7 @@
|
||||
|
||||
import { Box, Text } from 'ink';
|
||||
import { IdeIntegrationNudge } from '../IdeIntegrationNudge.js';
|
||||
import { CommandFormatMigrationNudge } from '../CommandFormatMigrationNudge.js';
|
||||
import { LoopDetectionConfirmation } from './LoopDetectionConfirmation.js';
|
||||
import { FolderTrustDialog } from './FolderTrustDialog.js';
|
||||
import { ShellConfirmationDialog } from './ShellConfirmationDialog.js';
|
||||
@@ -16,7 +17,6 @@ import { QwenOAuthProgress } from './QwenOAuthProgress.js';
|
||||
import { AuthDialog } from '../auth/AuthDialog.js';
|
||||
import { OpenAIKeyPrompt } from './OpenAIKeyPrompt.js';
|
||||
import { EditorSettingsDialog } from './EditorSettingsDialog.js';
|
||||
import { WorkspaceMigrationDialog } from './WorkspaceMigrationDialog.js';
|
||||
import { PermissionsModifyTrustDialog } from './PermissionsModifyTrustDialog.js';
|
||||
import { ModelDialog } from './ModelDialog.js';
|
||||
import { ApprovalModeDialog } from './ApprovalModeDialog.js';
|
||||
@@ -25,6 +25,7 @@ import { useUIState } from '../contexts/UIStateContext.js';
|
||||
import { useUIActions } from '../contexts/UIActionsContext.js';
|
||||
import { useConfig } from '../contexts/ConfigContext.js';
|
||||
import { useSettings } from '../contexts/SettingsContext.js';
|
||||
import { SettingScope } from '../../config/settings.js';
|
||||
import { AuthState } from '../types.js';
|
||||
import { AuthType } from '@qwen-code/qwen-code-core';
|
||||
import process from 'node:process';
|
||||
@@ -76,15 +77,6 @@ export const DialogManager = ({
|
||||
if (uiState.showIdeRestartPrompt) {
|
||||
return <IdeTrustChangeDialog reason={uiState.ideTrustRestartReason} />;
|
||||
}
|
||||
if (uiState.showWorkspaceMigrationDialog) {
|
||||
return (
|
||||
<WorkspaceMigrationDialog
|
||||
workspaceExtensions={uiState.workspaceExtensions}
|
||||
onOpen={uiActions.onWorkspaceMigrationDialogOpen}
|
||||
onClose={uiActions.onWorkspaceMigrationDialogClose}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.shouldShowIdePrompt) {
|
||||
return (
|
||||
<IdeIntegrationNudge
|
||||
@@ -93,6 +85,14 @@ export const DialogManager = ({
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.shouldShowCommandMigrationNudge) {
|
||||
return (
|
||||
<CommandFormatMigrationNudge
|
||||
tomlFiles={uiState.commandMigrationTomlFiles}
|
||||
onComplete={uiActions.handleCommandMigrationComplete}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (uiState.isFolderTrustDialogOpen) {
|
||||
return (
|
||||
<FolderTrustDialog
|
||||
@@ -201,7 +201,7 @@ export const DialogManager = ({
|
||||
return (
|
||||
<OpenAIKeyPrompt
|
||||
onSubmit={(apiKey, baseUrl, model) => {
|
||||
uiActions.handleAuthSelect(AuthType.USE_OPENAI, {
|
||||
uiActions.handleAuthSelect(AuthType.USE_OPENAI, SettingScope.User, {
|
||||
apiKey,
|
||||
baseUrl,
|
||||
model,
|
||||
|
||||
@@ -10,11 +10,7 @@ import { ModelDialog } from './ModelDialog.js';
|
||||
import { useKeypress } from '../hooks/useKeypress.js';
|
||||
import { DescriptiveRadioButtonSelect } from './shared/DescriptiveRadioButtonSelect.js';
|
||||
import { ConfigContext } from '../contexts/ConfigContext.js';
|
||||
import { SettingsContext } from '../contexts/SettingsContext.js';
|
||||
import type { Config } from '@qwen-code/qwen-code-core';
|
||||
import { AuthType } from '@qwen-code/qwen-code-core';
|
||||
import type { LoadedSettings } from '../../config/settings.js';
|
||||
import { SettingScope } from '../../config/settings.js';
|
||||
import {
|
||||
AVAILABLE_MODELS_QWEN,
|
||||
MAINLINE_CODER,
|
||||
@@ -40,29 +36,18 @@ const renderComponent = (
|
||||
};
|
||||
const combinedProps = { ...defaultProps, ...props };
|
||||
|
||||
const mockSettings = {
|
||||
isTrusted: true,
|
||||
user: { settings: {} },
|
||||
workspace: { settings: {} },
|
||||
setValue: vi.fn(),
|
||||
} as unknown as LoadedSettings;
|
||||
|
||||
const mockConfig = contextValue
|
||||
? ({
|
||||
// --- Functions used by ModelDialog ---
|
||||
getModel: vi.fn(() => MAINLINE_CODER),
|
||||
setModel: vi.fn().mockResolvedValue(undefined),
|
||||
switchModel: vi.fn().mockResolvedValue(undefined),
|
||||
setModel: vi.fn(),
|
||||
getAuthType: vi.fn(() => 'qwen-oauth'),
|
||||
|
||||
// --- Functions used by ClearcutLogger ---
|
||||
getUsageStatisticsEnabled: vi.fn(() => true),
|
||||
getSessionId: vi.fn(() => 'mock-session-id'),
|
||||
getDebugMode: vi.fn(() => false),
|
||||
getContentGeneratorConfig: vi.fn(() => ({
|
||||
authType: AuthType.QWEN_OAUTH,
|
||||
model: MAINLINE_CODER,
|
||||
})),
|
||||
getContentGeneratorConfig: vi.fn(() => ({ authType: 'mock' })),
|
||||
getUseSmartEdit: vi.fn(() => false),
|
||||
getUseModelRouter: vi.fn(() => false),
|
||||
getProxy: vi.fn(() => undefined),
|
||||
@@ -73,27 +58,21 @@ const renderComponent = (
|
||||
: undefined;
|
||||
|
||||
const renderResult = render(
|
||||
<SettingsContext.Provider value={mockSettings}>
|
||||
<ConfigContext.Provider value={mockConfig}>
|
||||
<ModelDialog {...combinedProps} />
|
||||
</ConfigContext.Provider>
|
||||
</SettingsContext.Provider>,
|
||||
<ConfigContext.Provider value={mockConfig}>
|
||||
<ModelDialog {...combinedProps} />
|
||||
</ConfigContext.Provider>,
|
||||
);
|
||||
|
||||
return {
|
||||
...renderResult,
|
||||
props: combinedProps,
|
||||
mockConfig,
|
||||
mockSettings,
|
||||
};
|
||||
};
|
||||
|
||||
describe('<ModelDialog />', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
// Ensure env-based fallback models don't leak into this suite from the developer environment.
|
||||
delete process.env['OPENAI_MODEL'];
|
||||
delete process.env['ANTHROPIC_MODEL'];
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@@ -112,12 +91,8 @@ describe('<ModelDialog />', () => {
|
||||
|
||||
const props = mockedSelect.mock.calls[0][0];
|
||||
expect(props.items).toHaveLength(AVAILABLE_MODELS_QWEN.length);
|
||||
expect(props.items[0].value).toBe(
|
||||
`${AuthType.QWEN_OAUTH}::${MAINLINE_CODER}`,
|
||||
);
|
||||
expect(props.items[1].value).toBe(
|
||||
`${AuthType.QWEN_OAUTH}::${MAINLINE_VLM}`,
|
||||
);
|
||||
expect(props.items[0].value).toBe(MAINLINE_CODER);
|
||||
expect(props.items[1].value).toBe(MAINLINE_VLM);
|
||||
expect(props.showNumbers).toBe(true);
|
||||
});
|
||||
|
||||
@@ -164,93 +139,16 @@ describe('<ModelDialog />', () => {
|
||||
expect(mockedSelect).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('calls config.switchModel and onClose when DescriptiveRadioButtonSelect.onSelect is triggered', async () => {
|
||||
const { props, mockConfig, mockSettings } = renderComponent({}, {}); // Pass empty object for contextValue
|
||||
it('calls config.setModel and onClose when DescriptiveRadioButtonSelect.onSelect is triggered', () => {
|
||||
const { props, mockConfig } = renderComponent({}, {}); // Pass empty object for contextValue
|
||||
|
||||
const childOnSelect = mockedSelect.mock.calls[0][0].onSelect;
|
||||
expect(childOnSelect).toBeDefined();
|
||||
|
||||
await childOnSelect(`${AuthType.QWEN_OAUTH}::${MAINLINE_CODER}`);
|
||||
childOnSelect(MAINLINE_CODER);
|
||||
|
||||
expect(mockConfig?.switchModel).toHaveBeenCalledWith(
|
||||
AuthType.QWEN_OAUTH,
|
||||
MAINLINE_CODER,
|
||||
undefined,
|
||||
{
|
||||
reason: 'user_manual',
|
||||
context: 'Model switched via /model dialog',
|
||||
},
|
||||
);
|
||||
expect(mockSettings.setValue).toHaveBeenCalledWith(
|
||||
SettingScope.User,
|
||||
'model.name',
|
||||
MAINLINE_CODER,
|
||||
);
|
||||
expect(mockSettings.setValue).toHaveBeenCalledWith(
|
||||
SettingScope.User,
|
||||
'security.auth.selectedType',
|
||||
AuthType.QWEN_OAUTH,
|
||||
);
|
||||
expect(props.onClose).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('calls config.switchModel and persists authType+model when selecting a different authType', async () => {
|
||||
const switchModel = vi.fn().mockResolvedValue(undefined);
|
||||
const getAuthType = vi.fn(() => AuthType.USE_OPENAI);
|
||||
const getAvailableModelsForAuthType = vi.fn((t: AuthType) => {
|
||||
if (t === AuthType.USE_OPENAI) {
|
||||
return [{ id: 'gpt-4', label: 'GPT-4', authType: t }];
|
||||
}
|
||||
if (t === AuthType.QWEN_OAUTH) {
|
||||
return AVAILABLE_MODELS_QWEN.map((m) => ({
|
||||
id: m.id,
|
||||
label: m.label,
|
||||
authType: AuthType.QWEN_OAUTH,
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
});
|
||||
|
||||
const mockConfigWithSwitchAuthType = {
|
||||
getAuthType,
|
||||
getModel: vi.fn(() => 'gpt-4'),
|
||||
getContentGeneratorConfig: vi.fn(() => ({
|
||||
authType: AuthType.QWEN_OAUTH,
|
||||
model: MAINLINE_CODER,
|
||||
})),
|
||||
// Add switchModel to the mock object (not the type)
|
||||
switchModel,
|
||||
getAvailableModelsForAuthType,
|
||||
};
|
||||
|
||||
const { props, mockSettings } = renderComponent(
|
||||
{},
|
||||
// Cast to Config to bypass type checking, matching the runtime behavior
|
||||
mockConfigWithSwitchAuthType as unknown as Partial<Config>,
|
||||
);
|
||||
|
||||
const childOnSelect = mockedSelect.mock.calls[0][0].onSelect;
|
||||
await childOnSelect(`${AuthType.QWEN_OAUTH}::${MAINLINE_CODER}`);
|
||||
|
||||
expect(switchModel).toHaveBeenCalledWith(
|
||||
AuthType.QWEN_OAUTH,
|
||||
MAINLINE_CODER,
|
||||
{ requireCachedCredentials: true },
|
||||
{
|
||||
reason: 'user_manual',
|
||||
context: 'AuthType+model switched via /model dialog',
|
||||
},
|
||||
);
|
||||
expect(mockSettings.setValue).toHaveBeenCalledWith(
|
||||
SettingScope.User,
|
||||
'model.name',
|
||||
MAINLINE_CODER,
|
||||
);
|
||||
expect(mockSettings.setValue).toHaveBeenCalledWith(
|
||||
SettingScope.User,
|
||||
'security.auth.selectedType',
|
||||
AuthType.QWEN_OAUTH,
|
||||
);
|
||||
// Assert against the default mock provided by renderComponent
|
||||
expect(mockConfig?.setModel).toHaveBeenCalledWith(MAINLINE_CODER);
|
||||
expect(props.onClose).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
@@ -295,25 +193,17 @@ describe('<ModelDialog />', () => {
|
||||
it('updates initialIndex when config context changes', () => {
|
||||
const mockGetModel = vi.fn(() => MAINLINE_CODER);
|
||||
const mockGetAuthType = vi.fn(() => 'qwen-oauth');
|
||||
const mockSettings = {
|
||||
isTrusted: true,
|
||||
user: { settings: {} },
|
||||
workspace: { settings: {} },
|
||||
setValue: vi.fn(),
|
||||
} as unknown as LoadedSettings;
|
||||
const { rerender } = render(
|
||||
<SettingsContext.Provider value={mockSettings}>
|
||||
<ConfigContext.Provider
|
||||
value={
|
||||
{
|
||||
getModel: mockGetModel,
|
||||
getAuthType: mockGetAuthType,
|
||||
} as unknown as Config
|
||||
}
|
||||
>
|
||||
<ModelDialog onClose={vi.fn()} />
|
||||
</ConfigContext.Provider>
|
||||
</SettingsContext.Provider>,
|
||||
<ConfigContext.Provider
|
||||
value={
|
||||
{
|
||||
getModel: mockGetModel,
|
||||
getAuthType: mockGetAuthType,
|
||||
} as unknown as Config
|
||||
}
|
||||
>
|
||||
<ModelDialog onClose={vi.fn()} />
|
||||
</ConfigContext.Provider>,
|
||||
);
|
||||
|
||||
expect(mockedSelect.mock.calls[0][0].initialIndex).toBe(0);
|
||||
@@ -325,11 +215,9 @@ describe('<ModelDialog />', () => {
|
||||
} as unknown as Config;
|
||||
|
||||
rerender(
|
||||
<SettingsContext.Provider value={mockSettings}>
|
||||
<ConfigContext.Provider value={newMockConfig}>
|
||||
<ModelDialog onClose={vi.fn()} />
|
||||
</ConfigContext.Provider>
|
||||
</SettingsContext.Provider>,
|
||||
<ConfigContext.Provider value={newMockConfig}>
|
||||
<ModelDialog onClose={vi.fn()} />
|
||||
</ConfigContext.Provider>,
|
||||
);
|
||||
|
||||
// Should be called at least twice: initial render + re-render after context change
|
||||
|
||||
@@ -5,210 +5,52 @@
|
||||
*/
|
||||
|
||||
import type React from 'react';
|
||||
import { useCallback, useContext, useMemo, useState } from 'react';
|
||||
import { useCallback, useContext, useMemo } from 'react';
|
||||
import { Box, Text } from 'ink';
|
||||
import {
|
||||
AuthType,
|
||||
ModelSlashCommandEvent,
|
||||
logModelSlashCommand,
|
||||
type ContentGeneratorConfig,
|
||||
type ContentGeneratorConfigSource,
|
||||
type ContentGeneratorConfigSources,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { useKeypress } from '../hooks/useKeypress.js';
|
||||
import { theme } from '../semantic-colors.js';
|
||||
import { DescriptiveRadioButtonSelect } from './shared/DescriptiveRadioButtonSelect.js';
|
||||
import { ConfigContext } from '../contexts/ConfigContext.js';
|
||||
import { UIStateContext } from '../contexts/UIStateContext.js';
|
||||
import { useSettings } from '../contexts/SettingsContext.js';
|
||||
import {
|
||||
getAvailableModelsForAuthType,
|
||||
MAINLINE_CODER,
|
||||
} from '../models/availableModels.js';
|
||||
import { getPersistScopeForModelSelection } from '../../config/modelProvidersScope.js';
|
||||
import { t } from '../../i18n/index.js';
|
||||
|
||||
interface ModelDialogProps {
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
function formatSourceBadge(
|
||||
source: ContentGeneratorConfigSource | undefined,
|
||||
): string | undefined {
|
||||
if (!source) return undefined;
|
||||
|
||||
switch (source.kind) {
|
||||
case 'cli':
|
||||
return source.detail ? `CLI ${source.detail}` : 'CLI';
|
||||
case 'env':
|
||||
return source.envKey ? `ENV ${source.envKey}` : 'ENV';
|
||||
case 'settings':
|
||||
return source.settingsPath
|
||||
? `Settings ${source.settingsPath}`
|
||||
: 'Settings';
|
||||
case 'modelProviders': {
|
||||
const suffix =
|
||||
source.authType && source.modelId
|
||||
? `${source.authType}:${source.modelId}`
|
||||
: source.authType
|
||||
? `${source.authType}`
|
||||
: source.modelId
|
||||
? `${source.modelId}`
|
||||
: '';
|
||||
return suffix ? `ModelProviders ${suffix}` : 'ModelProviders';
|
||||
}
|
||||
case 'default':
|
||||
return source.detail ? `Default ${source.detail}` : 'Default';
|
||||
case 'computed':
|
||||
return source.detail ? `Computed ${source.detail}` : 'Computed';
|
||||
case 'programmatic':
|
||||
return source.detail ? `Programmatic ${source.detail}` : 'Programmatic';
|
||||
case 'unknown':
|
||||
default:
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function readSourcesFromConfig(config: unknown): ContentGeneratorConfigSources {
|
||||
if (!config) {
|
||||
return {};
|
||||
}
|
||||
const maybe = config as {
|
||||
getContentGeneratorConfigSources?: () => ContentGeneratorConfigSources;
|
||||
};
|
||||
return maybe.getContentGeneratorConfigSources?.() ?? {};
|
||||
}
|
||||
|
||||
function maskApiKey(apiKey: string | undefined): string {
|
||||
if (!apiKey) return '(not set)';
|
||||
const trimmed = apiKey.trim();
|
||||
if (trimmed.length === 0) return '(not set)';
|
||||
if (trimmed.length <= 6) return '***';
|
||||
const head = trimmed.slice(0, 3);
|
||||
const tail = trimmed.slice(-4);
|
||||
return `${head}…${tail}`;
|
||||
}
|
||||
|
||||
function persistModelSelection(
|
||||
settings: ReturnType<typeof useSettings>,
|
||||
modelId: string,
|
||||
): void {
|
||||
const scope = getPersistScopeForModelSelection(settings);
|
||||
settings.setValue(scope, 'model.name', modelId);
|
||||
}
|
||||
|
||||
function persistAuthTypeSelection(
|
||||
settings: ReturnType<typeof useSettings>,
|
||||
authType: AuthType,
|
||||
): void {
|
||||
const scope = getPersistScopeForModelSelection(settings);
|
||||
settings.setValue(scope, 'security.auth.selectedType', authType);
|
||||
}
|
||||
|
||||
function ConfigRow({
|
||||
label,
|
||||
value,
|
||||
badge,
|
||||
}: {
|
||||
label: string;
|
||||
value: React.ReactNode;
|
||||
badge?: string;
|
||||
}): React.JSX.Element {
|
||||
return (
|
||||
<Box flexDirection="column">
|
||||
<Box>
|
||||
<Box minWidth={12} flexShrink={0}>
|
||||
<Text color={theme.text.secondary}>{label}:</Text>
|
||||
</Box>
|
||||
<Box flexGrow={1} flexDirection="row" flexWrap="wrap">
|
||||
<Text>{value}</Text>
|
||||
</Box>
|
||||
</Box>
|
||||
{badge ? (
|
||||
<Box>
|
||||
<Box minWidth={12} flexShrink={0}>
|
||||
<Text> </Text>
|
||||
</Box>
|
||||
<Box flexGrow={1}>
|
||||
<Text color={theme.text.secondary}>{badge}</Text>
|
||||
</Box>
|
||||
</Box>
|
||||
) : null}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
export function ModelDialog({ onClose }: ModelDialogProps): React.JSX.Element {
|
||||
const config = useContext(ConfigContext);
|
||||
const uiState = useContext(UIStateContext);
|
||||
const settings = useSettings();
|
||||
|
||||
// Local error state for displaying errors within the dialog
|
||||
const [errorMessage, setErrorMessage] = useState<string | null>(null);
|
||||
// Get auth type from config, default to QWEN_OAUTH if not available
|
||||
const authType = config?.getAuthType() ?? AuthType.QWEN_OAUTH;
|
||||
|
||||
const authType = config?.getAuthType();
|
||||
const effectiveConfig =
|
||||
(config?.getContentGeneratorConfig?.() as
|
||||
| ContentGeneratorConfig
|
||||
| undefined) ?? undefined;
|
||||
const sources = readSourcesFromConfig(config);
|
||||
|
||||
const availableModelEntries = useMemo(() => {
|
||||
const allAuthTypes = Object.values(AuthType) as AuthType[];
|
||||
const modelsByAuthType = allAuthTypes
|
||||
.map((t) => ({
|
||||
authType: t,
|
||||
models: getAvailableModelsForAuthType(t, config ?? undefined),
|
||||
}))
|
||||
.filter((x) => x.models.length > 0);
|
||||
|
||||
// Fixed order: qwen-oauth first, then others in a stable order
|
||||
const authTypeOrder: AuthType[] = [
|
||||
AuthType.QWEN_OAUTH,
|
||||
AuthType.USE_OPENAI,
|
||||
AuthType.USE_ANTHROPIC,
|
||||
AuthType.USE_GEMINI,
|
||||
AuthType.USE_VERTEX_AI,
|
||||
];
|
||||
|
||||
// Filter to only include authTypes that have models
|
||||
const availableAuthTypes = new Set(modelsByAuthType.map((x) => x.authType));
|
||||
const orderedAuthTypes = authTypeOrder.filter((t) =>
|
||||
availableAuthTypes.has(t),
|
||||
);
|
||||
|
||||
return orderedAuthTypes.flatMap((t) => {
|
||||
const models =
|
||||
modelsByAuthType.find((x) => x.authType === t)?.models ?? [];
|
||||
return models.map((m) => ({ authType: t, model: m }));
|
||||
});
|
||||
}, [config]);
|
||||
// Get available models based on auth type
|
||||
const availableModels = useMemo(
|
||||
() => getAvailableModelsForAuthType(authType),
|
||||
[authType],
|
||||
);
|
||||
|
||||
const MODEL_OPTIONS = useMemo(
|
||||
() =>
|
||||
availableModelEntries.map(({ authType: t2, model }) => {
|
||||
const value = `${t2}::${model.id}`;
|
||||
const title = (
|
||||
<Text>
|
||||
<Text bold color={theme.text.accent}>
|
||||
[{t2}]
|
||||
</Text>
|
||||
<Text>{` ${model.label}`}</Text>
|
||||
</Text>
|
||||
);
|
||||
const description = model.description || '';
|
||||
return {
|
||||
value,
|
||||
title,
|
||||
description,
|
||||
key: value,
|
||||
};
|
||||
}),
|
||||
[availableModelEntries],
|
||||
availableModels.map((model) => ({
|
||||
value: model.id,
|
||||
title: model.label,
|
||||
description: model.description || '',
|
||||
key: model.id,
|
||||
})),
|
||||
[availableModels],
|
||||
);
|
||||
|
||||
const preferredModelId = config?.getModel() || MAINLINE_CODER;
|
||||
const preferredKey = authType ? `${authType}::${preferredModelId}` : '';
|
||||
// Determine the Preferred Model (read once when the dialog opens).
|
||||
const preferredModel = config?.getModel() || MAINLINE_CODER;
|
||||
|
||||
useKeypress(
|
||||
(key) => {
|
||||
@@ -219,83 +61,25 @@ export function ModelDialog({ onClose }: ModelDialogProps): React.JSX.Element {
|
||||
{ isActive: true },
|
||||
);
|
||||
|
||||
const initialIndex = useMemo(() => {
|
||||
const index = MODEL_OPTIONS.findIndex(
|
||||
(option) => option.value === preferredKey,
|
||||
);
|
||||
return index === -1 ? 0 : index;
|
||||
}, [MODEL_OPTIONS, preferredKey]);
|
||||
// Calculate the initial index based on the preferred model.
|
||||
const initialIndex = useMemo(
|
||||
() => MODEL_OPTIONS.findIndex((option) => option.value === preferredModel),
|
||||
[MODEL_OPTIONS, preferredModel],
|
||||
);
|
||||
|
||||
// Handle selection internally (Autonomous Dialog).
|
||||
const handleSelect = useCallback(
|
||||
async (selected: string) => {
|
||||
// Clear any previous error
|
||||
setErrorMessage(null);
|
||||
|
||||
const sep = '::';
|
||||
const idx = selected.indexOf(sep);
|
||||
const selectedAuthType = (
|
||||
idx >= 0 ? selected.slice(0, idx) : authType
|
||||
) as AuthType;
|
||||
const modelId = idx >= 0 ? selected.slice(idx + sep.length) : selected;
|
||||
|
||||
(model: string) => {
|
||||
if (config) {
|
||||
try {
|
||||
await config.switchModel(
|
||||
selectedAuthType,
|
||||
modelId,
|
||||
selectedAuthType !== authType &&
|
||||
selectedAuthType === AuthType.QWEN_OAUTH
|
||||
? { requireCachedCredentials: true }
|
||||
: undefined,
|
||||
{
|
||||
reason: 'user_manual',
|
||||
context:
|
||||
selectedAuthType === authType
|
||||
? 'Model switched via /model dialog'
|
||||
: 'AuthType+model switched via /model dialog',
|
||||
},
|
||||
);
|
||||
} catch (e) {
|
||||
const baseErrorMessage = e instanceof Error ? e.message : String(e);
|
||||
setErrorMessage(
|
||||
`Failed to switch model to '${modelId}'.\n\n${baseErrorMessage}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
const event = new ModelSlashCommandEvent(modelId);
|
||||
config.setModel(model);
|
||||
const event = new ModelSlashCommandEvent(model);
|
||||
logModelSlashCommand(config, event);
|
||||
|
||||
const after = config.getContentGeneratorConfig?.() as
|
||||
| ContentGeneratorConfig
|
||||
| undefined;
|
||||
const effectiveAuthType =
|
||||
after?.authType ?? selectedAuthType ?? authType;
|
||||
const effectiveModelId = after?.model ?? modelId;
|
||||
|
||||
persistModelSelection(settings, effectiveModelId);
|
||||
persistAuthTypeSelection(settings, effectiveAuthType);
|
||||
|
||||
const baseUrl = after?.baseUrl ?? '(default)';
|
||||
const maskedKey = maskApiKey(after?.apiKey);
|
||||
uiState?.historyManager.addItem(
|
||||
{
|
||||
type: 'info',
|
||||
text:
|
||||
`authType: ${effectiveAuthType}\n` +
|
||||
`Using model: ${effectiveModelId}\n` +
|
||||
`Base URL: ${baseUrl}\n` +
|
||||
`API key: ${maskedKey}`,
|
||||
},
|
||||
Date.now(),
|
||||
);
|
||||
}
|
||||
onClose();
|
||||
},
|
||||
[authType, config, onClose, settings, uiState, setErrorMessage],
|
||||
[config, onClose],
|
||||
);
|
||||
|
||||
const hasModels = MODEL_OPTIONS.length > 0;
|
||||
|
||||
return (
|
||||
<Box
|
||||
borderStyle="round"
|
||||
@@ -305,73 +89,14 @@ export function ModelDialog({ onClose }: ModelDialogProps): React.JSX.Element {
|
||||
width="100%"
|
||||
>
|
||||
<Text bold>{t('Select Model')}</Text>
|
||||
|
||||
<Box marginTop={1} flexDirection="column">
|
||||
<Text color={theme.text.secondary}>
|
||||
{t('Current (effective) configuration')}
|
||||
</Text>
|
||||
<Box flexDirection="column" marginTop={1}>
|
||||
<ConfigRow label="AuthType" value={authType} />
|
||||
<ConfigRow
|
||||
label="Model"
|
||||
value={effectiveConfig?.model ?? config?.getModel?.() ?? ''}
|
||||
badge={formatSourceBadge(sources['model'])}
|
||||
/>
|
||||
|
||||
{authType !== AuthType.QWEN_OAUTH && (
|
||||
<>
|
||||
<ConfigRow
|
||||
label="Base URL"
|
||||
value={effectiveConfig?.baseUrl ?? ''}
|
||||
badge={formatSourceBadge(sources['baseUrl'])}
|
||||
/>
|
||||
<ConfigRow
|
||||
label="API Key"
|
||||
value={effectiveConfig?.apiKey ? t('(set)') : t('(not set)')}
|
||||
badge={formatSourceBadge(sources['apiKey'])}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</Box>
|
||||
<Box marginTop={1}>
|
||||
<DescriptiveRadioButtonSelect
|
||||
items={MODEL_OPTIONS}
|
||||
onSelect={handleSelect}
|
||||
initialIndex={initialIndex}
|
||||
showNumbers={true}
|
||||
/>
|
||||
</Box>
|
||||
|
||||
{!hasModels ? (
|
||||
<Box marginTop={1} flexDirection="column">
|
||||
<Text color={theme.status.warning}>
|
||||
{t(
|
||||
'No models available for the current authentication type ({{authType}}).',
|
||||
{
|
||||
authType: authType ? String(authType) : t('(none)'),
|
||||
},
|
||||
)}
|
||||
</Text>
|
||||
<Box marginTop={1}>
|
||||
<Text color={theme.text.secondary}>
|
||||
{t(
|
||||
'Please configure models in settings.modelProviders or use environment variables.',
|
||||
)}
|
||||
</Text>
|
||||
</Box>
|
||||
</Box>
|
||||
) : (
|
||||
<Box marginTop={1}>
|
||||
<DescriptiveRadioButtonSelect
|
||||
items={MODEL_OPTIONS}
|
||||
onSelect={handleSelect}
|
||||
initialIndex={initialIndex}
|
||||
showNumbers={true}
|
||||
/>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{errorMessage && (
|
||||
<Box marginTop={1} flexDirection="column" paddingX={1}>
|
||||
<Text color={theme.status.error} wrap="wrap">
|
||||
✕ {errorMessage}
|
||||
</Text>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
<Box marginTop={1} flexDirection="column">
|
||||
<Text color={theme.text.secondary}>{t('(Press Esc to close)')}</Text>
|
||||
</Box>
|
||||
|
||||
@@ -87,13 +87,7 @@ export async function showResumeSessionPicker(
|
||||
let selectedId: string | undefined;
|
||||
|
||||
const { unmount, waitUntilExit } = render(
|
||||
<KeypressProvider
|
||||
kittyProtocolEnabled={false}
|
||||
pasteWorkaround={
|
||||
process.platform === 'win32' ||
|
||||
parseInt(process.versions.node.split('.')[0], 10) < 20
|
||||
}
|
||||
>
|
||||
<KeypressProvider kittyProtocolEnabled={false}>
|
||||
<StandalonePickerScreen
|
||||
sessionService={sessionService}
|
||||
onSelect={(id) => {
|
||||
|
||||
@@ -1,119 +0,0 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2025 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { Box, Text } from 'ink';
|
||||
import {
|
||||
type Extension,
|
||||
performWorkspaceExtensionMigration,
|
||||
} from '../../config/extension.js';
|
||||
import { RadioButtonSelect } from './shared/RadioButtonSelect.js';
|
||||
import { theme } from '../semantic-colors.js';
|
||||
import { useState } from 'react';
|
||||
import { useKeypress } from '../hooks/useKeypress.js';
|
||||
|
||||
export function WorkspaceMigrationDialog(props: {
|
||||
workspaceExtensions: Extension[];
|
||||
onOpen: () => void;
|
||||
onClose: () => void;
|
||||
}) {
|
||||
const { workspaceExtensions, onOpen, onClose } = props;
|
||||
const [migrationComplete, setMigrationComplete] = useState(false);
|
||||
const [failedExtensions, setFailedExtensions] = useState<string[]>([]);
|
||||
onOpen();
|
||||
const onMigrate = async () => {
|
||||
const failed = await performWorkspaceExtensionMigration(
|
||||
workspaceExtensions,
|
||||
// We aren't updating extensions, just moving them around, don't need to ask for consent.
|
||||
async (_) => true,
|
||||
);
|
||||
setFailedExtensions(failed);
|
||||
setMigrationComplete(true);
|
||||
};
|
||||
|
||||
useKeypress(
|
||||
(key) => {
|
||||
if (migrationComplete && key.sequence === 'q') {
|
||||
process.exit(0);
|
||||
}
|
||||
},
|
||||
{ isActive: true },
|
||||
);
|
||||
|
||||
if (migrationComplete) {
|
||||
return (
|
||||
<Box
|
||||
flexDirection="column"
|
||||
borderStyle="round"
|
||||
borderColor={theme.border.default}
|
||||
padding={1}
|
||||
>
|
||||
{failedExtensions.length > 0 ? (
|
||||
<>
|
||||
<Text color={theme.text.primary}>
|
||||
The following extensions failed to migrate. Please try installing
|
||||
them manually. To see other changes, Qwen Code must be restarted.
|
||||
Press 'q' to quit.
|
||||
</Text>
|
||||
<Box flexDirection="column" marginTop={1} marginLeft={2}>
|
||||
{failedExtensions.map((failed) => (
|
||||
<Text key={failed}>- {failed}</Text>
|
||||
))}
|
||||
</Box>
|
||||
</>
|
||||
) : (
|
||||
<Text color={theme.text.primary}>
|
||||
Migration complete. To see changes, Qwen Code must be restarted.
|
||||
Press 'q' to quit.
|
||||
</Text>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Box
|
||||
flexDirection="column"
|
||||
borderStyle="round"
|
||||
borderColor={theme.border.default}
|
||||
padding={1}
|
||||
>
|
||||
<Text bold color={theme.text.primary}>
|
||||
Workspace-level extensions are deprecated{'\n'}
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
Would you like to install them at the user level?
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
The extension definition will remain in your workspace directory.
|
||||
</Text>
|
||||
<Text color={theme.text.primary}>
|
||||
If you opt to skip, you can install them manually using the extensions
|
||||
install command.
|
||||
</Text>
|
||||
|
||||
<Box flexDirection="column" marginTop={1} marginLeft={2}>
|
||||
{workspaceExtensions.map((extension) => (
|
||||
<Text key={extension.config.name}>- {extension.config.name}</Text>
|
||||
))}
|
||||
</Box>
|
||||
<Box marginTop={1}>
|
||||
<RadioButtonSelect
|
||||
items={[
|
||||
{ label: 'Install all', value: 'migrate', key: 'migrate' },
|
||||
{ label: 'Skip', value: 'skip', key: 'skip' },
|
||||
]}
|
||||
onSelect={(value: string) => {
|
||||
if (value === 'migrate') {
|
||||
onMigrate();
|
||||
} else {
|
||||
onClose();
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</Box>
|
||||
</Box>
|
||||
);
|
||||
}
|
||||
@@ -11,7 +11,7 @@ import { BaseSelectionList } from './BaseSelectionList.js';
import type { SelectionListItem } from '../../hooks/useSelectionList.js';

export interface DescriptiveRadioSelectItem<T> extends SelectionListItem<T> {
title: React.ReactNode;
title: string;
description: string;
}

@@ -218,7 +218,7 @@ export const AgentSelectionStep = ({
const renderAgentItem = (
agent: {
name: string;
level: 'project' | 'user' | 'builtin' | 'session';
level: 'project' | 'user' | 'builtin' | 'session' | 'extension';
isBuiltin?: boolean;
},
index: number,

@@ -18,7 +18,6 @@ const mockUseUIState = vi.mocked(useUIState);
|
||||
const mockExtensions = [
|
||||
{ name: 'ext-one', version: '1.0.0', isActive: true },
|
||||
{ name: 'ext-two', version: '2.1.0', isActive: true },
|
||||
{ name: 'ext-disabled', version: '3.0.0', isActive: false },
|
||||
];
|
||||
|
||||
describe('<ExtensionsList />', () => {
|
||||
@@ -29,7 +28,6 @@ describe('<ExtensionsList />', () => {
|
||||
const mockUIState = (
|
||||
extensions: unknown[],
|
||||
extensionsUpdateState: Map<string, ExtensionUpdateState>,
|
||||
disabledExtensions: string[] = [],
|
||||
) => {
|
||||
mockUseUIState.mockReturnValue({
|
||||
commandContext: createMockCommandContext({
|
||||
@@ -37,13 +35,6 @@ describe('<ExtensionsList />', () => {
|
||||
config: {
|
||||
getExtensions: () => extensions,
|
||||
},
|
||||
settings: {
|
||||
merged: {
|
||||
extensions: {
|
||||
disabled: disabledExtensions,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
extensionsUpdateState,
|
||||
@@ -58,12 +49,11 @@ describe('<ExtensionsList />', () => {
|
||||
});
|
||||
|
||||
it('should render a list of extensions with their version and status', () => {
|
||||
mockUIState(mockExtensions, new Map(), ['ext-disabled']);
|
||||
mockUIState(mockExtensions, new Map());
|
||||
const { lastFrame } = render(<ExtensionsList />);
|
||||
const output = lastFrame();
|
||||
expect(output).toContain('ext-one (v1.0.0) - active');
|
||||
expect(output).toContain('ext-two (v2.1.0) - active');
|
||||
expect(output).toContain('ext-disabled (v3.0.0) - disabled');
|
||||
});
|
||||
|
||||
it('should display "unknown state" if an extension has no update state', () => {
|
||||
|
||||
@@ -9,12 +9,10 @@ import { useUIState } from '../../contexts/UIStateContext.js';
|
||||
import { ExtensionUpdateState } from '../../state/extensions.js';
|
||||
|
||||
export const ExtensionsList = () => {
|
||||
const { commandContext, extensionsUpdateState } = useUIState();
|
||||
const allExtensions = commandContext.services.config!.getExtensions();
|
||||
const settings = commandContext.services.settings;
|
||||
const disabledExtensions = settings.merged.extensions?.disabled ?? [];
|
||||
const { extensionsUpdateState, commandContext } = useUIState();
|
||||
const extensions = commandContext.services.config?.getExtensions() || [];
|
||||
|
||||
if (allExtensions.length === 0) {
|
||||
if (extensions.length === 0) {
|
||||
return <Text>No extensions installed.</Text>;
|
||||
}
|
||||
|
||||
@@ -22,10 +20,11 @@ export const ExtensionsList = () => {
|
||||
<Box flexDirection="column" marginTop={1} marginBottom={1}>
|
||||
<Text>Installed extensions:</Text>
|
||||
<Box flexDirection="column" paddingLeft={2}>
|
||||
{allExtensions.map((ext) => {
|
||||
{extensions.map((ext) => {
|
||||
const state = extensionsUpdateState.get(ext.name);
|
||||
const isActive = !disabledExtensions.includes(ext.name);
|
||||
const isActive = ext.isActive;
|
||||
const activeString = isActive ? 'active' : 'disabled';
|
||||
const activeColor = isActive ? 'green' : 'grey';
|
||||
|
||||
let stateColor = 'gray';
|
||||
const stateText = state || 'unknown state';
|
||||
@@ -44,6 +43,7 @@ export const ExtensionsList = () => {
|
||||
break;
|
||||
case ExtensionUpdateState.UP_TO_DATE:
|
||||
case ExtensionUpdateState.NOT_UPDATABLE:
|
||||
case ExtensionUpdateState.UPDATED:
|
||||
stateColor = 'green';
|
||||
break;
|
||||
default:
|
||||
@@ -52,12 +52,22 @@ export const ExtensionsList = () => {
|
||||
}
|
||||
|
||||
return (
|
||||
<Box key={ext.name}>
|
||||
<Box key={ext.name} flexDirection="column" marginBottom={1}>
|
||||
<Text>
|
||||
<Text color="cyan">{`${ext.name} (v${ext.version})`}</Text>
|
||||
{` - ${activeString}`}
|
||||
<Text color={activeColor}>{` - ${activeString}`}</Text>
|
||||
{<Text color={stateColor}>{` (${stateText})`}</Text>}
|
||||
</Text>
|
||||
{ext.resolvedSettings && ext.resolvedSettings.length > 0 && (
|
||||
<Box flexDirection="column" paddingLeft={2}>
|
||||
<Text>settings:</Text>
|
||||
{ext.resolvedSettings.map((setting) => (
|
||||
<Text key={setting.name}>
|
||||
- {setting.name}: {setting.value}
|
||||
</Text>
|
||||
))}
|
||||
</Box>
|
||||
)}
|
||||
</Box>
|
||||
);
|
||||
})}
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
import { createContext, useContext } from 'react';
|
||||
import { type Key } from '../hooks/useKeypress.js';
|
||||
import { type IdeIntegrationNudgeResult } from '../IdeIntegrationNudge.js';
|
||||
import { type CommandMigrationNudgeResult } from '../CommandFormatMigrationNudge.js';
|
||||
import { type FolderTrustChoice } from '../components/FolderTrustDialog.js';
|
||||
import {
|
||||
type AuthType,
|
||||
@@ -30,6 +31,7 @@ export interface UIActions {
|
||||
) => void;
|
||||
handleAuthSelect: (
|
||||
authType: AuthType | undefined,
|
||||
scope: SettingScope,
|
||||
credentials?: OpenAICredentials,
|
||||
) => Promise<void>;
|
||||
setAuthState: (state: AuthState) => void;
|
||||
@@ -46,14 +48,13 @@ export interface UIActions {
|
||||
setShellModeActive: (value: boolean) => void;
|
||||
vimHandleInput: (key: Key) => boolean;
|
||||
handleIdePromptComplete: (result: IdeIntegrationNudgeResult) => void;
|
||||
handleCommandMigrationComplete: (result: CommandMigrationNudgeResult) => void;
|
||||
handleFolderTrustSelect: (choice: FolderTrustChoice) => void;
|
||||
setConstrainHeight: (value: boolean) => void;
|
||||
onEscapePromptChange: (show: boolean) => void;
|
||||
refreshStatic: () => void;
|
||||
handleFinalSubmit: (value: string) => void;
|
||||
handleClearScreen: () => void;
|
||||
onWorkspaceMigrationDialogOpen: () => void;
|
||||
onWorkspaceMigrationDialogClose: () => void;
|
||||
// Vision switch dialog
|
||||
handleVisionSwitchSelect: (outcome: VisionSwitchOutcome) => void;
|
||||
// Welcome back dialog
|
||||
|
||||
@@ -72,6 +72,8 @@ export interface UIState {
|
||||
suggestionsWidth: number;
|
||||
isInputActive: boolean;
|
||||
shouldShowIdePrompt: boolean;
|
||||
shouldShowCommandMigrationNudge: boolean;
|
||||
commandMigrationTomlFiles: string[];
|
||||
isFolderTrustDialogOpen: boolean;
|
||||
isTrustedFolder: boolean | undefined;
|
||||
constrainHeight: boolean;
|
||||
@@ -87,9 +89,6 @@ export interface UIState {
|
||||
historyRemountKey: number;
|
||||
messageQueue: string[];
|
||||
showAutoAcceptIndicator: ApprovalMode;
|
||||
showWorkspaceMigrationDialog: boolean;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
workspaceExtensions: any[]; // Extension[]
|
||||
// Quota-related state
|
||||
currentModel: string;
|
||||
contextFileNames: string[];
|
||||
|
||||
51
packages/cli/src/ui/hooks/useCommandMigration.ts
Normal file
@@ -0,0 +1,51 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import { useEffect, useState } from 'react';
import { Storage } from '@qwen-code/qwen-code-core';
import { detectTomlCommands } from '../../services/command-migration-tool.js';
import type { LoadedSettings } from '../../config/settings.js';

/**
* Hook to detect TOML command files and manage migration nudge visibility.
* Checks all command directories: workspace, user, and global levels.
*/
export function useCommandMigration(
settings: LoadedSettings,
storage: Storage,
) {
const [showMigrationNudge, setShowMigrationNudge] = useState(false);
const [tomlFiles, setTomlFiles] = useState<string[]>([]);

useEffect(() => {
const checkTomlCommands = async () => {
const allFiles: string[] = [];

// Check workspace commands directory (.qwen/commands)
const workspaceCommandsDir = storage.getProjectCommandsDir();
const workspaceFiles = await detectTomlCommands(workspaceCommandsDir);
allFiles.push(...workspaceFiles.map((f) => `workspace: ${f}`));

// Check user commands directory (~/.qwen/commands)
const userCommandsDir = Storage.getUserCommandsDir();
const userFiles = await detectTomlCommands(userCommandsDir);
allFiles.push(...userFiles.map((f) => `user: ${f}`));

if (allFiles.length > 0) {
setTomlFiles(allFiles);
setShowMigrationNudge(true);
}
};

checkTomlCommands();
}, [storage]);

return {
showMigrationNudge,
tomlFiles,
setShowMigrationNudge,
};
}
@@ -25,6 +25,7 @@ export interface DialogCloseOptions {
|
||||
isAuthDialogOpen: boolean;
|
||||
handleAuthSelect: (
|
||||
authType: AuthType | undefined,
|
||||
scope: SettingScope,
|
||||
credentials?: OpenAICredentials,
|
||||
) => Promise<void>;
|
||||
pendingAuthType: AuthType | undefined;
|
||||
|
||||
@@ -4,26 +4,21 @@
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { vi } from 'vitest';
|
||||
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
import {
|
||||
ExtensionStorage,
|
||||
annotateActiveExtensions,
|
||||
loadExtension,
|
||||
} from '../../config/extension.js';
|
||||
import { createExtension } from '../../test-utils/createExtension.js';
|
||||
|
||||
import { useExtensionUpdates } from './useExtensionUpdates.js';
|
||||
import { QWEN_DIR, type GeminiCLIExtension } from '@qwen-code/qwen-code-core';
|
||||
import {
|
||||
QWEN_DIR,
|
||||
type ExtensionManager,
|
||||
type Extension,
|
||||
type ExtensionUpdateInfo,
|
||||
ExtensionUpdateState,
|
||||
} from '@qwen-code/qwen-code-core';
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { MessageType } from '../types.js';
|
||||
import { ExtensionEnablementManager } from '../../config/extensions/extensionEnablement.js';
|
||||
import {
|
||||
checkForAllExtensionUpdates,
|
||||
updateExtension,
|
||||
} from '../../config/extensions/update.js';
|
||||
import { ExtensionUpdateState } from '../state/extensions.js';
|
||||
|
||||
vi.mock('os', async (importOriginal) => {
|
||||
const mockedOs = await importOriginal<typeof os>();
|
||||
@@ -33,63 +28,85 @@ vi.mock('os', async (importOriginal) => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../../config/extensions/update.js', () => ({
|
||||
checkForAllExtensionUpdates: vi.fn(),
|
||||
updateExtension: vi.fn(),
|
||||
}));
|
||||
function createMockExtension(overrides: Partial<Extension> = {}): Extension {
|
||||
return {
|
||||
id: 'test-extension-id',
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
path: '/some/path',
|
||||
isActive: true,
|
||||
config: {
|
||||
name: 'test-extension',
|
||||
version: '1.0.0',
|
||||
},
|
||||
contextFiles: [],
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createMockExtensionManager(
|
||||
extensions: Extension[],
|
||||
checkCallback?: (
|
||||
callback: (extensionName: string, state: ExtensionUpdateState) => void,
|
||||
) => Promise<void>,
|
||||
updateResult?: ExtensionUpdateInfo | undefined,
|
||||
): ExtensionManager {
|
||||
return {
|
||||
getLoadedExtensions: vi.fn(() => extensions),
|
||||
checkForAllExtensionUpdates: vi.fn(
|
||||
async (
|
||||
callback: (extensionName: string, state: ExtensionUpdateState) => void,
|
||||
) => {
|
||||
if (checkCallback) {
|
||||
await checkCallback(callback);
|
||||
}
|
||||
},
|
||||
),
|
||||
updateExtension: vi.fn(async () => updateResult),
|
||||
} as unknown as ExtensionManager;
|
||||
}
|
||||
|
||||
describe('useExtensionUpdates', () => {
|
||||
let tempHomeDir: string;
|
||||
let userExtensionsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempHomeDir = fs.mkdtempSync(
|
||||
path.join(os.tmpdir(), 'gemini-cli-test-home-'),
|
||||
);
|
||||
tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'qwen-cli-test-home-'));
|
||||
vi.mocked(os.homedir).mockReturnValue(tempHomeDir);
|
||||
userExtensionsDir = path.join(tempHomeDir, QWEN_DIR, 'extensions');
|
||||
fs.mkdirSync(userExtensionsDir, { recursive: true });
|
||||
vi.mocked(checkForAllExtensionUpdates).mockReset();
|
||||
vi.mocked(updateExtension).mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(tempHomeDir, { recursive: true, force: true });
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should check for updates and log a message if an update is available', async () => {
|
||||
const extensions = [
|
||||
{
|
||||
name: 'test-extension',
|
||||
const extension = createMockExtension({
|
||||
name: 'test-extension',
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
version: '1.0.0',
|
||||
path: '/some/path',
|
||||
isActive: true,
|
||||
installMetadata: {
|
||||
type: 'git',
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
source: 'https://some/repo',
|
||||
autoUpdate: false,
|
||||
},
|
||||
];
|
||||
});
|
||||
const addItem = vi.fn();
|
||||
const cwd = '/test/cwd';
|
||||
|
||||
vi.mocked(checkForAllExtensionUpdates).mockImplementation(
|
||||
async (extensions, dispatch) => {
|
||||
dispatch({
|
||||
type: 'SET_STATE',
|
||||
payload: {
|
||||
name: 'test-extension',
|
||||
state: ExtensionUpdateState.UPDATE_AVAILABLE,
|
||||
},
|
||||
});
|
||||
const extensionManager = createMockExtensionManager(
|
||||
[extension],
|
||||
async (callback) => {
|
||||
callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
|
||||
},
|
||||
);
|
||||
|
||||
renderHook(() =>
|
||||
useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
|
||||
);
|
||||
renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(addItem).toHaveBeenCalledWith(
|
||||
@@ -103,43 +120,32 @@ describe('useExtensionUpdates', () => {
|
||||
});
|
||||
|
||||
  it('should check for updates and automatically update if autoUpdate is true', async () => {
    const extensionDir = createExtension({
      extensionsDir: userExtensionsDir,
    const extension = createMockExtension({
      name: 'test-extension',
      version: '1.0.0',
      installMetadata: {
        source: 'https://some.git/repo',
        type: 'git',
        source: 'https://some.git/repo',
        autoUpdate: true,
      },
    });
    const extension = annotateActiveExtensions(
      [loadExtension({ extensionDir, workspaceDir: tempHomeDir })!],
      tempHomeDir,
      new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
    )[0];

    const addItem = vi.fn();

    vi.mocked(checkForAllExtensionUpdates).mockImplementation(
      async (extensions, dispatch) => {
        dispatch({
          type: 'SET_STATE',
          payload: {
            name: 'test-extension',
            state: ExtensionUpdateState.UPDATE_AVAILABLE,
          },
        });
    const extensionManager = createMockExtensionManager(
      [extension],
      async (callback) => {
        callback('test-extension', ExtensionUpdateState.UPDATE_AVAILABLE);
      },
      {
        originalVersion: '1.0.0',
        updatedVersion: '1.1.0',
        name: 'test-extension',
      },
    );

    vi.mocked(updateExtension).mockResolvedValue({
      originalVersion: '1.0.0',
      updatedVersion: '1.1.0',
      name: '',
    });

    renderHook(() => useExtensionUpdates([extension], addItem, tempHomeDir));
    renderHook(() =>
      useExtensionUpdates(extensionManager, addItem, tempHomeDir),
    );

    await waitFor(
      () => {
@@ -156,77 +162,64 @@ describe('useExtensionUpdates', () => {
  });

  it('should batch update notifications for multiple extensions', async () => {
    const extensionDir1 = createExtension({
      extensionsDir: userExtensionsDir,
    const extension1 = createMockExtension({
      id: 'test-extension-1-id',
      name: 'test-extension-1',
      version: '1.0.0',
      installMetadata: {
        source: 'https://some.git/repo1',
        type: 'git',
        source: 'https://some.git/repo1',
        autoUpdate: true,
      },
    });
    const extensionDir2 = createExtension({
      extensionsDir: userExtensionsDir,
    const extension2 = createMockExtension({
      id: 'test-extension-2-id',
      name: 'test-extension-2',
      version: '2.0.0',
      installMetadata: {
        source: 'https://some.git/repo2',
        type: 'git',
        source: 'https://some.git/repo2',
        autoUpdate: true,
      },
    });

    const extensions = annotateActiveExtensions(
      [
        loadExtension({
          extensionDir: extensionDir1,
          workspaceDir: tempHomeDir,
        })!,
        loadExtension({
          extensionDir: extensionDir2,
          workspaceDir: tempHomeDir,
        })!,
      ],
      tempHomeDir,
      new ExtensionEnablementManager(ExtensionStorage.getUserExtensionsDir()),
    );

    const addItem = vi.fn();
    let updateCallCount = 0;

    vi.mocked(checkForAllExtensionUpdates).mockImplementation(
      async (extensions, dispatch) => {
        dispatch({
          type: 'SET_STATE',
          payload: {
    const extensionManager = {
      getLoadedExtensions: vi.fn(() => [extension1, extension2]),
      checkForAllExtensionUpdates: vi.fn(
        async (
          callback: (
            extensionName: string,
            state: ExtensionUpdateState,
          ) => void,
        ) => {
          callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
          callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
        },
      ),
      updateExtension: vi.fn(async () => {
        updateCallCount++;
        if (updateCallCount === 1) {
          return {
            originalVersion: '1.0.0',
            updatedVersion: '1.1.0',
            name: 'test-extension-1',
            state: ExtensionUpdateState.UPDATE_AVAILABLE,
          },
        });
        dispatch({
          type: 'SET_STATE',
          payload: {
            name: 'test-extension-2',
            state: ExtensionUpdateState.UPDATE_AVAILABLE,
          },
        });
      },
    };
        }
        return {
          originalVersion: '2.0.0',
          updatedVersion: '2.1.0',
          name: 'test-extension-2',
        };
      }),
    } as unknown as ExtensionManager;

    renderHook(() =>
      useExtensionUpdates(extensionManager, addItem, tempHomeDir),
    );

    vi.mocked(updateExtension)
      .mockResolvedValueOnce({
        originalVersion: '1.0.0',
        updatedVersion: '1.1.0',
        name: '',
      })
      .mockResolvedValueOnce({
        originalVersion: '2.0.0',
        updatedVersion: '2.1.0',
        name: '',
      });

    renderHook(() => useExtensionUpdates(extensions, addItem, tempHomeDir));

    await waitFor(
      () => {
        expect(addItem).toHaveBeenCalledTimes(2);
@@ -250,60 +243,40 @@ describe('useExtensionUpdates', () => {
  });

  it('should batch update notifications for multiple extensions with autoUpdate: false', async () => {
    const extensions = [
      {
        name: 'test-extension-1',
    const extension1 = createMockExtension({
      id: 'test-extension-1-id',
      name: 'test-extension-1',
      version: '1.0.0',
      installMetadata: {
        type: 'git',
        version: '1.0.0',
        path: '/some/path1',
        isActive: true,
        installMetadata: {
          type: 'git',
          source: 'https://some/repo1',
          autoUpdate: false,
        },
        source: 'https://some/repo1',
        autoUpdate: false,
      },
      {
        name: 'test-extension-2',
    });
    const extension2 = createMockExtension({
      id: 'test-extension-2-id',
      name: 'test-extension-2',
      version: '2.0.0',
      installMetadata: {
        type: 'git',
        version: '2.0.0',
        path: '/some/path2',
        isActive: true,
        installMetadata: {
          type: 'git',
          source: 'https://some/repo2',
          autoUpdate: false,
        },
        source: 'https://some/repo2',
        autoUpdate: false,
      },
    ];
    });

    const addItem = vi.fn();
    const cwd = '/test/cwd';

    vi.mocked(checkForAllExtensionUpdates).mockImplementation(
      async (extensions, dispatch) => {
        dispatch({ type: 'BATCH_CHECK_START' });
        dispatch({
          type: 'SET_STATE',
          payload: {
            name: 'test-extension-1',
            state: ExtensionUpdateState.UPDATE_AVAILABLE,
          },
        });
    const extensionManager = createMockExtensionManager(
      [extension1, extension2],
      async (callback) => {
        callback('test-extension-1', ExtensionUpdateState.UPDATE_AVAILABLE);
        await new Promise((r) => setTimeout(r, 50));
        dispatch({
          type: 'SET_STATE',
          payload: {
            name: 'test-extension-2',
            state: ExtensionUpdateState.UPDATE_AVAILABLE,
          },
        });
        dispatch({ type: 'BATCH_CHECK_END' });
        callback('test-extension-2', ExtensionUpdateState.UPDATE_AVAILABLE);
      },
    );

    renderHook(() =>
      useExtensionUpdates(extensions as GeminiCLIExtension[], addItem, cwd),
    );
    renderHook(() => useExtensionUpdates(extensionManager, addItem, cwd));

    await waitFor(() => {
      expect(addItem).toHaveBeenCalledTimes(1);

@@ -4,7 +4,7 @@
 * SPDX-License-Identifier: Apache-2.0
 */

import type { GeminiCLIExtension } from '@qwen-code/qwen-code-core';
import type { ExtensionManager } from '@qwen-code/qwen-code-core';
import { getErrorMessage } from '../../utils/errors.js';
import {
  ExtensionUpdateState,
@@ -14,11 +14,6 @@ import {
import { useCallback, useEffect, useMemo, useReducer } from 'react';
import type { UseHistoryManagerReturn } from './useHistoryManager.js';
import { MessageType, type ConfirmationRequest } from '../types.js';
import {
  checkForAllExtensionUpdates,
  updateExtension,
} from '../../config/extensions/update.js';
import { requestConsentInteractive } from '../../config/extension.js';
import { checkExhaustive } from '../../utils/checks.js';

type ConfirmationRequestWrapper = {
@@ -45,15 +40,7 @@ function confirmationRequestsReducer(
  }
}

export const useExtensionUpdates = (
  extensions: GeminiCLIExtension[],
  addItem: UseHistoryManagerReturn['addItem'],
  cwd: string,
) => {
  const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
    extensionUpdatesReducer,
    initialExtensionUpdatesState,
  );
export const useConfirmUpdateRequests = () => {
  const [
    confirmUpdateExtensionRequests,
    dispatchConfirmUpdateExtensionRequests,
@@ -78,15 +65,52 @@ export const useExtensionUpdates = (
    },
    [dispatchConfirmUpdateExtensionRequests],
  );
  return {
    addConfirmUpdateExtensionRequest,
    confirmUpdateExtensionRequests,
    dispatchConfirmUpdateExtensionRequests,
  };
};

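The confirmation-request queue now lives in its own hook. A rough consumer sketch, assuming it runs inside a component or another hook and that requestConsentInteractive keeps the (description, enqueue) signature used by the removed inline code above:

```ts
function useExtensionConsent() {
  const { addConfirmUpdateExtensionRequest, confirmUpdateExtensionRequests } =
    useConfirmUpdateRequests();

  // Same wiring the removed inline code used: route consent prompts through the queue.
  const requestConsent = useCallback(
    (description: string) =>
      requestConsentInteractive(description, addConfirmUpdateExtensionRequest),
    [addConfirmUpdateExtensionRequest],
  );

  return { requestConsent, pendingRequests: confirmUpdateExtensionRequests };
}
```
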
export const useExtensionUpdates = (
  extensionManager: ExtensionManager,
  addItem: UseHistoryManagerReturn['addItem'],
  cwd: string,
) => {
  const [extensionsUpdateState, dispatchExtensionStateUpdate] = useReducer(
    extensionUpdatesReducer,
    initialExtensionUpdatesState,
  );
  const extensions = extensionManager.getLoadedExtensions();

  useEffect(() => {
    (async () => {
      await checkForAllExtensionUpdates(
        extensions,
        dispatchExtensionStateUpdate,
      const extensionsToCheck = extensions.filter((extension) => {
        const currentStatus = extensionsUpdateState.extensionStatuses.get(
          extension.name,
        );
        if (!currentStatus) return true;
        const currentState = currentStatus.status;
        return !currentState || currentState === ExtensionUpdateState.UNKNOWN;
      });
      if (extensionsToCheck.length === 0) return;
      dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_START' });
      await extensionManager.checkForAllExtensionUpdates(
        (extensionName: string, state: ExtensionUpdateState) => {
          dispatchExtensionStateUpdate({
            type: 'SET_STATE',
            payload: { name: extensionName, state },
          });
        },
      );
      dispatchExtensionStateUpdate({ type: 'BATCH_CHECK_END' });
    })();
  }, [extensions, extensions.length, dispatchExtensionStateUpdate]);
  }, [
    extensions,
    extensionManager,
    extensionsUpdateState.extensionStatuses,
    dispatchExtensionStateUpdate,
  ]);

  useEffect(() => {
    if (extensionsUpdateState.batchChecksInProgress > 0) {
@@ -113,17 +137,17 @@ export const useExtensionUpdates = (
      });

      if (extension.installMetadata?.autoUpdate) {
        updateExtension(
          extension,
          cwd,
          (description) =>
            requestConsentInteractive(
              description,
              addConfirmUpdateExtensionRequest,
            ),
          currentState.status,
          dispatchExtensionStateUpdate,
        )
        extensionManager
          .updateExtension(
            extension,
            currentState.status,
            (extensionName, state) => {
              dispatchExtensionStateUpdate({
                type: 'SET_STATE',
                payload: { name: extensionName, state },
              });
            },
          )
          .then((result) => {
            if (!result) return;
            addItem(
@@ -157,13 +181,7 @@ export const useExtensionUpdates = (
        Date.now(),
      );
    }
  }, [
    extensions,
    extensionsUpdateState,
    addConfirmUpdateExtensionRequest,
    addItem,
    cwd,
  ]);
  }, [extensions, extensionManager, extensionsUpdateState, addItem, cwd]);

  const extensionsUpdateStateComputed = useMemo(() => {
    const result = new Map<string, ExtensionUpdateState>();
@@ -180,7 +198,5 @@ export const useExtensionUpdates = (
    extensionsUpdateState: extensionsUpdateStateComputed,
    extensionsUpdateStateInternal: extensionsUpdateState.extensionStatuses,
    dispatchExtensionStateUpdate,
    confirmUpdateExtensionRequests,
    addConfirmUpdateExtensionRequest,
  };
};

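A minimal sketch of a call site migrating to the new signature. The wrapper hook and counting logic below are illustrative only; it assumes an ExtensionManager instance is already available from core:

```ts
function useUpdatableExtensionCount(
  extensionManager: ExtensionManager,
  addItem: UseHistoryManagerReturn['addItem'],
  cwd: string,
): number {
  // Old call sites passed GeminiCLIExtension[]; the new hook takes the manager
  // and pulls loaded extensions itself via getLoadedExtensions().
  const { extensionsUpdateState } = useExtensionUpdates(
    extensionManager,
    addItem,
    cwd,
  );

  // extensionsUpdateState is the computed Map<string, ExtensionUpdateState>.
  let count = 0;
  for (const state of extensionsUpdateState.values()) {
    if (state === ExtensionUpdateState.UPDATE_AVAILABLE) count++;
  }
  return count;
}
```
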
@@ -912,7 +912,7 @@ export const useGeminiStream = (
      // Reset quota error flag when starting a new query (not a continuation)
      if (!options?.isContinuation) {
        setModelSwitchedFromQuotaError(false);
        // No quota-error / fallback routing mechanism currently; keep state minimal.
        config.setQuotaErrorOccurred(false);
      }

      abortControllerRef.current = new AbortController();

@@ -1,58 +1,21 @@
/**
 * @license
 * Copyright 2025 Qwen
 * SPDX-License-Identifier: Apache-2.0
 */

import { useCallback } from 'react';
import { useStdin } from 'ink';
import type { EditorType } from '@qwen-code/qwen-code-core';
import {
  editorCommands,
  commandExists as coreCommandExists,
} from '@qwen-code/qwen-code-core';
import { spawnSync } from 'child_process';
import { useSettings } from '../contexts/SettingsContext.js';

/**
 * Cache for command existence checks to avoid repeated execSync calls.
 */
const commandExistsCache = new Map<string, boolean>();

/**
 * Check if a command exists in the system with caching.
 * Results are cached to improve performance in test environments.
 */
function commandExists(cmd: string): boolean {
  if (commandExistsCache.has(cmd)) {
    return commandExistsCache.get(cmd)!;
  }

  const exists = coreCommandExists(cmd);
  commandExistsCache.set(cmd, exists);
  return exists;
}

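The cache means the system is only probed once per command name; later lookups are served from memory. A quick illustration (the editor name is just an example):

```ts
commandExists('code'); // first call: delegates to coreCommandExists and caches the result
commandExists('code'); // later calls: answered from commandExistsCache, no second probe
```
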
/**
 * Get the actual executable command for an editor type.
 */
function getExecutableCommand(editorType: EditorType): string {
  const commandConfig = editorCommands[editorType];
  const commands =
    process.platform === 'win32' ? commandConfig.win32 : commandConfig.default;

  const availableCommand = commands.find((cmd) => commandExists(cmd));

  if (!availableCommand) {
    throw new Error(
      `No available editor command found for ${editorType}. ` +
        `Tried: ${commands.join(', ')}. ` +
        `Please install one of these editors or set a different preferredEditor in settings.`,
    );
  }

  return availableCommand;
}

/**
 * Determines the editor command to use based on user preferences and platform.
 */
function getEditorCommand(preferredEditor?: EditorType): string {
  if (preferredEditor) {
    return getExecutableCommand(preferredEditor);
    return preferredEditor;
  }

  // Platform-specific defaults with UI preference for macOS
@@ -100,14 +63,8 @@ export function useLaunchEditor() {
    try {
      setRawMode?.(false);

      // On Windows, .cmd and .bat files need shell: true
      const needsShell =
        process.platform === 'win32' &&
        (editorCommand.endsWith('.cmd') || editorCommand.endsWith('.bat'));

      const { status, error } = spawnSync(editorCommand, editorArgs, {
        stdio: 'inherit',
        shell: needsShell,
      });

      if (error) throw error;

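A standalone sketch of the same Windows quirk, using a hypothetical 'code.cmd' launcher: .cmd and .bat wrappers are batch scripts, so spawnSync needs shell: true to execute them.

```ts
import { spawnSync } from 'child_process';

const editorCommand = 'code.cmd'; // hypothetical launcher name on PATH
const needsShell =
  process.platform === 'win32' &&
  (editorCommand.endsWith('.cmd') || editorCommand.endsWith('.bat'));

const { status, error } = spawnSync(editorCommand, ['--wait', 'notes.md'], {
  stdio: 'inherit',
  shell: needsShell,
});
if (error) throw error;
console.log(`editor exited with status ${status}`);
```
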
0	packages/cli/src/ui/hooks/useTomlMigration.ts	Normal file
Some files were not shown because too many files have changed in this diff.