Compare commits

6 Commits

Author               SHA1        Date                        Message
github-actions[bot]  45c0330ac5  2025-09-23 14:38:01 +00:00  chore(release): v0.0.13-nightly.2
mingholy.lmh         e38947a62d  2025-09-23 22:18:10 +08:00  fix: failed test case
mingholy.lmh         e66bb7e717  2025-09-23 21:57:59 +08:00  docs: add vision model instructions
mingholy.lmh         490c36caeb  2025-09-23 21:57:49 +08:00  fix: use dedicated model names and settings
mingholy.lmh         e4d16adf7b  2025-09-23 19:14:26 +08:00  feat: add cli args & env variables for switch behavoir
Mingholy             85a2b8d6e0  2025-09-23 14:56:20 +08:00  feat: add yolo mode support to auto vision model switch
21 changed files with 80 additions and 175 deletions

package-lock.json (generated, 12 changed lines)
View File

@@ -1,12 +1,12 @@
{
"name": "@qwen-code/qwen-code",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@qwen-code/qwen-code",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"workspaces": [
"packages/*"
],
@@ -13454,7 +13454,7 @@
},
"packages/cli": {
"name": "@qwen-code/qwen-code",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"dependencies": {
"@google/genai": "1.9.0",
"@iarna/toml": "^2.2.5",
@@ -13662,7 +13662,7 @@
},
"packages/core": {
"name": "@qwen-code/qwen-code-core",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"dependencies": {
"@google/genai": "1.13.0",
"@lvce-editor/ripgrep": "^1.6.0",
@@ -13788,7 +13788,7 @@
},
"packages/test-utils": {
"name": "@qwen-code/qwen-code-test-utils",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"dev": true,
"license": "Apache-2.0",
"devDependencies": {
@@ -13800,7 +13800,7 @@
},
"packages/vscode-ide-companion": {
"name": "qwen-code-vscode-ide-companion",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"license": "LICENSE",
"dependencies": {
"@modelcontextprotocol/sdk": "^1.15.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@qwen-code/qwen-code",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"engines": {
"node": ">=20.0.0"
},
@@ -13,7 +13,7 @@
"url": "git+https://github.com/QwenLM/qwen-code.git"
},
"config": {
"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.0.13"
"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.0.13-nightly.2"
},
"scripts": {
"start": "node scripts/start.js",

View File

@@ -1,6 +1,6 @@
{
"name": "@qwen-code/qwen-code",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"description": "Qwen Code",
"repository": {
"type": "git",
@@ -25,7 +25,7 @@
"dist"
],
"config": {
"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.0.13"
"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.0.13-nightly.2"
},
"dependencies": {
"@google/genai": "1.9.0",

View File

@@ -566,9 +566,7 @@ const App = ({ config, settings, startupWarnings = [], version }: AppProps) => {
}
// Switch model for future use but return false to stop current retry
config.setModel(fallbackModel).catch((error) => {
console.error('Failed to switch to fallback model:', error);
});
config.setModel(fallbackModel);
config.setFallbackMode(true);
logFlashFallback(
config,
@@ -652,28 +650,17 @@ const App = ({ config, settings, startupWarnings = [], version }: AppProps) => {
}, []);
const handleModelSelect = useCallback(
async (modelId: string) => {
try {
await config.setModel(modelId);
setCurrentModel(modelId);
setIsModelSelectionDialogOpen(false);
addItem(
{
type: MessageType.INFO,
text: `Switched model to \`${modelId}\` for this session.`,
},
Date.now(),
);
} catch (error) {
console.error('Failed to switch model:', error);
addItem(
{
type: MessageType.ERROR,
text: `Failed to switch to model \`${modelId}\`. Please try again.`,
},
Date.now(),
);
}
(modelId: string) => {
config.setModel(modelId);
setCurrentModel(modelId);
setIsModelSelectionDialogOpen(false);
addItem(
{
type: MessageType.INFO,
text: `Switched model to \`${modelId}\` for this session.`,
},
Date.now(),
);
},
[config, setCurrentModel, addItem],
);

View File

@@ -60,9 +60,7 @@ const mockParseAndFormatApiError = vi.hoisted(() => vi.fn());
const mockHandleVisionSwitch = vi.hoisted(() =>
vi.fn().mockResolvedValue({ shouldProceed: true }),
);
const mockRestoreOriginalModel = vi.hoisted(() =>
vi.fn().mockResolvedValue(undefined),
);
const mockRestoreOriginalModel = vi.hoisted(() => vi.fn());
vi.mock('@qwen-code/qwen-code-core', async (importOriginal) => {
const actualCoreModule = (await importOriginal()) as any;
@@ -303,8 +301,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
);
},
{
@@ -466,8 +462,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -547,8 +541,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -657,8 +649,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -768,8 +758,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -899,8 +887,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
cancelSubmitSpy,
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1212,8 +1198,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1267,8 +1251,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1319,8 +1301,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1369,8 +1349,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1420,8 +1398,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1511,8 +1487,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1563,8 +1537,6 @@ describe('useGeminiStream', () => {
vi.fn(), // setModelSwitched
vi.fn(), // onEditorClose
vi.fn(), // onCancelSubmit
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1630,8 +1602,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1710,8 +1680,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1766,8 +1734,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -1977,8 +1943,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -2011,8 +1975,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -2066,8 +2028,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);
@@ -2105,8 +2065,6 @@ describe('useGeminiStream', () => {
() => {},
() => {},
() => {},
false, // visionModelPreviewEnabled
undefined, // onVisionSwitchRequired (optional)
),
);

View File

@@ -765,9 +765,7 @@ export const useGeminiStream = (
if (processingStatus === StreamProcessingStatus.UserCancelled) {
// Restore original model if it was temporarily overridden
restoreOriginalModel().catch((error) => {
console.error('Failed to restore original model:', error);
});
restoreOriginalModel();
isSubmittingQueryRef.current = false;
return;
}
@@ -782,14 +780,10 @@ export const useGeminiStream = (
}
// Restore original model if it was temporarily overridden
restoreOriginalModel().catch((error) => {
console.error('Failed to restore original model:', error);
});
restoreOriginalModel();
} catch (error: unknown) {
// Restore original model if it was temporarily overridden
restoreOriginalModel().catch((error) => {
console.error('Failed to restore original model:', error);
});
restoreOriginalModel();
if (error instanceof UnauthorizedError) {
onAuthError();

View File

@@ -210,7 +210,7 @@ describe('useVisionAutoSwitch hook', () => {
let currentModel = initialModel;
const mockConfig: Partial<Config> = {
getModel: vi.fn(() => currentModel),
setModel: vi.fn(async (m: string) => {
setModel: vi.fn((m: string) => {
currentModel = m;
}),
getApprovalMode: vi.fn(() => approvalMode),
@@ -335,8 +335,8 @@ describe('useVisionAutoSwitch hook', () => {
});
// Now restore
await act(async () => {
await result.current.restoreOriginalModel();
act(() => {
result.current.restoreOriginalModel();
});
expect(config.setModel).toHaveBeenLastCalledWith(initialModel, {
reason: 'vision_auto_switch',
@@ -369,8 +369,8 @@ describe('useVisionAutoSwitch hook', () => {
});
// Restore should be a no-op since no one-time override was used
await act(async () => {
await result.current.restoreOriginalModel();
act(() => {
result.current.restoreOriginalModel();
});
// Last call should still be the persisted model set
expect((config.setModel as any).mock.calls.pop()?.[0]).toBe('coder-model');
@@ -565,8 +565,8 @@ describe('useVisionAutoSwitch hook', () => {
});
// Now restore the original model
await act(async () => {
await result.current.restoreOriginalModel();
act(() => {
result.current.restoreOriginalModel();
});
// Verify model was restored

View File

@@ -256,7 +256,7 @@ export function useVisionAutoSwitch(
if (config.getApprovalMode() === ApprovalMode.YOLO) {
const vlModelId = getDefaultVisionModel();
originalModelRef.current = config.getModel();
await config.setModel(vlModelId, {
config.setModel(vlModelId, {
reason: 'vision_auto_switch',
context: 'YOLO mode auto-switch for image content',
});
@@ -292,7 +292,7 @@ export function useVisionAutoSwitch(
if (visionSwitchResult.modelOverride) {
// One-time model override
originalModelRef.current = config.getModel();
await config.setModel(visionSwitchResult.modelOverride, {
config.setModel(visionSwitchResult.modelOverride, {
reason: 'vision_auto_switch',
context: `Default VLM switch mode: ${defaultVlmSwitchMode} (one-time override)`,
});
@@ -302,7 +302,7 @@ export function useVisionAutoSwitch(
};
} else if (visionSwitchResult.persistSessionModel) {
// Persistent session model change
await config.setModel(visionSwitchResult.persistSessionModel, {
config.setModel(visionSwitchResult.persistSessionModel, {
reason: 'vision_auto_switch',
context: `Default VLM switch mode: ${defaultVlmSwitchMode} (session persistent)`,
});
@@ -319,7 +319,7 @@ export function useVisionAutoSwitch(
if (visionSwitchResult.modelOverride) {
// One-time model override
originalModelRef.current = config.getModel();
await config.setModel(visionSwitchResult.modelOverride, {
config.setModel(visionSwitchResult.modelOverride, {
reason: 'vision_auto_switch',
context: 'User-prompted vision switch (one-time override)',
});
@@ -329,7 +329,7 @@ export function useVisionAutoSwitch(
};
} else if (visionSwitchResult.persistSessionModel) {
// Persistent session model change
await config.setModel(visionSwitchResult.persistSessionModel, {
config.setModel(visionSwitchResult.persistSessionModel, {
reason: 'vision_auto_switch',
context: 'User-prompted vision switch (session persistent)',
});
@@ -346,9 +346,9 @@ export function useVisionAutoSwitch(
[config, addItem, visionModelPreviewEnabled, onVisionSwitchRequired],
);
const restoreOriginalModel = useCallback(async () => {
const restoreOriginalModel = useCallback(() => {
if (originalModelRef.current) {
await config.setModel(originalModelRef.current, {
config.setModel(originalModelRef.current, {
reason: 'vision_auto_switch',
context: 'Restoring original model after vision switch',
});
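
For orientation, the hunk above drops the `await` from the vision auto-switch: the hook records the current model in `originalModelRef`, switches synchronously, and `restoreOriginalModel` is now synchronous as well. A minimal sketch of that one-time override/restore pattern outside React (the `createVisionSwitch` wrapper and its method names are illustrative, not part of the diff):

```ts
// Simplified stand-in for the real Config; only the two methods the hook uses.
interface ModelConfig {
  getModel(): string;
  setModel(model: string, options?: { reason?: string; context?: string }): void;
}

// Illustrative wrapper (not the real hook): the same record/switch/restore
// flow, with a local variable in place of originalModelRef.
function createVisionSwitch(config: ModelConfig, visionModel: string) {
  let originalModel: string | null = null;

  return {
    // One-time override: remember the current model, then switch synchronously.
    switchForImageContent(): void {
      originalModel = config.getModel();
      config.setModel(visionModel, {
        reason: 'vision_auto_switch',
        context: 'one-time override for image content',
      });
    },
    // Also synchronous after this change; a no-op when nothing was overridden.
    restoreOriginalModel(): void {
      if (originalModel) {
        config.setModel(originalModel, {
          reason: 'vision_auto_switch',
          context: 'Restoring original model after vision switch',
        });
        originalModel = null;
      }
    },
  };
}
```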

View File

@@ -9,6 +9,7 @@ import { describe, it, expect, vi, beforeEach } from 'vitest';
import { MarkdownDisplay } from './MarkdownDisplay.js';
import { LoadedSettings } from '../../config/settings.js';
import { SettingsContext } from '../contexts/SettingsContext.js';
import { EOL } from 'node:os';
describe('<MarkdownDisplay />', () => {
const baseProps = {
@@ -56,7 +57,7 @@ describe('<MarkdownDisplay />', () => {
## Header 2
### Header 3
#### Header 4
`;
`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -66,7 +67,10 @@ describe('<MarkdownDisplay />', () => {
});
it('renders a fenced code block with a language', () => {
const text = '```javascript\nconst x = 1;\nconsole.log(x);\n```';
const text = '```javascript\nconst x = 1;\nconsole.log(x);\n```'.replace(
/\n/g,
EOL,
);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -76,7 +80,7 @@ describe('<MarkdownDisplay />', () => {
});
it('renders a fenced code block without a language', () => {
const text = '```\nplain text\n```';
const text = '```\nplain text\n```'.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -86,7 +90,7 @@ describe('<MarkdownDisplay />', () => {
});
it('handles unclosed (pending) code blocks', () => {
const text = '```typescript\nlet y = 2;';
const text = '```typescript\nlet y = 2;'.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} isPending={true} />
@@ -100,7 +104,7 @@ describe('<MarkdownDisplay />', () => {
- item A
* item B
+ item C
`;
`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -114,7 +118,7 @@ describe('<MarkdownDisplay />', () => {
* Level 1
* Level 2
* Level 3
`;
`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -127,7 +131,7 @@ describe('<MarkdownDisplay />', () => {
const text = `
1. First item
2. Second item
`;
`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -143,7 +147,7 @@ Hello
World
***
Test
`;
`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -158,7 +162,7 @@ Test
|----------|:--------:|
| Cell 1 | Cell 2 |
| Cell 3 | Cell 4 |
`;
`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -172,7 +176,7 @@ Test
Some text before.
| A | B |
|---|
| 1 | 2 |`;
| 1 | 2 |`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -184,7 +188,7 @@ Some text before.
it('inserts a single space between paragraphs', () => {
const text = `Paragraph 1.
Paragraph 2.`;
Paragraph 2.`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -207,7 +211,7 @@ some code
\`\`\`
Another paragraph.
`;
`.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -217,7 +221,7 @@ Another paragraph.
});
it('hides line numbers in code blocks when showLineNumbers is false', () => {
const text = '```javascript\nconst x = 1;\n```';
const text = '```javascript\nconst x = 1;\n```'.replace(/\n/g, EOL);
const settings = new LoadedSettings(
{ path: '', settings: {} },
{ path: '', settings: {} },
@@ -238,7 +242,7 @@ Another paragraph.
});
it('shows line numbers in code blocks by default', () => {
const text = '```javascript\nconst x = 1;\n```';
const text = '```javascript\nconst x = 1;\n```'.replace(/\n/g, EOL);
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={text} />
@@ -247,21 +251,4 @@ Another paragraph.
expect(lastFrame()).toMatchSnapshot();
expect(lastFrame()).toContain(' 1 ');
});
it('correctly splits lines using \\n regardless of platform EOL', () => {
// Test that the component uses \n for splitting, not EOL
const textWithUnixLineEndings = 'Line 1\nLine 2\nLine 3';
const { lastFrame } = render(
<SettingsContext.Provider value={mockSettings}>
<MarkdownDisplay {...baseProps} text={textWithUnixLineEndings} />
</SettingsContext.Provider>,
);
const output = lastFrame();
expect(output).toContain('Line 1');
expect(output).toContain('Line 2');
expect(output).toContain('Line 3');
expect(output).toMatchSnapshot();
});
});

View File

@@ -6,6 +6,7 @@
import React from 'react';
import { Text, Box } from 'ink';
import { EOL } from 'node:os';
import { Colors } from '../colors.js';
import { colorizeCode } from './CodeColorizer.js';
import { TableRenderer } from './TableRenderer.js';
@@ -34,7 +35,7 @@ const MarkdownDisplayInternal: React.FC<MarkdownDisplayProps> = ({
}) => {
if (!text) return <></>;
const lines = text.split(`\n`);
const lines = text.split(EOL);
const headerRegex = /^ *(#{1,4}) +(.*)/;
const codeFenceRegex = /^ *(`{3,}|~{3,}) *(\w*?) *$/;
const ulItemRegex = /^([ \t]*)([-*+]) +(.*)/;
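
For context, the hunk above makes `MarkdownDisplay` split on the platform terminator from `node:os` instead of a literal `'\n'`, which is why the test fixtures earlier in this diff normalize their strings with `.replace(/\n/g, EOL)` and the platform-EOL test plus its snapshot are removed. A small, self-contained illustration of the behavioral difference (not code from the repo):

```ts
import { EOL } from 'node:os';

// The component now splits on the platform terminator rather than '\n'.
// On POSIX (EOL === '\n') both logs print ['Line 1', 'Line 2'];
// on Windows (EOL === '\r\n') the first prints ['Line 1\nLine 2'] unchanged,
// which is why the fixtures above normalize with .replace(/\n/g, EOL).
const unixText = 'Line 1\nLine 2';
console.log(unixText.split(EOL));
console.log(unixText.replace(/\n/g, EOL).split(EOL));
```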

View File

@@ -14,12 +14,6 @@ Another paragraph.
"
`;
exports[`<MarkdownDisplay /> > correctly splits lines using \\n regardless of platform EOL 1`] = `
"Line 1
Line 2
Line 3"
`;
exports[`<MarkdownDisplay /> > handles a table at the end of the input 1`] = `
"Some text before.
| A | B |

View File

@@ -1,6 +1,6 @@
{
"name": "@qwen-code/qwen-code-core",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"description": "Qwen Code Core",
"repository": {
"type": "git",

View File

@@ -755,7 +755,7 @@ describe('setApprovalMode with folder trust', () => {
const logModelSwitchSpy = vi.spyOn(config['logger']!, 'logModelSwitch');
// Change the model
await config.setModel('qwen-vl-max-latest', {
config.setModel('qwen-vl-max-latest', {
reason: 'vision_auto_switch',
context: 'Test model switch',
});
@@ -785,7 +785,7 @@ describe('setApprovalMode with folder trust', () => {
const logModelSwitchSpy = vi.spyOn(config['logger']!, 'logModelSwitch');
// Set the same model
await config.setModel('qwen3-coder-plus');
config.setModel('qwen3-coder-plus');
// Verify that logModelSwitch was not called
expect(logModelSwitchSpy).not.toHaveBeenCalled();
@@ -807,7 +807,7 @@ describe('setApprovalMode with folder trust', () => {
const logModelSwitchSpy = vi.spyOn(config['logger']!, 'logModelSwitch');
// Change the model without options
await config.setModel('qwen-vl-max-latest');
config.setModel('qwen-vl-max-latest');
// Verify that logModelSwitch was called with default reason
expect(logModelSwitchSpy).toHaveBeenCalledWith({

View File

@@ -528,13 +528,13 @@ export class Config {
return this.contentGeneratorConfig?.model || this.model;
}
async setModel(
setModel(
newModel: string,
options?: {
reason?: ModelSwitchEvent['reason'];
context?: string;
},
): Promise<void> {
): void {
const oldModel = this.getModel();
if (this.contentGeneratorConfig) {
@@ -559,16 +559,13 @@ export class Config {
// Reinitialize chat with updated configuration while preserving history
const geminiClient = this.getGeminiClient();
if (geminiClient && geminiClient.isInitialized()) {
// Now await the reinitialize operation to ensure completion
try {
await geminiClient.reinitialize();
} catch (error) {
// Use async operation but don't await to avoid blocking
geminiClient.reinitialize().catch((error) => {
console.error(
'Failed to reinitialize chat with updated config:',
error,
);
throw error; // Re-throw to let callers handle the error
}
});
}
}
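
Taken together with the callers changed earlier in this diff, the hunk above turns `Config.setModel` from an awaited `async` method into a synchronous one whose chat reinitialization runs in the background and only logs failures. A minimal sketch of the resulting contract, with the class reduced to the relevant pieces and `ChatClient` standing in for the real Gemini client (an illustration, not the actual implementation):

```ts
// Stand-in for the real Gemini client; only the members setModel touches.
interface ChatClient {
  isInitialized(): boolean;
  reinitialize(): Promise<void>;
}

class MinimalConfig {
  private model = 'qwen3-coder-plus';

  constructor(private client?: ChatClient) {}

  getModel(): string {
    return this.model;
  }

  // Synchronous now: callers no longer await, and reinitialization errors
  // are logged in the background instead of being re-thrown to callers.
  // (options feeds model-switch telemetry in the real class; ignored here.)
  setModel(newModel: string, options?: { reason?: string; context?: string }): void {
    this.model = newModel;
    const client = this.client;
    if (client && client.isInitialized()) {
      client.reinitialize().catch((error) => {
        console.error('Failed to reinitialize chat with updated config:', error);
      });
    }
  }
}

// Caller side, as in the fallback paths above:
//   config.setModel(fallbackModel);
//   config.setFallbackMode(true);
```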

View File

@@ -41,7 +41,7 @@ describe('Flash Model Fallback Configuration', () => {
// with the fallback mechanism. This will be necessary we introduce more
// intelligent model routing.
describe('setModel', () => {
it('should only mark as switched if contentGeneratorConfig exists', async () => {
it('should only mark as switched if contentGeneratorConfig exists', () => {
// Create config without initializing contentGeneratorConfig
const newConfig = new Config({
sessionId: 'test-session-2',
@@ -52,15 +52,15 @@ describe('Flash Model Fallback Configuration', () => {
});
// Should not crash when contentGeneratorConfig is undefined
await newConfig.setModel(DEFAULT_GEMINI_FLASH_MODEL);
newConfig.setModel(DEFAULT_GEMINI_FLASH_MODEL);
expect(newConfig.isInFallbackMode()).toBe(false);
});
});
describe('getModel', () => {
it('should return contentGeneratorConfig model if available', async () => {
it('should return contentGeneratorConfig model if available', () => {
// Simulate initialized content generator config
await config.setModel(DEFAULT_GEMINI_FLASH_MODEL);
config.setModel(DEFAULT_GEMINI_FLASH_MODEL);
expect(config.getModel()).toBe(DEFAULT_GEMINI_FLASH_MODEL);
});
@@ -88,8 +88,8 @@ describe('Flash Model Fallback Configuration', () => {
expect(config.isInFallbackMode()).toBe(false);
});
it('should persist switched state throughout session', async () => {
await config.setModel(DEFAULT_GEMINI_FLASH_MODEL);
it('should persist switched state throughout session', () => {
config.setModel(DEFAULT_GEMINI_FLASH_MODEL);
// Setting state for fallback mode as is expected of clients
config.setFallbackMode(true);
expect(config.isInFallbackMode()).toBe(true);

View File

@@ -1053,7 +1053,7 @@ export class GeminiClient {
error,
);
if (accepted !== false && accepted !== null) {
await this.config.setModel(fallbackModel);
this.config.setModel(fallbackModel);
this.config.setFallbackMode(true);
return fallbackModel;
}

View File

@@ -224,7 +224,7 @@ export class GeminiChat {
error,
);
if (accepted !== false && accepted !== null) {
await this.config.setModel(fallbackModel);
this.config.setModel(fallbackModel);
this.config.setFallbackMode(true);
return fallbackModel;
}

View File

@@ -72,19 +72,6 @@ async function createMockConfig(
} as unknown as ToolRegistry;
vi.spyOn(config, 'getToolRegistry').mockReturnValue(mockToolRegistry);
// Mock getContentGeneratorConfig to return a valid config
vi.spyOn(config, 'getContentGeneratorConfig').mockReturnValue({
model: DEFAULT_GEMINI_MODEL,
authType: AuthType.USE_GEMINI,
});
// Mock setModel method
vi.spyOn(config, 'setModel').mockResolvedValue();
// Mock getSessionId method
vi.spyOn(config, 'getSessionId').mockReturnValue('test-session');
return { config, toolRegistry: mockToolRegistry };
}

View File

@@ -826,7 +826,7 @@ export class SubAgentScope {
);
if (this.modelConfig.model) {
await this.runtimeContext.setModel(this.modelConfig.model);
this.runtimeContext.setModel(this.modelConfig.model);
}
return new GeminiChat(

View File

@@ -1,6 +1,6 @@
{
"name": "@qwen-code/qwen-code-test-utils",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"private": true,
"main": "src/index.ts",
"license": "Apache-2.0",

View File

@@ -2,7 +2,7 @@
"name": "qwen-code-vscode-ide-companion",
"displayName": "Qwen Code Companion",
"description": "Enable Qwen Code with direct access to your VS Code workspace.",
"version": "0.0.13",
"version": "0.0.13-nightly.2",
"publisher": "qwenlm",
"icon": "assets/icon.png",
"repository": {