Mirror of https://github.com/QwenLM/qwen-code.git (synced 2026-01-17 06:19:13 +00:00)

Compare commits: mingholy/f... → feat/add-u... (8 commits)

| SHA1 |
|---|
| 45236b6ec5 |
| 9e8724a749 |
| d91e372c72 |
| 9325721811 |
| 56391b11ad |
| e748532e6d |
| d095a8b3f1 |
| f7585153b7 |
@@ -434,6 +434,16 @@ const SETTINGS_SCHEMA = {
        'Show welcome back dialog when returning to a project with conversation history.',
      showInDialog: true,
    },
    enableUserFeedback: {
      type: 'boolean',
      label: 'Enable User Feedback',
      category: 'UI',
      requiresRestart: false,
      default: true,
      description:
        'Show optional feedback dialog after conversations to help improve Qwen performance.',
      showInDialog: true,
    },
    accessibility: {
      type: 'object',
      label: 'Accessibility',
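The schema entry above surfaces in user settings under the UI section; later in this change the hook reads it as `settings.merged.ui?.enableUserFeedback` and treats anything other than an explicit `false` as enabled. A minimal TypeScript sketch of that opt-out check, using an illustrative settings shape rather than the project's real `LoadedSettings` type:

```typescript
// Illustrative types only; the real LoadedSettings type lives in
// packages/cli/src/config/settings.ts.
interface UiSettings {
  enableUserFeedback?: boolean;
}

interface MergedSettings {
  ui?: UiSettings;
}

// Mirrors the gate used by useFeedbackDialog below: the dialog stays
// eligible unless the user has explicitly opted out with `false`.
function isUserFeedbackEnabled(merged: MergedSettings): boolean {
  return merged.ui?.enableUserFeedback !== false;
}

console.log(isUserFeedbackEnabled({}));                                    // true (default stays on)
console.log(isUserFeedbackEnabled({ ui: { enableUserFeedback: false } })); // false (explicit opt-out)
```

In other words, users who never touch the setting keep the default (`true`), and only an explicit `false` suppresses the dialog.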
@@ -289,6 +289,12 @@ export default {
  'Show Citations': 'Quellenangaben anzeigen',
  'Custom Witty Phrases': 'Benutzerdefinierte Witzige Sprüche',
  'Enable Welcome Back': 'Willkommen-zurück aktivieren',
  'Enable User Feedback': 'Benutzerfeedback aktivieren',
  'How is Qwen doing this session? (optional)':
    'Wie macht sich Qwen in dieser Sitzung? (optional)',
  Bad: 'Schlecht',
  Good: 'Gut',
  'Not Sure Yet': 'Noch nicht sicher',
  'Disable Loading Phrases': 'Ladesprüche deaktivieren',
  'Screen Reader Mode': 'Bildschirmleser-Modus',
  'IDE Mode': 'IDE-Modus',

@@ -286,6 +286,12 @@ export default {
  'Show Citations': 'Show Citations',
  'Custom Witty Phrases': 'Custom Witty Phrases',
  'Enable Welcome Back': 'Enable Welcome Back',
  'Enable User Feedback': 'Enable User Feedback',
  'How is Qwen doing this session? (optional)':
    'How is Qwen doing this session? (optional)',
  Bad: 'Bad',
  Good: 'Good',
  'Not Sure Yet': 'Not Sure Yet',
  'Disable Loading Phrases': 'Disable Loading Phrases',
  'Screen Reader Mode': 'Screen Reader Mode',
  'IDE Mode': 'IDE Mode',

@@ -289,6 +289,12 @@ export default {
  'Show Citations': 'Показывать цитаты',
  'Custom Witty Phrases': 'Пользовательские остроумные фразы',
  'Enable Welcome Back': 'Включить приветствие при возврате',
  'Enable User Feedback': 'Включить отзывы пользователей',
  'How is Qwen doing this session? (optional)':
    'Как дела у Qwen в этой сессии? (необязательно)',
  Bad: 'Плохо',
  Good: 'Хорошо',
  'Not Sure Yet': 'Пока не уверен',
  'Disable Loading Phrases': 'Отключить фразы при загрузке',
  'Screen Reader Mode': 'Режим программы чтения с экрана',
  'IDE Mode': 'Режим IDE',

@@ -277,6 +277,11 @@ export default {
  'Show Citations': '显示引用',
  'Custom Witty Phrases': '自定义诙谐短语',
  'Enable Welcome Back': '启用欢迎回来',
  'Enable User Feedback': '启用用户反馈',
  'How is Qwen doing this session? (optional)': 'Qwen 这次表现如何?(可选)',
  Bad: '不满意',
  Good: '满意',
  'Not Sure Yet': '暂不评价',
  'Disable Loading Phrases': '禁用加载短语',
  'Screen Reader Mode': '屏幕阅读器模式',
  'IDE Mode': 'IDE 模式',
@@ -45,6 +45,7 @@ import process from 'node:process';
import { useHistory } from './hooks/useHistoryManager.js';
import { useMemoryMonitor } from './hooks/useMemoryMonitor.js';
import { useThemeCommand } from './hooks/useThemeCommand.js';
import { useFeedbackDialog } from './hooks/useFeedbackDialog.js';
import { useAuthCommand } from './auth/useAuth.js';
import { useEditorSettings } from './hooks/useEditorSettings.js';
import { useSettingsCommand } from './hooks/useSettingsCommand.js';

@@ -1196,6 +1197,20 @@ export const AppContainer = (props: AppContainerProps) => {
    isApprovalModeDialogOpen ||
    isResumeDialogOpen;

  const {
    isFeedbackDialogOpen,
    openFeedbackDialog,
    closeFeedbackDialog,
    submitFeedback,
  } = useFeedbackDialog({
    config,
    settings,
    streamingState,
    history: historyManager.history,
    sessionStats,
    dialogsVisible,
  });

  const pendingHistoryItems = useMemo(
    () => [...pendingSlashCommandHistoryItems, ...pendingGeminiHistoryItems],
    [pendingSlashCommandHistoryItems, pendingGeminiHistoryItems],

@@ -1292,6 +1307,8 @@ export const AppContainer = (props: AppContainerProps) => {
      // Subagent dialogs
      isSubagentCreateDialogOpen,
      isAgentsManagerDialogOpen,
      // Feedback dialog
      isFeedbackDialogOpen,
    }),
    [
      isThemeDialogOpen,

@@ -1382,6 +1399,8 @@ export const AppContainer = (props: AppContainerProps) => {
      // Subagent dialogs
      isSubagentCreateDialogOpen,
      isAgentsManagerDialogOpen,
      // Feedback dialog
      isFeedbackDialogOpen,
    ],
  );

@@ -1422,6 +1441,10 @@ export const AppContainer = (props: AppContainerProps) => {
      openResumeDialog,
      closeResumeDialog,
      handleResume,
      // Feedback dialog
      openFeedbackDialog,
      closeFeedbackDialog,
      submitFeedback,
    }),
    [
      handleThemeSelect,

@@ -1457,6 +1480,10 @@ export const AppContainer = (props: AppContainerProps) => {
      openResumeDialog,
      closeResumeDialog,
      handleResume,
      // Feedback dialog
      openFeedbackDialog,
      closeFeedbackDialog,
      submitFeedback,
    ],
  );
packages/cli/src/ui/FeedbackDialog.tsx (new file, 63 lines)
@@ -0,0 +1,63 @@
import { Box, Text } from 'ink';
import type React from 'react';
import { t } from '../i18n/index.js';
import { useUIActions } from './contexts/UIActionsContext.js';
import { useUIState } from './contexts/UIStateContext.js';
import { useKeypress } from './hooks/useKeypress.js';

const FEEDBACK_OPTIONS = {
  GOOD: 1,
  BAD: 2,
  NOT_SURE: 3,
} as const;

const FEEDBACK_OPTION_KEYS = {
  [FEEDBACK_OPTIONS.GOOD]: '1',
  [FEEDBACK_OPTIONS.BAD]: '2',
  [FEEDBACK_OPTIONS.NOT_SURE]: '3',
} as const;

export const FEEDBACK_DIALOG_KEYS = ['1', '2', '3'] as const;

export const FeedbackDialog: React.FC = () => {
  const uiState = useUIState();
  const uiActions = useUIActions();

  useKeypress(
    (key) => {
      if (key.name === FEEDBACK_OPTION_KEYS[FEEDBACK_OPTIONS.GOOD]) {
        uiActions.submitFeedback(FEEDBACK_OPTIONS.GOOD);
      } else if (key.name === FEEDBACK_OPTION_KEYS[FEEDBACK_OPTIONS.BAD]) {
        uiActions.submitFeedback(FEEDBACK_OPTIONS.BAD);
      } else if (key.name === FEEDBACK_OPTION_KEYS[FEEDBACK_OPTIONS.NOT_SURE]) {
        uiActions.submitFeedback(FEEDBACK_OPTIONS.NOT_SURE);
      }

      uiActions.closeFeedbackDialog();
    },
    { isActive: uiState.isFeedbackDialogOpen },
  );

  return (
    <Box flexDirection="column" marginY={1}>
      <Box>
        <Text color="cyan">● </Text>
        <Text bold>{t('How is Qwen doing this session? (optional)')}</Text>
      </Box>
      <Box marginTop={1}>
        <Text color="cyan">
          {FEEDBACK_OPTION_KEYS[FEEDBACK_OPTIONS.GOOD]}:{' '}
        </Text>
        <Text>{t('Good')}</Text>
        <Text> </Text>
        <Text color="cyan">{FEEDBACK_OPTION_KEYS[FEEDBACK_OPTIONS.BAD]}: </Text>
        <Text>{t('Bad')}</Text>
        <Text> </Text>
        <Text color="cyan">
          {FEEDBACK_OPTION_KEYS[FEEDBACK_OPTIONS.NOT_SURE]}:{' '}
        </Text>
        <Text>{t('Not Sure Yet')}</Text>
      </Box>
    </Box>
  );
};
@@ -26,6 +26,7 @@ import { useSettings } from '../contexts/SettingsContext.js';
import { ApprovalMode } from '@qwen-code/qwen-code-core';
import { StreamingState } from '../types.js';
import { ConfigInitDisplay } from '../components/ConfigInitDisplay.js';
import { FeedbackDialog } from '../FeedbackDialog.js';
import { t } from '../../i18n/index.js';

export const Composer = () => {

@@ -134,6 +135,8 @@ export const Composer = () => {
        </OverflowProvider>
      )}

      {uiState.isFeedbackDialogOpen && <FeedbackDialog />}

      {uiState.isInputActive && (
        <InputPrompt
          buffer={uiState.buffer}
@@ -33,6 +33,9 @@ vi.mock('../hooks/useCommandCompletion.js');
vi.mock('../hooks/useInputHistory.js');
vi.mock('../hooks/useReverseSearchCompletion.js');
vi.mock('../utils/clipboardUtils.js');
vi.mock('../contexts/UIStateContext.js', () => ({
  useUIState: vi.fn(() => ({ isFeedbackDialogOpen: false })),
}));

const mockSlashCommands: SlashCommand[] = [
  {
@@ -36,6 +36,8 @@ import {
import * as path from 'node:path';
import { SCREEN_READER_USER_PREFIX } from '../textConstants.js';
import { useShellFocusState } from '../contexts/ShellFocusContext.js';
import { useUIState } from '../contexts/UIStateContext.js';
import { FEEDBACK_DIALOG_KEYS } from '../FeedbackDialog.js';
export interface InputPromptProps {
  buffer: TextBuffer;
  onSubmit: (value: string) => void;

@@ -100,6 +102,7 @@ export const InputPrompt: React.FC<InputPromptProps> = ({
  isEmbeddedShellFocused,
}) => {
  const isShellFocused = useShellFocusState();
  const uiState = useUIState();
  const [justNavigatedHistory, setJustNavigatedHistory] = useState(false);
  const [escPressCount, setEscPressCount] = useState(0);
  const [showEscapePrompt, setShowEscapePrompt] = useState(false);

@@ -326,6 +329,14 @@ export const InputPrompt: React.FC<InputPromptProps> = ({
        return;
      }

      // Intercept feedback dialog option keys (1, 2, 3) when dialog is open
      if (
        uiState.isFeedbackDialogOpen &&
        (FEEDBACK_DIALOG_KEYS as readonly string[]).includes(key.name)
      ) {
        return;
      }

      // Reset ESC count and hide prompt on any non-ESC key
      if (key.name !== 'escape') {
        if (escPressCount > 0 || showEscapePrompt) {

@@ -670,6 +681,7 @@ export const InputPrompt: React.FC<InputPromptProps> = ({
      recentPasteTime,
      commandSearchActive,
      commandSearchCompletion,
      uiState,
    ],
  );
@@ -66,6 +66,10 @@ export interface UIActions {
  openResumeDialog: () => void;
  closeResumeDialog: () => void;
  handleResume: (sessionId: string) => void;
  // Feedback dialog
  openFeedbackDialog: () => void;
  closeFeedbackDialog: () => void;
  submitFeedback: (rating: number) => void;
}

export const UIActionsContext = createContext<UIActions | null>(null);

@@ -126,6 +126,8 @@ export interface UIState {
  // Subagent dialogs
  isSubagentCreateDialogOpen: boolean;
  isAgentsManagerDialogOpen: boolean;
  // Feedback dialog
  isFeedbackDialogOpen: boolean;
}

export const UIStateContext = createContext<UIState | null>(null);
packages/cli/src/ui/hooks/useFeedbackDialog.ts (new file, 250 lines)
@@ -0,0 +1,250 @@
import { useState, useCallback, useEffect } from 'react';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import {
  type Config,
  logUserFeedback,
  UserFeedbackEvent,
  type UserFeedbackRating,
  Storage,
  isNodeError,
} from '@qwen-code/qwen-code-core';
import { StreamingState, MessageType, type HistoryItem } from '../types.js';
import type { LoadedSettings } from '../../config/settings.js';
import type { SessionStatsState } from '../contexts/SessionContext.js';

const FEEDBACK_SHOW_PROBABILITY = 0.25; // 25% probability of showing feedback dialog
const MIN_TOOL_CALLS = 10; // Minimum tool calls to show feedback dialog
const MIN_USER_MESSAGES = 5; // Minimum user messages to show feedback dialog

// Fatigue mechanism constants
const FEEDBACK_COOLDOWN_HOURS = 24; // Hours to wait before showing feedback dialog again
const FEEDBACK_HISTORY_FILENAME = 'feedback-history.json';

/**
 * Check if there's an AI response after the last user message in the conversation history
 */
const hasAIResponseAfterLastUserMessage = (history: HistoryItem[]): boolean => {
  // Find the last user message
  let lastUserMessageIndex = -1;
  for (let i = history.length - 1; i >= 0; i--) {
    if (history[i].type === MessageType.USER) {
      lastUserMessageIndex = i;
      break;
    }
  }

  // Check if there's any AI response (GEMINI message) after the last user message
  if (lastUserMessageIndex !== -1) {
    for (let i = lastUserMessageIndex + 1; i < history.length; i++) {
      if (history[i].type === MessageType.GEMINI) {
        return true;
      }
    }
  }

  return false;
};

/**
 * Count the number of user messages in the conversation history
 */
const countUserMessages = (history: HistoryItem[]): number =>
  history.filter((item) => item.type === MessageType.USER).length;

/**
 * Interface for feedback history storage
 */
interface FeedbackHistory {
  lastShownTimestamp: number;
}

/**
 * Get the feedback history file path using global Storage
 */
function getFeedbackHistoryPath(): string {
  const globalQwenDir = Storage.getGlobalQwenDir();
  return path.join(globalQwenDir, FEEDBACK_HISTORY_FILENAME);
}

/**
 * Get the last feedback dialog show time from file storage
 */
const getFeedbackHistory = async (): Promise<FeedbackHistory | null> => {
  try {
    const filePath = getFeedbackHistoryPath();
    const content = await fs.readFile(filePath, 'utf-8');
    return JSON.parse(content) as FeedbackHistory;
  } catch (error) {
    if (isNodeError(error) && error.code === 'ENOENT') {
      // File doesn't exist yet, which is normal for first time
      return null;
    }
    console.warn('Failed to read feedback history from file:', error);
    return null;
  }
};

/**
 * Save feedback history to file storage
 */
const saveFeedbackHistory = async (history: FeedbackHistory): Promise<void> => {
  try {
    const filePath = getFeedbackHistoryPath();

    // Ensure the directory exists
    await fs.mkdir(path.dirname(filePath), { recursive: true });

    // Write the history file
    await fs.writeFile(filePath, JSON.stringify(history, null, 2), 'utf-8');
  } catch (error) {
    console.warn('Failed to save feedback history to file:', error);
  }
};

/**
 * Check if we should show the feedback dialog based on fatigue mechanism
 */
const shouldShowFeedbackBasedOnFatigue = async (): Promise<boolean> => {
  const history = await getFeedbackHistory();
  if (!history) return true; // No history, allow showing

  const now = Date.now();
  const timeSinceLastShown = now - history.lastShownTimestamp;
  const cooldownMs = FEEDBACK_COOLDOWN_HOURS * 60 * 60 * 1000;

  return timeSinceLastShown >= cooldownMs;
};

export interface UseFeedbackDialogProps {
  config: Config;
  settings: LoadedSettings;
  streamingState: StreamingState;
  history: HistoryItem[];
  sessionStats: SessionStatsState;
  dialogsVisible: boolean;
}

export const useFeedbackDialog = ({
  config,
  settings,
  streamingState,
  history,
  sessionStats,
  dialogsVisible,
}: UseFeedbackDialogProps) => {
  // Feedback dialog state
  const [isFeedbackDialogOpen, setIsFeedbackDialogOpen] = useState(false);

  const openFeedbackDialog = useCallback(() => {
    setIsFeedbackDialogOpen(true);

    // Record the timestamp when feedback dialog is shown (fire and forget)
    saveFeedbackHistory({
      lastShownTimestamp: Date.now(),
    });
  }, []);

  const closeFeedbackDialog = useCallback(
    () => setIsFeedbackDialogOpen(false),
    [],
  );

  const submitFeedback = useCallback(
    (rating: number) => {
      // Calculate session duration and turn count
      const sessionDurationMs =
        Date.now() - sessionStats.sessionStartTime.getTime();
      let lastUserMessageIndex = -1;
      for (let i = history.length - 1; i >= 0; i--) {
        if (history[i].type === MessageType.USER) {
          lastUserMessageIndex = i;
          break;
        }
      }
      const turnCount =
        lastUserMessageIndex === -1 ? 0 : history.length - lastUserMessageIndex;

      // Create and log the feedback event
      const feedbackEvent = new UserFeedbackEvent(
        sessionStats.sessionId,
        rating as UserFeedbackRating,
        sessionDurationMs,
        turnCount,
        config.getModel(),
        config.getApprovalMode(),
      );

      logUserFeedback(config, feedbackEvent);
      closeFeedbackDialog();
    },
    [config, sessionStats, history, closeFeedbackDialog],
  );

  useEffect(() => {
    const checkAndShowFeedback = async () => {
      if (streamingState === StreamingState.Idle && history.length > 0) {
        const hasAIResponseAfterLastUser =
          hasAIResponseAfterLastUserMessage(history);

        // Get tool calls count and user messages count
        const toolCallsCount = sessionStats.metrics.tools.totalCalls;
        const userMessagesCount = countUserMessages(history);

        // Check if the session meets the minimum requirements:
        // Either tool calls > 10 OR user messages > 5
        const meetsMinimumRequirements =
          toolCallsCount > MIN_TOOL_CALLS ||
          userMessagesCount > MIN_USER_MESSAGES;

        // Check fatigue mechanism (async)
        let passedFatigueCheck = false;
        try {
          passedFatigueCheck = await shouldShowFeedbackBasedOnFatigue();
        } catch (error) {
          console.warn('Failed to check feedback fatigue:', error);
        }

        // Show feedback dialog if:
        // 1. Qwen logger is enabled (required for feedback submission)
        // 2. User feedback is enabled in settings
        // 3. There's an AI response after the last user message (real AI conversation)
        // 4. No other dialogs are currently visible
        // 5. Random chance (25% probability)
        // 6. Meets minimum requirements (tool calls > 10 OR user messages > 5)
        // 7. Fatigue mechanism allows showing (not shown recently across sessions)
        if (
          config.getUsageStatisticsEnabled() &&
          settings.merged.ui?.enableUserFeedback !== false &&
          hasAIResponseAfterLastUser &&
          !dialogsVisible &&
          Math.random() < FEEDBACK_SHOW_PROBABILITY &&
          meetsMinimumRequirements &&
          passedFatigueCheck
        ) {
          openFeedbackDialog();
        }
      }
    };

    checkAndShowFeedback().catch((error) => {
      console.warn('Error in feedback check:', error);
    });
  }, [
    streamingState,
    history,
    sessionStats,
    isFeedbackDialogOpen,
    openFeedbackDialog,
    settings.merged.ui?.enableUserFeedback,
    config,
    dialogsVisible,
  ]);

  return {
    isFeedbackDialogOpen,
    openFeedbackDialog,
    closeFeedbackDialog,
    submitFeedback,
  };
};
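The "fatigue" guard above is just a timestamp file plus a 24-hour comparison: `feedback-history.json` records when the dialog was last shown, and the dialog is suppressed until the cooldown elapses. A standalone sketch of that logic, assuming the same `{ lastShownTimestamp }` file shape; the directory argument here stands in for `Storage.getGlobalQwenDir()`, which the real hook uses:

```typescript
import * as fs from 'node:fs/promises';
import * as path from 'node:path';

// Same shape the hook writes: a single epoch-millisecond timestamp.
interface FeedbackHistory {
  lastShownTimestamp: number;
}

const COOLDOWN_MS = 24 * 60 * 60 * 1000; // FEEDBACK_COOLDOWN_HOURS = 24

// Illustrative reader; the real hook resolves the directory via
// Storage.getGlobalQwenDir() from @qwen-code/qwen-code-core.
async function readHistory(dir: string): Promise<FeedbackHistory | null> {
  try {
    const raw = await fs.readFile(path.join(dir, 'feedback-history.json'), 'utf-8');
    return JSON.parse(raw) as FeedbackHistory;
  } catch {
    return null; // A missing or unreadable file means "never shown".
  }
}

// True when the dialog has never been shown or the 24h cooldown has elapsed.
async function cooldownElapsed(dir: string): Promise<boolean> {
  const history = await readHistory(dir);
  if (!history) return true;
  return Date.now() - history.lastShownTimestamp >= COOLDOWN_MS;
}
```

Because `openFeedbackDialog` rewrites the timestamp each time it fires (fire and forget), the prompt appears at most once per 24 hours across sessions, on top of the 25% random gate and the minimum-activity thresholds.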
@@ -28,7 +28,6 @@ type RawMessageStreamEvent = Anthropic.RawMessageStreamEvent;
import { getDefaultTokenizer } from '../../utils/request-tokenizer/index.js';
import { safeJsonParse } from '../../utils/safeJsonParse.js';
import { AnthropicContentConverter } from './converter.js';
import { buildRuntimeFetchOptions } from '../../utils/runtimeFetchOptions.js';

type StreamingBlockState = {
  type: string;

@@ -55,9 +54,6 @@ export class AnthropicContentGenerator implements ContentGenerator {
  ) {
    const defaultHeaders = this.buildHeaders();
    const baseURL = contentGeneratorConfig.baseUrl;
    // Configure runtime options to ensure user-configured timeout works as expected
    // bodyTimeout is always disabled (0) to let Anthropic SDK timeout control the request
    const runtimeOptions = buildRuntimeFetchOptions('anthropic');

    this.client = new Anthropic({
      apiKey: contentGeneratorConfig.apiKey,

@@ -65,7 +61,6 @@ export class AnthropicContentGenerator implements ContentGenerator {
      timeout: contentGeneratorConfig.timeout,
      maxRetries: contentGeneratorConfig.maxRetries,
      defaultHeaders,
      ...runtimeOptions,
    });

    this.converter = new AnthropicContentConverter(
@@ -16,7 +16,6 @@ import type {
  ChatCompletionContentPartWithCache,
  ChatCompletionToolWithCache,
} from './types.js';
import { buildRuntimeFetchOptions } from '../../../utils/runtimeFetchOptions.js';

export class DashScopeOpenAICompatibleProvider
  implements OpenAICompatibleProvider

@@ -69,16 +68,12 @@ export class DashScopeOpenAICompatibleProvider
      maxRetries = DEFAULT_MAX_RETRIES,
    } = this.contentGeneratorConfig;
    const defaultHeaders = this.buildHeaders();
    // Configure fetch options to ensure user-configured timeout works as expected
    // bodyTimeout is always disabled (0) to let OpenAI SDK timeout control the request
    const fetchOptions = buildRuntimeFetchOptions('openai');
    return new OpenAI({
      apiKey,
      baseURL: baseUrl,
      timeout,
      maxRetries,
      defaultHeaders,
      ...(fetchOptions ? { fetchOptions } : {}),
    });
  }
@@ -4,7 +4,6 @@ import type { Config } from '../../../config/config.js';
import type { ContentGeneratorConfig } from '../../contentGenerator.js';
import { DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES } from '../constants.js';
import type { OpenAICompatibleProvider } from './types.js';
import { buildRuntimeFetchOptions } from '../../../utils/runtimeFetchOptions.js';

/**
 * Default provider for standard OpenAI-compatible APIs

@@ -44,16 +43,12 @@ export class DefaultOpenAICompatibleProvider
      maxRetries = DEFAULT_MAX_RETRIES,
    } = this.contentGeneratorConfig;
    const defaultHeaders = this.buildHeaders();
    // Configure fetch options to ensure user-configured timeout works as expected
    // bodyTimeout is always disabled (0) to let OpenAI SDK timeout control the request
    const fetchOptions = buildRuntimeFetchOptions('openai');
    return new OpenAI({
      apiKey,
      baseURL: baseUrl,
      timeout,
      maxRetries,
      defaultHeaders,
      ...(fetchOptions ? { fetchOptions } : {}),
    });
  }
@@ -35,6 +35,7 @@ export const EVENT_MODEL_SLASH_COMMAND = 'qwen-code.slash_command.model';
export const EVENT_SUBAGENT_EXECUTION = 'qwen-code.subagent_execution';
export const EVENT_SKILL_LAUNCH = 'qwen-code.skill_launch';
export const EVENT_AUTH = 'qwen-code.auth';
export const EVENT_USER_FEEDBACK = 'qwen-code.user_feedback';

// Performance Events
export const EVENT_STARTUP_PERFORMANCE = 'qwen-code.startup.performance';

@@ -45,6 +45,7 @@ export {
  logNextSpeakerCheck,
  logAuth,
  logSkillLaunch,
  logUserFeedback,
} from './loggers.js';
export type { SlashCommandEvent, ChatCompressionEvent } from './types.js';
export {

@@ -65,6 +66,8 @@ export {
  NextSpeakerCheckEvent,
  AuthEvent,
  SkillLaunchEvent,
  UserFeedbackEvent,
  UserFeedbackRating,
} from './types.js';
export { makeSlashCommandEvent, makeChatCompressionEvent } from './types.js';
export type { TelemetryEvent } from './types.js';
@@ -38,6 +38,7 @@ import {
  EVENT_INVALID_CHUNK,
  EVENT_AUTH,
  EVENT_SKILL_LAUNCH,
  EVENT_USER_FEEDBACK,
} from './constants.js';
import {
  recordApiErrorMetrics,

@@ -86,6 +87,7 @@ import type {
  InvalidChunkEvent,
  AuthEvent,
  SkillLaunchEvent,
  UserFeedbackEvent,
} from './types.js';
import type { UiEvent } from './uiTelemetry.js';
import { uiTelemetryService } from './uiTelemetry.js';

@@ -887,3 +889,32 @@ export function logSkillLaunch(config: Config, event: SkillLaunchEvent): void {
  };
  logger.emit(logRecord);
}

export function logUserFeedback(
  config: Config,
  event: UserFeedbackEvent,
): void {
  const uiEvent = {
    ...event,
    'event.name': EVENT_USER_FEEDBACK,
    'event.timestamp': new Date().toISOString(),
  } as UiEvent;
  uiTelemetryService.addEvent(uiEvent);
  config.getChatRecordingService()?.recordUiTelemetryEvent(uiEvent);
  QwenLogger.getInstance(config)?.logUserFeedbackEvent(event);
  if (!isTelemetrySdkInitialized()) return;

  const attributes: LogAttributes = {
    ...getCommonAttributes(config),
    ...event,
    'event.name': EVENT_USER_FEEDBACK,
    'event.timestamp': new Date().toISOString(),
  };

  const logger = logs.getLogger(SERVICE_NAME);
  const logRecord: LogRecord = {
    body: `User feedback: Rating ${event.rating} for session ${event.session_id}. Turn count: ${event.turn_count}. Duration: ${event.session_duration_ms}ms.`,
    attributes,
  };
  logger.emit(logRecord);
}
@@ -39,6 +39,7 @@ import type {
  ExtensionDisableEvent,
  AuthEvent,
  SkillLaunchEvent,
  UserFeedbackEvent,
  RipgrepFallbackEvent,
  EndSessionEvent,
} from '../types.js';

@@ -842,6 +843,23 @@ export class QwenLogger {
    this.flushIfNeeded();
  }

  logUserFeedbackEvent(event: UserFeedbackEvent): void {
    const rumEvent = this.createActionEvent('user', 'user_feedback', {
      properties: {
        session_id: event.session_id,
        rating: event.rating,
        session_duration_ms: event.session_duration_ms,
        turn_count: event.turn_count,
        model: event.model,
        approval_mode: event.approval_mode,
        prompt_id: event.prompt_id || '',
      },
    });

    this.enqueueLogEvent(rumEvent);
    this.flushIfNeeded();
  }

  logChatCompressionEvent(event: ChatCompressionEvent): void {
    const rumEvent = this.createActionEvent('misc', 'chat_compression', {
      properties: {
@@ -757,6 +757,44 @@ export class SkillLaunchEvent implements BaseTelemetryEvent {
  }
}

export enum UserFeedbackRating {
  BAD = 1,
  FINE = 2,
  GOOD = 3,
}

export class UserFeedbackEvent implements BaseTelemetryEvent {
  'event.name': 'user_feedback';
  'event.timestamp': string;
  session_id: string;
  rating: UserFeedbackRating;
  session_duration_ms: number;
  turn_count: number;
  model: string;
  approval_mode: string;
  prompt_id?: string;

  constructor(
    session_id: string,
    rating: UserFeedbackRating,
    session_duration_ms: number,
    turn_count: number,
    model: string,
    approval_mode: string,
    prompt_id?: string,
  ) {
    this['event.name'] = 'user_feedback';
    this['event.timestamp'] = new Date().toISOString();
    this.session_id = session_id;
    this.rating = rating;
    this.session_duration_ms = session_duration_ms;
    this.turn_count = turn_count;
    this.model = model;
    this.approval_mode = approval_mode;
    this.prompt_id = prompt_id;
  }
}

export type TelemetryEvent =
  | StartSessionEvent
  | EndSessionEvent

@@ -786,7 +824,8 @@ export type TelemetryEvent =
  | ToolOutputTruncatedEvent
  | ModelSlashCommandEvent
  | AuthEvent
  | SkillLaunchEvent;
  | SkillLaunchEvent
  | UserFeedbackEvent;

export class ExtensionDisableEvent implements BaseTelemetryEvent {
  'event.name': 'extension_disable';
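For reference, a hedged sketch of how the new event can be constructed and emitted outside the hook, using only the exports added in this change (`UserFeedbackEvent`, `UserFeedbackRating`, `logUserFeedback`, `type Config`); the function name, duration, and turn count below are placeholders. Note that the dialog component earlier in this change passes its own numeric codes (GOOD: 1, BAD: 2, NOT_SURE: 3) cast to this enum, whose declared order is BAD = 1, FINE = 2, GOOD = 3.

```typescript
import {
  UserFeedbackEvent,
  UserFeedbackRating,
  logUserFeedback,
  type Config,
} from '@qwen-code/qwen-code-core';

// Hypothetical call site; `config` and `sessionId` stand in for values
// the CLI already has at hand (see useFeedbackDialog above).
function reportThumbsUp(config: Config, sessionId: string): void {
  const event = new UserFeedbackEvent(
    sessionId,
    UserFeedbackRating.GOOD, // 3 per the enum above
    45_000,                  // session_duration_ms (placeholder)
    6,                       // turn_count (placeholder)
    config.getModel(),
    config.getApprovalMode(),
    // prompt_id is optional and omitted here
  );
  logUserFeedback(config, event);
}
```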
@@ -1,167 +0,0 @@
/**
 * @license
 * Copyright 2025 Qwen Team
 * SPDX-License-Identifier: Apache-2.0
 */

import { EnvHttpProxyAgent } from 'undici';

/**
 * JavaScript runtime type
 */
export type Runtime = 'node' | 'bun' | 'unknown';

/**
 * Detect the current JavaScript runtime
 */
export function detectRuntime(): Runtime {
  if (typeof process !== 'undefined' && process.versions?.['bun']) {
    return 'bun';
  }
  if (typeof process !== 'undefined' && process.versions?.node) {
    return 'node';
  }
  return 'unknown';
}

/**
 * Runtime fetch options for OpenAI SDK
 */
export type OpenAIRuntimeFetchOptions =
  | {
      dispatcher?: EnvHttpProxyAgent;
      timeout?: false;
    }
  | undefined;

/**
 * Runtime fetch options for Anthropic SDK
 */
export type AnthropicRuntimeFetchOptions = {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  httpAgent?: any;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  fetch?: any;
};

/**
 * SDK type identifier
 */
export type SDKType = 'openai' | 'anthropic';

/**
 * Build runtime-specific fetch options for OpenAI SDK
 */
export function buildRuntimeFetchOptions(
  sdkType: 'openai',
): OpenAIRuntimeFetchOptions;
/**
 * Build runtime-specific fetch options for Anthropic SDK
 */
export function buildRuntimeFetchOptions(
  sdkType: 'anthropic',
): AnthropicRuntimeFetchOptions;
/**
 * Build runtime-specific fetch options based on the detected runtime and SDK type
 * This function applies runtime-specific configurations to handle timeout differences
 * across Node.js and Bun, ensuring user-configured timeout works as expected.
 *
 * @param sdkType - The SDK type ('openai' or 'anthropic') to determine return type
 * @returns Runtime-specific options compatible with the specified SDK
 */
export function buildRuntimeFetchOptions(
  sdkType: SDKType,
): OpenAIRuntimeFetchOptions | AnthropicRuntimeFetchOptions {
  const runtime = detectRuntime();

  // Always disable bodyTimeout (set to 0) to let SDK's timeout parameter
  // control the total request time. bodyTimeout only monitors intervals between
  // data chunks, not the total request time, so we disable it to ensure user-configured
  // timeout works as expected for both streaming and non-streaming requests.

  switch (runtime) {
    case 'bun': {
      if (sdkType === 'openai') {
        // Bun: Disable built-in 300s timeout to let OpenAI SDK timeout control
        // This ensures user-configured timeout works as expected without interference
        return {
          timeout: false,
        };
      } else {
        // Bun: Use custom fetch to disable built-in 300s timeout
        // This allows Anthropic SDK timeout to control the request
        // Note: Bun's fetch automatically uses proxy settings from environment variables
        // (HTTP_PROXY, HTTPS_PROXY, NO_PROXY), so proxy behavior is preserved
        const bunFetch: typeof fetch = async (
          input: RequestInfo | URL,
          init?: RequestInit,
        ) => {
          const bunFetchOptions: RequestInit = {
            ...init,
            // @ts-expect-error - Bun-specific timeout option
            timeout: false,
          };
          return fetch(input, bunFetchOptions);
        };
        return {
          fetch: bunFetch,
        };
      }
    }

    case 'node': {
      // Node.js: Use EnvHttpProxyAgent to configure proxy and disable bodyTimeout
      // EnvHttpProxyAgent automatically reads proxy settings from environment variables
      // (HTTP_PROXY, HTTPS_PROXY, NO_PROXY, etc.) to preserve proxy functionality
      // bodyTimeout is always 0 (disabled) to let SDK timeout control the request
      try {
        const agent = new EnvHttpProxyAgent({
          bodyTimeout: 0, // Disable to let SDK timeout control total request time
        });

        if (sdkType === 'openai') {
          return {
            dispatcher: agent,
          };
        } else {
          return {
            httpAgent: agent,
          };
        }
      } catch {
        // If undici is not available, return appropriate default
        if (sdkType === 'openai') {
          return undefined;
        } else {
          return {};
        }
      }
    }

    default: {
      // Unknown runtime: Try to use EnvHttpProxyAgent if available
      // EnvHttpProxyAgent automatically reads proxy settings from environment variables
      try {
        const agent = new EnvHttpProxyAgent({
          bodyTimeout: 0, // Disable to let SDK timeout control total request time
        });

        if (sdkType === 'openai') {
          return {
            dispatcher: agent,
          };
        } else {
          return {
            httpAgent: agent,
          };
        }
      } catch {
        if (sdkType === 'openai') {
          return undefined;
        } else {
          return {};
        }
      }
    }
  }
}