Compare commits


2 Commits

Author      SHA1        Message                                           Date
koalazf.99  5854ac67c6  fix sampling params                               2025-12-11 13:46:37 +08:00
koalazf.99  354c85bcff  revert: topP & temperature default value to none  2025-10-28 13:00:10 +08:00
3 changed files with 62 additions and 8 deletions

View File

@@ -5,6 +5,7 @@
  */
 
 import type {
+  ContentGeneratorConfig,
   FileFilteringOptions,
   MCPServerConfig,
   OutputFormat,
@@ -123,6 +124,24 @@ export interface CliArgs {
   outputFormat: string | undefined;
 }
 
+type LegacySamplingSettings = {
+  sampling_params?: ContentGeneratorConfig['samplingParams'];
+};
+
+function getLegacySamplingParams(
+  settings: Settings,
+): ContentGeneratorConfig['samplingParams'] | undefined {
+  if (
+    typeof settings !== 'object' ||
+    settings === null ||
+    !('sampling_params' in (settings as Record<string, unknown>))
+  ) {
+    return undefined;
+  }
+
+  return (settings as Settings & LegacySamplingSettings).sampling_params;
+}
+
 export async function parseArguments(settings: Settings): Promise<CliArgs> {
   const rawArgv = hideBin(process.argv);
   const yargsInstance = yargs(rawArgv)
@@ -685,6 +704,7 @@ export async function loadCliConfig(
   const vlmSwitchMode =
     argv.vlmSwitchMode || settings.experimental?.vlmSwitchMode;
+  const legacySamplingParams = getLegacySamplingParams(settings);
 
   return new Config({
     sessionId,
     embeddingModel: DEFAULT_QWEN_EMBEDDING_MODEL,
@@ -745,6 +765,8 @@
       (typeof argv.openaiLogging === 'undefined'
         ? settings.model?.enableOpenAILogging
         : argv.openaiLogging) ?? false,
+      // Include sampling_params from root level settings
+      ...(legacySamplingParams ? { samplingParams: legacySamplingParams } : {}),
     },
     cliVersion: await getCliVersion(),
     tavilyApiKey:
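
For reference, a minimal sketch of the legacy root-level settings shape that getLegacySamplingParams detects. The keys inside sampling_params are assumptions inferred from the snake_case mapping in the GeminiClient change below, and the cast stands in for a fully populated Settings value:

// Hypothetical legacy settings object: sampling_params sits at the root
// of the settings rather than under a namespaced section.
const legacySettings = {
  sampling_params: {
    temperature: 0.7,
    top_p: 0.9,
    max_tokens: 4096,
  },
} as unknown as Settings;

const params = getLegacySamplingParams(legacySettings);
// params?.temperature === 0.7 here; when sampling_params is absent the
// helper returns undefined, so the conditional spread into
// contentGeneratorConfig adds nothing.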

View File

@@ -65,10 +65,7 @@ export interface GenerateJsonOptions {
  */
 export class BaseLlmClient {
   // Default configuration for utility tasks
-  private readonly defaultUtilityConfig: GenerateContentConfig = {
-    temperature: 0,
-    topP: 1,
-  };
+  private readonly defaultUtilityConfig: GenerateContentConfig = {};
 
   constructor(
     private readonly contentGenerator: ContentGenerator,
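
With the pinned temperature: 0 / topP: 1 pair gone, utility calls now inherit the provider's own sampling defaults unless a caller opts back in per request. A minimal sketch of that effect; mergeUtilityConfig is a hypothetical helper named only for this illustration:

import type { GenerateContentConfig } from '@google/genai';

// Per-call overrides win; with the defaults object now empty, any field
// the caller does not set falls through to the provider.
function mergeUtilityConfig(
  defaults: GenerateContentConfig,
  overrides?: GenerateContentConfig,
): GenerateContentConfig {
  return { ...defaults, ...overrides };
}

// A utility task that still wants deterministic output must now ask
// for it explicitly instead of relying on the old hard-coded default:
const jsonTaskConfig = mergeUtilityConfig({}, { temperature: 0 });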

View File

@@ -149,10 +149,7 @@ const COMPRESSION_PRESERVE_THRESHOLD = 0.3;
 
 export class GeminiClient {
   private chat?: GeminiChat;
-  private readonly generateContentConfig: GenerateContentConfig = {
-    temperature: 0,
-    topP: 1,
-  };
+  private readonly generateContentConfig: GenerateContentConfig;
 
   private sessionTurnCount = 0;
   private readonly loopDetector: LoopDetectionService;
@@ -169,6 +166,44 @@ export class GeminiClient {
   constructor(private readonly config: Config) {
     this.loopDetector = new LoopDetectionService(config);
     this.lastPromptId = this.config.getSessionId();
+    this.generateContentConfig = this.buildDefaultGenerateContentConfig();
   }
 
+  private buildDefaultGenerateContentConfig(): GenerateContentConfig {
+    const samplingParams =
+      this.config.getContentGeneratorConfig()?.samplingParams;
+
+    if (!samplingParams) {
+      return {};
+    }
+
+    const config: GenerateContentConfig = {};
+
+    if (samplingParams.temperature !== undefined) {
+      config.temperature = samplingParams.temperature;
+    }
+
+    if (samplingParams.top_p !== undefined) {
+      config.topP = samplingParams.top_p;
+    }
+
+    if (samplingParams.top_k !== undefined) {
+      config.topK = samplingParams.top_k;
+    }
+
+    if (samplingParams.max_tokens !== undefined) {
+      config.maxOutputTokens = samplingParams.max_tokens;
+    }
+
+    if (samplingParams.presence_penalty !== undefined) {
+      config.presencePenalty = samplingParams.presence_penalty;
+    }
+
+    if (samplingParams.frequency_penalty !== undefined) {
+      config.frequencyPenalty = samplingParams.frequency_penalty;
+    }
+
+    return config;
+  }
+
   async initialize() {
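
Taken together, the request defaults now come from the user's sampling_params rather than being pinned at temperature 0 / topP 1. An illustrative trace of buildDefaultGenerateContentConfig against a stubbed Config; the stub's shape is an assumption made only for this sketch:

// Stub standing in for a real Config whose content generator config
// carries legacy snake_case sampling params.
const stubConfig = {
  getContentGeneratorConfig: () => ({
    samplingParams: { temperature: 0.6, top_p: 0.95, max_tokens: 8192 },
  }),
} as unknown as Config;

// A GeminiClient built from stubConfig would derive
//   { temperature: 0.6, topP: 0.95, maxOutputTokens: 8192 }
// as its generateContentConfig; with samplingParams undefined it
// derives {}, deferring every sampling knob to the provider's defaults.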