pre-release commit

This commit is contained in:
koalazf.99
2025-07-22 19:59:07 +08:00
parent c5dee4bb17
commit a9d6965bef
485 changed files with 111444 additions and 2 deletions

55
scripts/build.js Normal file
View File

@@ -0,0 +1,55 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { execSync } from 'child_process';
import { existsSync } from 'fs';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
// Orchestrates a full repository build: install deps if missing, run code
// generation, build every workspace, and (optionally) build the sandbox
// container image.
const __dirname = dirname(fileURLToPath(import.meta.url));
const root = join(__dirname, '..'); // repo root (this script lives in scripts/)
// npm install if node_modules was removed (e.g. via npm run clean or scripts/clean.js)
if (!existsSync(join(root, 'node_modules'))) {
  execSync('npm install', { stdio: 'inherit', cwd: root });
}
// build all workspaces/packages
execSync('npm run generate', { stdio: 'inherit', cwd: root });
execSync('npm run build --workspaces', { stdio: 'inherit', cwd: root });
// also build container image if sandboxing is enabled
// skip (-s) npm install + build since we did that above
try {
  // sandbox_command.js exits non-zero when no sandbox command is
  // configured/available, so the catch below turns "no sandbox" into a no-op.
  execSync('node scripts/sandbox_command.js -q', {
    stdio: 'inherit',
    cwd: root,
  });
  if (
    process.env.BUILD_SANDBOX === '1' ||
    process.env.BUILD_SANDBOX === 'true'
  ) {
    execSync('node scripts/build_sandbox.js -s', {
      stdio: 'inherit',
      cwd: root,
    });
  }
} catch {
  // ignore
}

37
scripts/build_package.js Normal file
View File

@@ -0,0 +1,37 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { execSync } from 'child_process';
import { writeFileSync } from 'fs';
import { join } from 'path';
// Builds a single workspace package. Must be run from within a package
// directory (e.g. packages/cli); the check below is a simple heuristic on
// the working-directory path.
if (!process.cwd().includes('packages')) {
  console.error('must be invoked from a package directory');
  process.exit(1);
}
// build typescript files
execSync('tsc --build', { stdio: 'inherit' });
// copy .{md,json} files
execSync('node ../../scripts/copy_files.js', { stdio: 'inherit' });
// touch dist/.last_build — consumed by scripts/check-build-status.js to
// decide whether sources are newer than the last build.
writeFileSync(join(process.cwd(), 'dist', '.last_build'), '');
process.exit(0);

155
scripts/build_sandbox.js Normal file
View File

@@ -0,0 +1,155 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { execSync } from 'child_process';
import { chmodSync, existsSync, readFileSync, readdirSync, rmSync, writeFileSync } from 'fs';
import { join } from 'path';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import cliPkgJson from '../packages/cli/package.json' with { type: 'json' };
// CLI flags: -s skips the (slow) install+build when the caller already did
// it, -f/-i select a custom Dockerfile/image name in addition to the default.
const argv = yargs(hideBin(process.argv))
  .option('s', {
    alias: 'skip-npm-install-build',
    type: 'boolean',
    default: false,
    description: 'skip npm install + npm run build',
  })
  .option('f', {
    alias: 'dockerfile',
    type: 'string',
    description: 'use <dockerfile> for custom image',
  })
  .option('i', {
    alias: 'image',
    type: 'string',
    description: 'use <image> name for custom image',
  }).argv;
// Detect which container tool (docker/podman/sandbox-exec) is configured.
// Exit 0 (not an error) when none is found: sandboxing is optional.
let sandboxCommand;
try {
  sandboxCommand = execSync('node scripts/sandbox_command.js')
    .toString()
    .trim();
} catch {
  console.warn('ERROR: could not detect sandbox container command');
  process.exit(0);
}
if (sandboxCommand === 'sandbox-exec') {
  // macOS seatbelt sandboxing does not use a container image; nothing to build.
  console.warn(
    'WARNING: container-based sandboxing is disabled (see README.md#sandboxing)',
  );
  process.exit(0);
}
console.log(`using ${sandboxCommand} for sandboxing`);
const baseImage = cliPkgJson.config.sandboxImageUri;
const customImage = argv.i;
const baseDockerfile = 'Dockerfile';
const customDockerfile = argv.f;
if (!baseImage?.length) {
  console.warn(
    'No default image tag specified in gemini-cli/packages/cli/package.json',
  );
}
// Ensure packages are built before packing, unless -s was passed.
if (!argv.s) {
  execSync('npm install', { stdio: 'inherit' });
  execSync('npm run build --workspaces', { stdio: 'inherit' });
}
// Remove stale packed tarballs before repacking. fs.rmSync does NOT expand
// glob patterns, so the previous rmSync(join(dir, '...-*.tgz'), {force:true})
// calls were silent no-ops ({force:true} suppressed the ENOENT for the
// literal '*' path). List the directory and match names explicitly instead.
function removeStaleTarballs(dir, pattern) {
  if (!existsSync(dir)) return;
  for (const name of readdirSync(dir)) {
    if (pattern.test(name)) {
      rmSync(join(dir, name), { force: true });
    }
  }
}
console.log('packing @google/gemini-cli ...');
const cliPackageDir = join('packages', 'cli');
removeStaleTarballs(join(cliPackageDir, 'dist'), /^google-gemini-cli-.*\.tgz$/);
execSync(
  `npm pack -w @google/gemini-cli --pack-destination ./packages/cli/dist`,
  {
    stdio: 'ignore',
  },
);
console.log('packing @google/gemini-cli-core ...');
const corePackageDir = join('packages', 'core');
removeStaleTarballs(
  join(corePackageDir, 'dist'),
  /^google-gemini-cli-core-.*\.tgz$/,
);
execSync(
  `npm pack -w @google/gemini-cli-core --pack-destination ./packages/core/dist`,
  { stdio: 'ignore' },
);
// Read the version once so we can locate the tarballs npm just created.
const packageVersion = JSON.parse(
  readFileSync(join(process.cwd(), 'package.json'), 'utf-8'),
).version;
// Loosen permissions on the tarballs — presumably so they remain readable
// when copied into the container image build context; TODO confirm.
chmodSync(
  join(cliPackageDir, 'dist', `google-gemini-cli-${packageVersion}.tgz`),
  0o755,
);
chmodSync(
  join(corePackageDir, 'dist', `google-gemini-cli-core-${packageVersion}.tgz`),
  0o755,
);
// Image build output is hidden unless VERBOSE is set in the environment.
const buildStdout = process.env.VERBOSE ? 'inherit' : 'ignore';
// Builds one container image from `dockerfile`, tagging it as `imageName`
// (with GEMINI_SANDBOX_IMAGE_TAG overriding the tag part if set), and
// records the final image name in /workspace/final_image_uri.txt for CI.
function buildImage(imageName, dockerfile) {
  console.log(`building ${imageName} ... (can be slow first time)`);
  // podman needs an empty authfile to avoid touching the user's registry
  // credentials; <(echo '{}') is bash process substitution, hence the
  // explicit shell: '/bin/bash' on the execSync call below.
  const buildCommand =
    sandboxCommand === 'podman'
      ? `${sandboxCommand} build --authfile=<(echo '{}')`
      : `${sandboxCommand} build`;
  const npmPackageVersion = JSON.parse(
    readFileSync(join(process.cwd(), 'package.json'), 'utf-8'),
  ).version;
  const imageTag =
    process.env.GEMINI_SANDBOX_IMAGE_TAG || imageName.split(':')[1];
  const finalImageName = `${imageName.split(':')[0]}:${imageTag}`;
  execSync(
    `${buildCommand} ${
      process.env.BUILD_SANDBOX_FLAGS || ''
    } --build-arg CLI_VERSION_ARG=${npmPackageVersion} -f "${dockerfile}" -t "${finalImageName}" .`,
    { stdio: buildStdout, shell: '/bin/bash' },
  );
  console.log(`built ${finalImageName}`);
  if (existsSync('/workspace/final_image_uri.txt')) {
    // The publish step only supports one image. If we build multiple, only the last one
    // will be published. Throw an error to make this failure explicit.
    throw new Error(
      'CI artifact file /workspace/final_image_uri.txt already exists. Refusing to overwrite.',
    );
  }
  // NOTE(review): written unconditionally — presumably /workspace only
  // exists in the CI container; confirm this doesn't fail locally.
  writeFileSync('/workspace/final_image_uri.txt', finalImageName);
}
// Build the default image, then (optionally) a custom one.
if (baseImage && baseDockerfile) {
  buildImage(baseImage, baseDockerfile);
}
if (customDockerfile && customImage) {
  buildImage(customImage, customDockerfile);
}
// Reclaim space from dangling layers left over by the rebuild.
execSync(`${sandboxCommand} image prune -f`, { stdio: 'ignore' });

View File

@@ -0,0 +1,148 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import fs from 'fs';
import path from 'path';
import os from 'os'; // Import os module
// --- Configuration ---
// Paths are resolved relative to the current working directory, so this
// script is expected to run from the repository root (see scripts/start.js).
const cliPackageDir = path.resolve('packages', 'cli'); // Base directory for the CLI package
// .last_build is touched by scripts/build_package.js at the end of a build.
const buildTimestampPath = path.join(cliPackageDir, 'dist', '.last_build'); // Path to the timestamp file within the CLI package
const sourceDirs = [path.join(cliPackageDir, 'src')]; // Source directory within the CLI package
const filesToWatch = [
  path.join(cliPackageDir, 'package.json'),
  path.join(cliPackageDir, 'tsconfig.json'),
]; // Specific files within the CLI package
const buildDir = path.join(cliPackageDir, 'dist'); // Build output directory within the CLI package
const warningsFilePath = path.join(os.tmpdir(), 'gemini-cli-warnings.txt'); // Temp file for warnings
// ---------------------
/**
 * Returns the modification time of `filePath` in milliseconds since the
 * epoch, or `null` when the file does not exist. Any other stat failure is
 * treated as fatal: it is logged and the process exits with status 1.
 */
function getMtime(filePath) {
  try {
    const { mtimeMs } = fs.statSync(filePath); // mtimeMs for higher precision
    return mtimeMs;
  } catch (err) {
    if (err.code !== 'ENOENT') {
      console.error(`Error getting stats for ${filePath}:`, err);
      process.exit(1); // Exit on unexpected errors getting stats
    }
    return null; // File doesn't exist
  }
}
/**
 * Depth-first walk of `dir`, appending the path of every regular file to
 * `allFiles` and returning it. Recursion skips `node_modules` and the
 * module-level `buildDir` so generated/build output is never scanned.
 */
function findSourceFiles(dir, allFiles = []) {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isFile()) {
      allFiles.push(fullPath);
    } else if (
      entry.isDirectory() &&
      entry.name !== 'node_modules' &&
      fullPath !== buildDir
    ) {
      findSourceFiles(fullPath, allFiles);
    }
  }
  return allFiles;
}
// Main flow: compare source mtimes against the .last_build timestamp and
// leave any warnings in a temp file for the app to display on startup.
// This script ALWAYS exits 0 so it can never block the app from starting.
console.log('Checking build status...');
// Clean up old warnings file before check
try {
  if (fs.existsSync(warningsFilePath)) {
    fs.unlinkSync(warningsFilePath);
  }
} catch (err) {
  console.warn(
    `[Check Script] Warning: Could not delete previous warnings file: ${err.message}`,
  );
}
const buildMtime = getMtime(buildTimestampPath);
if (!buildMtime) {
  // If build is missing, write that as a warning and exit(0) so app can display it
  const errorMessage = `ERROR: Build timestamp file (${path.relative(process.cwd(), buildTimestampPath)}) not found. Run \`npm run build\` first.`;
  console.error(errorMessage); // Still log error here
  try {
    fs.writeFileSync(warningsFilePath, errorMessage);
  } catch (writeErr) {
    console.error(
      `[Check Script] Error writing missing build warning file: ${writeErr.message}`,
    );
  }
  process.exit(0); // Allow app to start and show the error
}
let newerSourceFileFound = false;
const warningMessages = []; // Collect warnings here
const allSourceFiles = [];
// Collect files from specified directories
sourceDirs.forEach((dir) => {
  const dirPath = path.resolve(dir);
  if (fs.existsSync(dirPath)) {
    findSourceFiles(dirPath, allSourceFiles);
  } else {
    console.warn(`Warning: Source directory "${dir}" not found.`);
  }
});
// Add specific files (package.json, tsconfig.json)
filesToWatch.forEach((file) => {
  const filePath = path.resolve(file);
  if (fs.existsSync(filePath)) {
    allSourceFiles.push(filePath);
  } else {
    console.warn(`Warning: Watched file "${file}" not found.`);
  }
});
// Check modification times: any source newer than the build timestamp
// means the build output is stale.
for (const file of allSourceFiles) {
  const sourceMtime = getMtime(file);
  const relativePath = path.relative(process.cwd(), file);
  const isNewer = sourceMtime && sourceMtime > buildMtime;
  if (isNewer) {
    const warning = `Warning: Source file "${relativePath}" has been modified since the last build.`;
    console.warn(warning); // Keep console warning for script debugging
    warningMessages.push(warning);
    newerSourceFileFound = true;
    // break; // Uncomment to stop checking after the first newer file
  }
}
if (newerSourceFileFound) {
  const finalWarning =
    '\nRun "npm run build" to incorporate changes before starting.';
  warningMessages.push(finalWarning);
  console.warn(finalWarning);
  // Write warnings to the temp file
  try {
    fs.writeFileSync(warningsFilePath, warningMessages.join('\n'));
    // Removed debug log
  } catch (err) {
    console.error(`[Check Script] Error writing warnings file: ${err.message}`);
    // Proceed without writing, app won't show warnings
  }
} else {
  console.log('Build is up-to-date.');
  // Ensure no stale warning file exists if build is ok
  try {
    if (fs.existsSync(warningsFilePath)) {
      fs.unlinkSync(warningsFilePath);
    }
  } catch (err) {
    console.warn(
      `[Check Script] Warning: Could not delete previous warnings file: ${err.message}`,
    );
  }
}
process.exit(0); // Always exit successfully so the app starts

47
scripts/clean.js Normal file
View File

@@ -0,0 +1,47 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { rmSync, readFileSync } from 'fs';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
import { globSync } from 'glob';
const __dirname = dirname(fileURLToPath(import.meta.url));
const root = join(__dirname, '..'); // repo root (this script lives in scripts/)
// rm -rf semantics: descend into directories and ignore missing paths.
const RMRF_OPTIONS = { recursive: true, force: true };
// Remove npm install/build artifacts. (The original removed `bundle` twice —
// once with inline options and again with RMRF_OPTIONS; consolidated here.)
rmSync(join(root, 'node_modules'), RMRF_OPTIONS);
rmSync(join(root, 'bundle'), RMRF_OPTIONS);
rmSync(join(root, 'packages/cli/src/generated/'), RMRF_OPTIONS);
// Dynamically clean dist directories in all workspaces declared in the
// root package.json (workspace entries may be globs, e.g. "packages/*").
const rootPackageJson = JSON.parse(
  readFileSync(join(root, 'package.json'), 'utf-8'),
);
for (const workspace of rootPackageJson.workspaces) {
  const packages = globSync(join(workspace, 'package.json'), { cwd: root });
  for (const pkgPath of packages) {
    const pkgDir = dirname(join(root, pkgPath));
    rmSync(join(pkgDir, 'dist'), RMRF_OPTIONS);
  }
}

View File

@@ -0,0 +1,40 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { copyFileSync, existsSync, mkdirSync } from 'fs';
import { dirname, join, basename } from 'path';
import { fileURLToPath } from 'url';
import { glob } from 'glob';
// Copies sandbox profile (.sb) files from the packages into the flat
// bundle/ directory produced by the bundling step.
const __dirname = dirname(fileURLToPath(import.meta.url));
const root = join(__dirname, '..'); // repo root (this script lives in scripts/)
const bundleDir = join(root, 'bundle');
// Create the bundle directory if it doesn't exist
if (!existsSync(bundleDir)) {
  mkdirSync(bundleDir);
}
// Find and copy all .sb files from packages to the root of the bundle directory.
// Note: basename() flattens the hierarchy, so same-named files in different
// packages would overwrite each other.
const sbFiles = glob.sync('packages/**/*.sb', { cwd: root });
for (const file of sbFiles) {
  copyFileSync(join(root, file), join(bundleDir, basename(file)));
}
console.log('Assets copied to bundle/');

56
scripts/copy_files.js Normal file
View File

@@ -0,0 +1,56 @@
#!/usr/bin/env node
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import fs from 'fs';
import path from 'path';
// Paths are relative to the current working directory — this script is
// invoked from inside a package directory (see scripts/build_package.js).
const sourceDir = path.join('src');
const targetDir = path.join('dist', 'src');
// Non-code asset types that must ship alongside the compiled output.
const extensionsToCopy = ['.md', '.json', '.sb'];
/**
 * Recursively mirrors `source` into `target`, copying only entries whose
 * file extension appears in the module-level `extensionsToCopy` list.
 * Target directories are created on demand, so the directory structure is
 * preserved even for branches that end up containing no copied files.
 */
function copyFilesRecursive(source, target) {
  if (!fs.existsSync(target)) {
    fs.mkdirSync(target, { recursive: true });
  }
  for (const entry of fs.readdirSync(source, { withFileTypes: true })) {
    const from = path.join(source, entry.name);
    const to = path.join(target, entry.name);
    if (entry.isDirectory()) {
      copyFilesRecursive(from, to);
      continue;
    }
    if (extensionsToCopy.includes(path.extname(entry.name))) {
      fs.copyFileSync(from, to);
    }
  }
}
// Fail loudly if invoked from the wrong directory (no src/ present).
if (!fs.existsSync(sourceDir)) {
  console.error(`Source directory ${sourceDir} not found.`);
  process.exit(1);
}
copyFilesRecursive(sourceDir, targetDir);
console.log('Successfully copied files.');

38
scripts/create_alias.sh Executable file
View File

@@ -0,0 +1,38 @@
#!/bin/bash
# This script creates an alias for the Gemini CLI in the user's bash or zsh
# configuration file, after asking for confirmation.

# Determine the project directory (parent of scripts/)
PROJECT_DIR=$(cd "$(dirname "$0")/.." && pwd)
ALIAS_COMMAND="alias gemini='node $PROJECT_DIR/scripts/start.js'"

# Detect shell and set config file path
if [[ "$SHELL" == *"/bash" ]]; then
  CONFIG_FILE="$HOME/.bashrc"
elif [[ "$SHELL" == *"/zsh" ]]; then
  CONFIG_FILE="$HOME/.zshrc"
else
  echo "Unsupported shell. Only bash and zsh are supported."
  exit 1
fi

echo "This script will add the following alias to your shell configuration file ($CONFIG_FILE):"
echo "  $ALIAS_COMMAND"
echo ""

# Check if the alias already exists. Guard with -f: grep on a missing
# config file would print an error and return a misleading status.
if [[ -f "$CONFIG_FILE" ]] && grep -q "alias gemini=" "$CONFIG_FILE"; then
  echo "A 'gemini' alias already exists in $CONFIG_FILE. No changes were made."
  exit 0
fi

read -p "Do you want to proceed? (y/n) " -n 1 -r
echo ""
if [[ $REPLY =~ ^[Yy]$ ]]; then
  # >> creates the config file if it does not exist yet.
  echo "$ALIAS_COMMAND" >> "$CONFIG_FILE"
  echo ""
  echo "Alias added to $CONFIG_FILE."
  echo "Please run 'source $CONFIG_FILE' or open a new terminal to use the 'gemini' command."
else
  echo "Aborted. No changes were made."
fi

View File

@@ -0,0 +1,63 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { execSync } from 'child_process';
import { existsSync, mkdirSync, writeFileSync } from 'fs';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
// Generates packages/cli/src/generated/git-commit.ts containing the current
// git commit hash (annotated when the tree has local modifications), or
// 'N/A' when git is unavailable or this is not a git checkout.
const __dirname = dirname(fileURLToPath(import.meta.url));
const root = join(__dirname, '..');
const generatedDir = join(root, 'packages/cli/src/generated');
const gitCommitFile = join(generatedDir, 'git-commit.ts');
let gitCommitInfo = 'N/A'; // fallback if the git commands below fail
if (!existsSync(generatedDir)) {
  mkdirSync(generatedDir, { recursive: true });
}
try {
  const gitHash = execSync('git rev-parse --short HEAD', {
    encoding: 'utf-8',
  }).trim();
  if (gitHash) {
    gitCommitInfo = gitHash;
    // `git status --porcelain` prints one line per changed file; any output
    // at all means the working tree is dirty.
    const gitStatus = execSync('git status --porcelain', {
      encoding: 'utf-8',
    }).trim();
    if (gitStatus) {
      gitCommitInfo = `${gitHash} (local modifications)`;
    }
  }
} catch {
  // ignore
}
const fileContent = `/**
 * @license
 * Copyright ${new Date().getFullYear()} Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */
// This file is auto-generated by the build script (scripts/build.js)
// Do not edit this file manually.
export const GIT_COMMIT_INFO = '${gitCommitInfo}';
`;
writeFileSync(gitCommitFile, fileContent);

View File

@@ -0,0 +1,89 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
/**
 * Reads the `version` field from the package.json in the current working
 * directory.
 */
function getPackageVersion() {
  const raw = fs.readFileSync(path.resolve(process.cwd(), 'package.json'), 'utf8');
  return JSON.parse(raw).version;
}
/** Returns the abbreviated commit hash of HEAD in the current repository. */
function getShortSha() {
  const out = execSync('git rev-parse --short HEAD');
  return out.toString().trim();
}
/**
 * Builds a nightly release tag of the form
 * `v<package-version>-nightly.<YYMMDD>.<short-sha>` using the UTC date.
 */
export function getNightlyTagName() {
  const now = new Date();
  const yy = now.getUTCFullYear().toString().slice(-2);
  const mm = (now.getUTCMonth() + 1).toString().padStart(2, '0');
  const dd = now.getUTCDate().toString().padStart(2, '0');
  return `v${getPackageVersion()}-nightly.${yy}${mm}${dd}.${getShortSha()}`;
}
/**
 * Determines the release tag, bare version, and npm dist-tag for the
 * current release.
 *
 * The tag comes from the nightly generator when IS_NIGHTLY === 'true',
 * otherwise from MANUAL_VERSION. Throws if neither source yields a
 * version, if the version carries build metadata (+), or if it does not
 * match vX.Y.Z or vX.Y.Z-prerelease. A missing 'v' prefix is added.
 *
 * @returns {{releaseTag: string, releaseVersion: string, npmTag: string}}
 */
export function getReleaseVersion() {
  let releaseTag;
  if (process.env.IS_NIGHTLY === 'true') {
    console.error('Calculating next nightly version...');
    releaseTag = getNightlyTagName();
  } else if (process.env.MANUAL_VERSION) {
    console.error(`Using manual version: ${process.env.MANUAL_VERSION}`);
    releaseTag = process.env.MANUAL_VERSION;
  } else {
    throw new Error(
      'Error: No version specified and this is not a nightly release.',
    );
  }
  if (!releaseTag) {
    throw new Error('Error: Version could not be determined.');
  }
  if (!releaseTag.startsWith('v')) {
    console.error("Version is missing 'v' prefix. Prepending it.");
    releaseTag = `v${releaseTag}`;
  }
  if (releaseTag.includes('+')) {
    throw new Error(
      'Error: Versions with build metadata (+) are not supported for releases. Please use a pre-release version (e.g., v1.2.3-alpha.4) instead.',
    );
  }
  if (!releaseTag.match(/^v[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$/)) {
    throw new Error(
      'Error: Version must be in the format vX.Y.Z or vX.Y.Z-prerelease',
    );
  }
  // Strip the leading 'v' for npm; the dist-tag is 'latest' for stable
  // releases, otherwise the first prerelease identifier (e.g. 'alpha').
  const releaseVersion = releaseTag.slice(1);
  const npmTag = releaseVersion.includes('-')
    ? releaseVersion.split('-')[1].split('.')[0]
    : 'latest';
  return { releaseTag, releaseVersion, npmTag };
}
// When executed directly (not imported), print the computed versions as a
// single JSON line for consumption by CI, or the error message on failure.
if (process.argv[1] === new URL(import.meta.url).pathname) {
  try {
    const versions = getReleaseVersion();
    console.log(JSON.stringify(versions));
  } catch (error) {
    console.error(error.message);
    process.exit(1);
  }
}

219
scripts/local_telemetry.js Executable file
View File

@@ -0,0 +1,219 @@
#!/usr/bin/env node
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import path from 'path';
import fs from 'fs';
import { spawn, execSync } from 'child_process';
import { fileURLToPath } from 'url';
import {
BIN_DIR,
OTEL_DIR,
ensureBinary,
fileExists,
manageTelemetrySettings,
registerCleanup,
waitForPort,
} from './telemetry_utils.js';
// ES module equivalents of __filename/__dirname.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Collector config and log locations under the shared OTEL directory.
const OTEL_CONFIG_FILE = path.join(OTEL_DIR, 'collector-local.yaml');
const OTEL_LOG_FILE = path.join(OTEL_DIR, 'collector.log');
const JAEGER_LOG_FILE = path.join(OTEL_DIR, 'jaeger.log');
const JAEGER_PORT = 16686; // Jaeger UI port, also used as the readiness probe
// This configuration is for the primary otelcol-contrib instance.
// It receives from the CLI on 4317, exports traces to Jaeger on 14317,
// and sends metrics/logs to the debug log.
const OTEL_CONFIG_CONTENT = `
receivers:
  otlp:
    protocols:
      grpc:
        endpoint: "localhost:4317"
processors:
  batch:
    timeout: 1s
exporters:
  otlp:
    endpoint: "localhost:14317"
    tls:
      insecure: true
  debug:
    verbosity: detailed
service:
  telemetry:
    logs:
      level: "debug"
    metrics:
      level: "none"
  pipelines:
    traces:
      receivers: [otlp]
      processors: [batch]
      exporters: [otlp]
    metrics:
      receivers: [otlp]
      processors: [batch]
      exporters: [debug]
    logs:
      receivers: [otlp]
      processors: [batch]
      exporters: [debug]
`;
/**
 * Boots a local telemetry stack for the CLI: ensures the otelcol-contrib
 * and Jaeger binaries are available (downloading into the project's
 * .qwen/otel/bin directory if needed), kills any stale instances, points
 * the CLI's telemetry settings at the local collector, then starts Jaeger
 * and the collector and waits for their ports before reporting readiness.
 * Exits non-zero if a binary cannot be obtained or a service fails to
 * open its port.
 */
async function main() {
  // 1. Ensure binaries are available, downloading if necessary.
  // Binaries are stored in the project's .qwen/otel/bin directory
  // to avoid modifying the user's system.
  if (!fileExists(BIN_DIR)) fs.mkdirSync(BIN_DIR, { recursive: true });
  const otelcolPath = await ensureBinary(
    'otelcol-contrib',
    'open-telemetry/opentelemetry-collector-releases',
    (version, platform, arch, ext) =>
      `otelcol-contrib_${version}_${platform}_${arch}.${ext}`,
    'otelcol-contrib',
    false, // isJaeger = false
  ).catch((e) => {
    // Fixed: this message contained mojibake (U+FFFD replacement bytes)
    // where the stop-sign emoji belongs; restored to match the jaeger one.
    console.error(`🛑 Error getting otelcol-contrib: ${e.message}`);
    return null;
  });
  if (!otelcolPath) process.exit(1);
  const jaegerPath = await ensureBinary(
    'jaeger',
    'jaegertracing/jaeger',
    (version, platform, arch, ext) =>
      `jaeger-${version}-${platform}-${arch}.${ext}`,
    'jaeger',
    true, // isJaeger = true
  ).catch((e) => {
    console.error(`🛑 Error getting jaeger: ${e.message}`);
    return null;
  });
  if (!jaegerPath) process.exit(1);
  // 2. Kill any existing processes to ensure a clean start.
  console.log('🧹 Cleaning up old processes and logs...');
  try {
    execSync('pkill -f "otelcol-contrib"');
    console.log('✅ Stopped existing otelcol-contrib process.');
  } catch (_e) {} // eslint-disable-line no-empty
  try {
    execSync('pkill -f "jaeger"');
    console.log('✅ Stopped existing jaeger process.');
  } catch (_e) {} // eslint-disable-line no-empty
  try {
    if (fileExists(OTEL_LOG_FILE)) fs.unlinkSync(OTEL_LOG_FILE);
    console.log('✅ Deleted old collector log.');
  } catch (e) {
    if (e.code !== 'ENOENT') console.error(e);
  }
  try {
    if (fileExists(JAEGER_LOG_FILE)) fs.unlinkSync(JAEGER_LOG_FILE);
    console.log('✅ Deleted old jaeger log.');
  } catch (e) {
    if (e.code !== 'ENOENT') console.error(e);
  }
  let jaegerProcess, collectorProcess;
  let jaegerLogFd, collectorLogFd;
  // Point the CLI's telemetry settings at the local collector; the cleanup
  // handler restores the original setting on exit.
  const originalSandboxSetting = manageTelemetrySettings(
    true,
    'http://localhost:4317',
    'local',
  );
  registerCleanup(
    () => [jaegerProcess, collectorProcess],
    () => [jaegerLogFd, collectorLogFd],
    originalSandboxSetting,
  );
  if (!fileExists(OTEL_DIR)) fs.mkdirSync(OTEL_DIR, { recursive: true });
  fs.writeFileSync(OTEL_CONFIG_FILE, OTEL_CONFIG_CONTENT);
  console.log('📄 Wrote OTEL collector config.');
  // Start Jaeger, listening for OTLP traces on 14317 (forwarded from the
  // primary collector) and serving the UI on JAEGER_PORT.
  console.log(`🚀 Starting Jaeger service... Logs: ${JAEGER_LOG_FILE}`);
  jaegerLogFd = fs.openSync(JAEGER_LOG_FILE, 'a');
  jaegerProcess = spawn(
    jaegerPath,
    ['--set=receivers.otlp.protocols.grpc.endpoint=localhost:14317'],
    { stdio: ['ignore', jaegerLogFd, jaegerLogFd] },
  );
  console.log(`⏳ Waiting for Jaeger to start (PID: ${jaegerProcess.pid})...`);
  try {
    await waitForPort(JAEGER_PORT);
    console.log(`✅ Jaeger started successfully.`);
  } catch (_) {
    console.error(`🛑 Error: Jaeger failed to start on port ${JAEGER_PORT}.`);
    if (jaegerProcess && jaegerProcess.pid) {
      process.kill(jaegerProcess.pid, 'SIGKILL');
    }
    if (fileExists(JAEGER_LOG_FILE)) {
      console.error('📄 Jaeger Log Output:');
      console.error(fs.readFileSync(JAEGER_LOG_FILE, 'utf-8'));
    }
    process.exit(1);
  }
  // Start the primary OTEL collector (receives from the CLI on 4317).
  console.log(`🚀 Starting OTEL collector... Logs: ${OTEL_LOG_FILE}`);
  collectorLogFd = fs.openSync(OTEL_LOG_FILE, 'a');
  collectorProcess = spawn(otelcolPath, ['--config', OTEL_CONFIG_FILE], {
    stdio: ['ignore', collectorLogFd, collectorLogFd],
  });
  console.log(
    `⏳ Waiting for OTEL collector to start (PID: ${collectorProcess.pid})...`,
  );
  try {
    await waitForPort(4317);
    console.log(`✅ OTEL collector started successfully.`);
  } catch (_) {
    console.error(`🛑 Error: OTEL collector failed to start on port 4317.`);
    if (collectorProcess && collectorProcess.pid) {
      process.kill(collectorProcess.pid, 'SIGKILL');
    }
    if (fileExists(OTEL_LOG_FILE)) {
      console.error('📄 OTEL Collector Log Output:');
      console.error(fs.readFileSync(OTEL_LOG_FILE, 'utf-8'));
    }
    process.exit(1);
  }
  // Surface spawn-level failures (e.g. binary disappeared) as fatal errors.
  [jaegerProcess, collectorProcess].forEach((proc) => {
    if (proc) {
      proc.on('error', (err) => {
        console.error(`${proc.spawnargs[0]} process error:`, err);
        process.exit(1);
      });
    }
  });
  console.log(`
✨ Local telemetry environment is running.`);
  console.log(
    `
🔎 View traces in the Jaeger UI: http://localhost:${JAEGER_PORT}`,
  );
  console.log(`📊 View metrics in the logs and metrics: ${OTEL_LOG_FILE}`);
  console.log(
    `
📄 Tail logs and metrics in another terminal: tail -f ${OTEL_LOG_FILE}`,
  );
  console.log(`
Press Ctrl+C to exit.`);
}
main();

21
scripts/pack.sh Normal file
View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Builds the workspace packages, packs each into an npm tarball, and
# collects the tarballs at the repository root.
# Abort on the first failing command (the original script kept going —
# and kept the wrong working directory — after a failed `npm pack`).
set -euo pipefail

# Build the packages first
npm run build:packages

# Pack each package in a subshell so a failure cannot strand the script
# in a package directory.
(cd packages/cli && npm pack)
(cd packages/core && npm pack)

# Move the tgz files to the root directory
cp packages/cli/*.tgz ./
cp packages/core/*.tgz ./

# Remove the original files from package directories
rm packages/cli/*.tgz
rm packages/core/*.tgz

echo "Packages created successfully:"
ls -la *.tgz

View File

@@ -0,0 +1,51 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// ES module equivalent of __dirname
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Repository root (this script lives in scripts/).
const rootDir = path.resolve(__dirname, '..');
/**
 * Copies root-level files into a workspace package directory prior to
 * publishing. `filesToCopy` maps a path relative to the repo root
 * (module-level `rootDir`) to a destination path relative to
 * packages/<packageName>. Exits with status 1 on a missing package
 * directory or any copy failure.
 */
function copyFiles(packageName, filesToCopy) {
  const packageDir = path.resolve(rootDir, 'packages', packageName);
  if (!fs.existsSync(packageDir)) {
    console.error(`Error: Package directory not found at ${packageDir}`);
    process.exit(1);
  }
  console.log(`Preparing package: ${packageName}`);
  for (const [source, dest] of Object.entries(filesToCopy)) {
    try {
      fs.copyFileSync(
        path.resolve(rootDir, source),
        path.resolve(packageDir, dest),
      );
      console.log(`Copied ${source} to packages/${packageName}/`);
    } catch (err) {
      console.error(`Error copying ${source}:`, err);
      process.exit(1);
    }
  }
}
// Prepare 'core' package (includes .npmrc for publish configuration)
copyFiles('core', {
  'README.md': 'README.md',
  LICENSE: 'LICENSE',
  '.npmrc': '.npmrc',
});
// Prepare 'cli' package
copyFiles('cli', {
  'README.md': 'README.md',
  LICENSE: 'LICENSE',
});
console.log('Successfully prepared all packages.');

126
scripts/sandbox_command.js Normal file
View File

@@ -0,0 +1,126 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { execSync } from 'child_process';
import { existsSync, readFileSync } from 'fs';
import { join, dirname } from 'path';
import stripJsonComments from 'strip-json-comments';
import os from 'os';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import dotenv from 'dotenv';
// -q/--quiet suppresses printing the resolved command (exit code still
// signals whether a sandbox command was found).
const argv = yargs(hideBin(process.argv)).option('q', {
  alias: 'quiet',
  type: 'boolean',
  default: false,
}).argv;
// Resolve the sandbox setting in priority order:
//   1. GEMINI_SANDBOX environment variable
//   2. user settings file (~/.qwen/settings.json, JSONC allowed)
//   3. nearest .qwen/.env or .env walking up from the working directory
let geminiSandbox = process.env.GEMINI_SANDBOX;
if (!geminiSandbox) {
  const userSettingsFile = join(os.homedir(), '.qwen', 'settings.json');
  if (existsSync(userSettingsFile)) {
    const settings = JSON.parse(
      stripJsonComments(readFileSync(userSettingsFile, 'utf-8')),
    );
    if (settings.sandbox) {
      geminiSandbox = settings.sandbox;
    }
  }
}
if (!geminiSandbox) {
  let currentDir = process.cwd();
  while (true) {
    // .qwen/.env takes precedence over a plain .env in the same directory.
    const geminiEnv = join(currentDir, '.qwen', '.env');
    const regularEnv = join(currentDir, '.env');
    if (existsSync(geminiEnv)) {
      dotenv.config({ path: geminiEnv, quiet: true });
      break;
    } else if (existsSync(regularEnv)) {
      dotenv.config({ path: regularEnv, quiet: true });
      break;
    }
    const parentDir = dirname(currentDir);
    if (parentDir === currentDir) {
      // reached filesystem root
      break;
    }
    currentDir = parentDir;
  }
  geminiSandbox = process.env.GEMINI_SANDBOX;
}
// Normalize for the comparisons below ('1'/'true'/'0'/'false'/command name).
geminiSandbox = (geminiSandbox || '').toLowerCase();
/**
 * Returns true if `cmd` resolves to a runnable command on this system.
 * Probes with `where` on Windows (retrying with an explicit `.exe`
 * suffix) and the POSIX `command -v` shell builtin elsewhere.
 */
const commandExists = (cmd) => {
  const onWindows = os.platform() === 'win32';
  const probe = onWindows ? 'where' : 'command -v';
  const candidates = onWindows ? [cmd, `${cmd}.exe`] : [cmd];
  for (const candidate of candidates) {
    try {
      execSync(`${probe} ${candidate}`, { stdio: 'ignore' });
      return true;
    } catch {
      // fall through to the next candidate (if any)
    }
  }
  return false;
};
// Print (exit 1) with an optional message; silent exit means "no sandbox".
const fail = (message) => {
  if (message) console.error(message);
  process.exit(1);
};
// Decide which sandbox command to emit on stdout:
//  - GEMINI_SANDBOX=1/true  -> docker, falling back to podman
//  - GEMINI_SANDBOX=<name>  -> that command, if it exists on PATH
//  - otherwise on macOS     -> sandbox-exec unless SEATBELT_PROFILE=none
//  - anything else          -> exit 1 (sandboxing disabled/unavailable)
const resolveSandboxCommand = () => {
  if (['1', 'true'].includes(geminiSandbox)) {
    if (commandExists('docker')) return 'docker';
    if (commandExists('podman')) return 'podman';
    fail(
      'ERROR: install docker or podman or specify command in GEMINI_SANDBOX',
    );
  }
  if (geminiSandbox && !['0', 'false'].includes(geminiSandbox)) {
    if (commandExists(geminiSandbox)) return geminiSandbox;
    fail(
      `ERROR: missing sandbox command '${geminiSandbox}' (from GEMINI_SANDBOX)`,
    );
  }
  if (os.platform() === 'darwin' && process.env.SEATBELT_PROFILE !== 'none') {
    if (commandExists('sandbox-exec')) return 'sandbox-exec';
  }
  fail();
};
const command = resolveSandboxCommand();
if (!argv.q) {
  console.log(command);
}
process.exit(0);

76
scripts/start.js Normal file
View File

@@ -0,0 +1,76 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { spawn, execSync } from 'child_process';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
import { readFileSync } from 'fs';
// Dev launcher: verifies the build, decides on sandbox/debug flags, and
// spawns the CLI package with stdio attached to this terminal.
const __dirname = dirname(fileURLToPath(import.meta.url));
const root = join(__dirname, '..');
const pkg = JSON.parse(readFileSync(join(root, 'package.json'), 'utf-8'));
// check build status, write warnings to file for app to display if needed
execSync('node ./scripts/check-build-status.js', {
  stdio: 'inherit',
  cwd: root,
});
const nodeArgs = [];
let sandboxCommand = undefined;
// sandbox_command.js prints the sandbox command on success and exits non-zero
// when sandboxing is disabled/unavailable; treat failure as "no sandbox".
try {
  sandboxCommand = execSync('node scripts/sandbox_command.js', {
    cwd: root,
  })
    .toString()
    .trim();
} catch {
  // ignore
}
// if debugging is enabled and sandboxing is disabled, use --inspect-brk flag
// note with sandboxing this flag is passed to the binary inside the sandbox
// inside sandbox SANDBOX should be set and sandbox_command.js should fail
if (process.env.DEBUG && !sandboxCommand) {
  if (process.env.SANDBOX) {
    // Inside the sandbox: bind the inspector to all interfaces so the host
    // debugger can attach through the container boundary.
    const port = process.env.DEBUG_PORT || '9229';
    nodeArgs.push(`--inspect-brk=0.0.0.0:${port}`);
  } else {
    nodeArgs.push('--inspect-brk');
  }
}
nodeArgs.push('./packages/cli');
// Forward any extra arguments straight to the CLI.
nodeArgs.push(...process.argv.slice(2));
const env = {
  ...process.env,
  CLI_VERSION: pkg.version,
  DEV: 'true',
};
if (process.env.DEBUG) {
  // If this is not set, the debugger will pause on the outer process rather
  // than the relaunched process making it harder to debug.
  env.GEMINI_CLI_NO_RELAUNCH = 'true';
}
const child = spawn('node', nodeArgs, { stdio: 'inherit', env });
// Mirror the child's exit code. NOTE(review): `code` is null when the child
// is killed by a signal, and process.exit(null) exits 0 — confirm that is
// the intended behavior for signal-terminated runs.
child.on('close', (code) => {
  process.exit(code);
});

85
scripts/telemetry.js Executable file
View File

@@ -0,0 +1,85 @@
#!/usr/bin/env node
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { execSync } from 'child_process';
import { join } from 'path';
import { existsSync, readFileSync } from 'fs';
// Repo root (this script lives in scripts/). import.meta.dirname requires
// Node 20.11+ — TODO confirm the supported Node range.
const projectRoot = join(import.meta.dirname, '..');
const SETTINGS_DIRECTORY_NAME = '.qwen';
// User-level settings directory. USERPROFILE/HOMEPATH are Windows fallbacks;
// NOTE(review): HOMEPATH alone is drive-relative on Windows — verify.
const USER_SETTINGS_DIR = join(
  process.env.HOME || process.env.USERPROFILE || process.env.HOMEPATH || '',
  SETTINGS_DIRECTORY_NAME,
);
const USER_SETTINGS_PATH = join(USER_SETTINGS_DIR, 'settings.json');
// Project-level settings take precedence over the user-level file below.
const WORKSPACE_SETTINGS_PATH = join(
  projectRoot,
  SETTINGS_DIRECTORY_NAME,
  'settings.json',
);
// telemetry.target read from settings, if any (filled in below).
let settingsTarget = undefined;
/**
 * Reads `telemetry.target` from a JSONC settings file.
 *
 * @param {string} filePath - Absolute path to a settings.json file.
 * @returns {string|undefined} The configured telemetry target, or undefined
 *   when the file is missing, unparsable, or has no telemetry.target.
 */
function loadSettingsValue(filePath) {
  try {
    if (existsSync(filePath)) {
      const content = readFileSync(filePath, 'utf-8');
      // Strip // line comments only OUTSIDE string literals. A naive regex
      // replace would truncate values that contain "//", e.g.
      // "otlpEndpoint": "http://localhost:4317", breaking the parse.
      const settings = JSON.parse(stripJsonLineComments(content));
      return settings.telemetry?.target;
    }
  } catch (e) {
    console.warn(
      `⚠️ Warning: Could not parse settings file at ${filePath}: ${e.message}`,
    );
  }
  return undefined;
}

/**
 * Removes `//` line comments from JSONC text while preserving any `//`
 * sequences that appear inside double-quoted strings.
 *
 * @param {string} text - Raw JSONC source.
 * @returns {string} JSON text safe to pass to JSON.parse.
 */
function stripJsonLineComments(text) {
  let result = '';
  let inString = false;
  for (let i = 0; i < text.length; i++) {
    const ch = text[i];
    if (inString) {
      result += ch;
      if (ch === '\\' && i + 1 < text.length) {
        // Copy the escaped character verbatim so \" does not end the string.
        result += text[i + 1];
        i++;
      } else if (ch === '"') {
        inString = false;
      }
    } else if (ch === '"') {
      inString = true;
      result += ch;
    } else if (ch === '/' && text[i + 1] === '/') {
      // Skip to end of line; keep the newline so line numbers stay stable.
      while (i < text.length && text[i] !== '\n') i++;
      if (i < text.length) result += '\n';
    } else {
      result += ch;
    }
  }
  return result;
}
// Resolve the telemetry target: workspace settings win over user settings,
// and an explicit --target= flag overrides both.
settingsTarget =
  loadSettingsValue(WORKSPACE_SETTINGS_PATH) ||
  loadSettingsValue(USER_SETTINGS_PATH);
let target = settingsTarget || 'local';
const allowedTargets = ['local', 'gcp'];
const cliTargetFlag = process.argv.find((arg) => arg.startsWith('--target='));
if (cliTargetFlag) {
  const requested = cliTargetFlag.split('=')[1];
  if (!allowedTargets.includes(requested)) {
    console.error(
      `🛑 Error: Invalid target '${requested}'. Allowed targets are: ${allowedTargets.join(', ')}.`,
    );
    process.exit(1);
  }
  target = requested;
  console.log(`⚙️ Using command-line target: ${target}`);
} else if (settingsTarget) {
  console.log(
    `⚙️ Using telemetry target from settings.json: ${settingsTarget}`,
  );
}
// Dispatch to the per-target collector script, forwarding its stdio.
const scriptName = target === 'gcp' ? 'telemetry_gcp.js' : 'local_telemetry.js';
const scriptPath = join(projectRoot, 'scripts', scriptName);
try {
  console.log(`🚀 Running telemetry script for target: ${target}.`);
  execSync(`node ${scriptPath}`, { stdio: 'inherit', cwd: projectRoot });
} catch (error) {
  console.error(`🛑 Failed to run telemetry script for target: ${target}`);
  console.error(error);
  process.exit(1);
}

188
scripts/telemetry_gcp.js Executable file
View File

@@ -0,0 +1,188 @@
#!/usr/bin/env node
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import path from 'path';
import fs from 'fs';
import { spawn, execSync } from 'child_process';
import {
OTEL_DIR,
BIN_DIR,
fileExists,
waitForPort,
ensureBinary,
manageTelemetrySettings,
registerCleanup,
} from './telemetry_utils.js';
const OTEL_CONFIG_FILE = path.join(OTEL_DIR, 'collector-gcp.yaml');
const OTEL_LOG_FILE = path.join(OTEL_DIR, 'collector-gcp.log');
// Builds the OTEL collector YAML: receive OTLP over gRPC on localhost:4317,
// batch, and export traces/metrics/logs to the given GCP project (metrics
// and logs also go to the debug exporter for local troubleshooting).
// No comments inside the template — they would end up in the YAML output.
const getOtelConfigContent = (projectId) => `
receivers:
  otlp:
    protocols:
      grpc:
        endpoint: "localhost:4317"
processors:
  batch:
    timeout: 1s
exporters:
  googlecloud:
    project: "${projectId}"
    metric:
      prefix: "custom.googleapis.com/gemini_cli"
    log:
      default_log_name: "gemini_cli"
  debug:
    verbosity: detailed
service:
  telemetry:
    logs:
      level: "debug"
    metrics:
      level: "none"
  pipelines:
    traces:
      receivers: [otlp]
      processors: [batch]
      exporters: [googlecloud]
    metrics:
      receivers: [otlp]
      processors: [batch]
      exporters: [googlecloud, debug]
    logs:
      receivers: [otlp]
      processors: [batch]
      exporters: [googlecloud, debug]
`;
/**
 * Downloads (if needed) and launches a local otelcol-contrib collector that
 * forwards Gemini CLI telemetry to Google Cloud. Requires
 * OTLP_GOOGLE_CLOUD_PROJECT to be exported. Runs until Ctrl+C; cleanup is
 * handled by registerCleanup.
 */
async function main() {
  console.log('✨ Starting Local Telemetry Exporter for Google Cloud ✨');
  let collectorProcess;
  let collectorLogFd;
  // Enable telemetry (and disable sandboxing) in workspace settings,
  // remembering the previous sandbox value so cleanup can restore it.
  const originalSandboxSetting = manageTelemetrySettings(
    true,
    'http://localhost:4317',
    'gcp',
  );
  registerCleanup(
    () => [collectorProcess].filter((p) => p), // Function to get processes
    () => [collectorLogFd].filter((fd) => fd), // Function to get FDs
    originalSandboxSetting,
  );
  const projectId = process.env.OTLP_GOOGLE_CLOUD_PROJECT;
  if (!projectId) {
    console.error(
      '🛑 Error: OTLP_GOOGLE_CLOUD_PROJECT environment variable is not exported.',
    );
    console.log(
      '   Please set it to your Google Cloud Project ID and try again.',
    );
    console.log('   `export OTLP_GOOGLE_CLOUD_PROJECT=your-project-id`');
    process.exit(1);
  }
  console.log(`✅ Using OTLP Google Cloud Project ID: ${projectId}`);
  console.log('\n🔑 Please ensure you are authenticated with Google Cloud:');
  console.log(
    '   - Run `gcloud auth application-default login` OR ensure `GOOGLE_APPLICATION_CREDENTIALS` environment variable points to a valid service account key.',
  );
  console.log(
    '   - The account needs "Cloud Trace Agent", "Monitoring Metric Writer", and "Logs Writer" roles.',
  );
  if (!fileExists(BIN_DIR)) fs.mkdirSync(BIN_DIR, { recursive: true });
  // Fetch the collector binary from GitHub releases on first run.
  const otelcolPath = await ensureBinary(
    'otelcol-contrib',
    'open-telemetry/opentelemetry-collector-releases',
    (version, platform, arch, ext) =>
      `otelcol-contrib_${version}_${platform}_${arch}.${ext}`,
    'otelcol-contrib',
    false, // isJaeger = false
  ).catch((e) => {
    console.error(`🛑 Error getting otelcol-contrib: ${e.message}`);
    return null;
  });
  if (!otelcolPath) process.exit(1);
  console.log('🧹 Cleaning up old processes and logs...');
  // Best-effort: kill any collector left over from a previous run.
  try {
    execSync('pkill -f "otelcol-contrib"');
    console.log('✅ Stopped existing otelcol-contrib process.');
  } catch (_e) {
    /* no-op */
  }
  try {
    fs.unlinkSync(OTEL_LOG_FILE);
    console.log('✅ Deleted old GCP collector log.');
  } catch (e) {
    // Only a missing file is expected; report anything else.
    if (e.code !== 'ENOENT') console.error(e);
  }
  if (!fileExists(OTEL_DIR)) fs.mkdirSync(OTEL_DIR, { recursive: true });
  fs.writeFileSync(OTEL_CONFIG_FILE, getOtelConfigContent(projectId));
  console.log(`📄 Wrote OTEL collector config to ${OTEL_CONFIG_FILE}`);
  console.log(`🚀 Starting OTEL collector for GCP... Logs: ${OTEL_LOG_FILE}`);
  // Collector stdout/stderr both go to the log file, not this terminal.
  collectorLogFd = fs.openSync(OTEL_LOG_FILE, 'a');
  collectorProcess = spawn(otelcolPath, ['--config', OTEL_CONFIG_FILE], {
    stdio: ['ignore', collectorLogFd, collectorLogFd],
    env: { ...process.env },
  });
  console.log(
    `⏳ Waiting for OTEL collector to start (PID: ${collectorProcess.pid})...`,
  );
  try {
    await waitForPort(4317);
    console.log(`✅ OTEL collector started successfully on port 4317.`);
  } catch (err) {
    // Startup failed: kill the collector, dump its log, and bail out.
    console.error(`🛑 Error: OTEL collector failed to start on port 4317.`);
    console.error(err.message);
    if (collectorProcess && collectorProcess.pid) {
      process.kill(collectorProcess.pid, 'SIGKILL');
    }
    if (fileExists(OTEL_LOG_FILE)) {
      console.error('📄 OTEL Collector Log Output:');
      console.error(fs.readFileSync(OTEL_LOG_FILE, 'utf-8'));
    }
    process.exit(1);
  }
  collectorProcess.on('error', (err) => {
    console.error(`${collectorProcess.spawnargs[0]} process error:`, err);
    process.exit(1);
  });
  console.log(`\n✨ Local OTEL collector for GCP is running.`);
  console.log(
    '\n🚀 To send telemetry, run the Gemini CLI in a separate terminal window.',
  );
  console.log(`\n📄 Collector logs are being written to: ${OTEL_LOG_FILE}`);
  console.log(
    `📄 Tail collector logs in another terminal: tail -f ${OTEL_LOG_FILE}`,
  );
  console.log(`\n📊 View your telemetry data in Google Cloud Console:`);
  console.log(
    ` - Logs: https://console.cloud.google.com/logs/query;query=logName%3D%22projects%2F${projectId}%2Flogs%2Fgemini_cli%22?project=${projectId}`,
  );
  console.log(
    ` - Metrics: https://console.cloud.google.com/monitoring/metrics-explorer?project=${projectId}`,
  );
  console.log(
    ` - Traces: https://console.cloud.google.com/traces/list?project=${projectId}`,
  );
  console.log(`\nPress Ctrl+C to exit.`);
}
// main() is async; without a catch, a rejection would surface as an
// unhandled-rejection crash instead of a clean error message and exit code.
main().catch((error) => {
  console.error(error);
  process.exit(1);
});

397
scripts/telemetry_utils.js Normal file
View File

@@ -0,0 +1,397 @@
#!/usr/bin/env node
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import path from 'path';
import fs from 'fs';
import net from 'net';
import os from 'os';
import { execSync } from 'child_process';
import { fileURLToPath } from 'url';
import crypto from 'node:crypto';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Repo root (this file lives in scripts/).
const projectRoot = path.resolve(__dirname, '..');
// Hash of the absolute project path: isolates telemetry artifacts per
// checkout so multiple clones do not share collector state.
const projectHash = crypto
  .createHash('sha256')
  .update(projectRoot)
  .digest('hex');
// User-level .gemini directory in home
const USER_GEMINI_DIR = path.join(os.homedir(), '.qwen');
// Project-level .gemini directory in the workspace
const WORKSPACE_GEMINI_DIR = path.join(projectRoot, '.qwen');
// Telemetry artifacts are stored in a hashed directory under the user's ~/.qwen/tmp
export const OTEL_DIR = path.join(USER_GEMINI_DIR, 'tmp', projectHash, 'otel');
// Downloaded collector/Jaeger binaries live here.
export const BIN_DIR = path.join(OTEL_DIR, 'bin');
// Workspace settings remain in the project's .gemini directory
export const WORKSPACE_SETTINGS_FILE = path.join(
  WORKSPACE_GEMINI_DIR,
  'settings.json',
);
/**
 * Fetches a URL with curl and parses the response as JSON.
 * Downloads into a temp file (removed in all cases) so curl output never
 * mixes with this process's stdio.
 *
 * @param {string} url - URL to fetch.
 * @returns {*} Parsed JSON body.
 * @throws When curl fails or the body is not valid JSON.
 */
export function getJson(url) {
  const scratchFile = path.join(
    os.tmpdir(),
    `gemini-cli-releases-${Date.now()}.json`,
  );
  try {
    execSync(
      `curl -sL -H "User-Agent: gemini-cli-dev-script" -o "${scratchFile}" "${url}"`,
      { stdio: 'pipe' },
    );
    const body = fs.readFileSync(scratchFile, 'utf-8');
    return JSON.parse(body);
  } catch (e) {
    console.error(`Failed to fetch or parse JSON from ${url}`);
    throw e;
  } finally {
    if (fs.existsSync(scratchFile)) fs.unlinkSync(scratchFile);
  }
}
/**
 * Downloads a URL to `dest` using curl (-f: fail on HTTP errors).
 *
 * @param {string} url - Source URL.
 * @param {string} dest - Destination file path.
 * @returns {string} The destination path, for chaining.
 * @throws When curl exits non-zero.
 */
export function downloadFile(url, dest) {
  try {
    execSync(`curl -fL -sS -o "${dest}" "${url}"`, { stdio: 'pipe' });
  } catch (e) {
    console.error(`Failed to download file from ${url}`);
    throw e;
  }
  return dest;
}
/**
 * Depth-first search for the first file under `startPath` whose basename
 * satisfies `filter`.
 *
 * @param {string} startPath - Directory to search (may not exist).
 * @param {(name: string) => boolean} filter - Predicate on the file's basename.
 * @returns {string|null} Full path of the first match, or null.
 */
export function findFile(startPath, filter) {
  if (!fs.existsSync(startPath)) {
    return null;
  }
  for (const entry of fs.readdirSync(startPath)) {
    const fullPath = path.join(startPath, entry);
    if (fs.lstatSync(fullPath).isDirectory()) {
      const nested = findFile(fullPath, filter);
      if (nested) return nested;
    } else if (filter(entry)) {
      return fullPath;
    }
  }
  return null;
}
/** Thin readability wrapper over fs.existsSync for path-or-file checks. */
export function fileExists(filePath) {
  return Boolean(fs.existsSync(filePath));
}
/**
 * Reads a JSON file, returning {} when the file is missing or unparsable.
 * Read errors other than absence (e.g. permissions) still propagate.
 *
 * @param {string} filePath - Path to the JSON file.
 * @returns {object} Parsed contents, or an empty object.
 */
export function readJsonFile(filePath) {
  if (!fs.existsSync(filePath)) {
    return {};
  }
  const raw = fs.readFileSync(filePath, 'utf-8');
  try {
    return JSON.parse(raw);
  } catch (e) {
    console.error(`Error parsing JSON from ${filePath}: ${e.message}`);
    return {};
  }
}
/** Serializes `data` as 2-space-indented JSON and writes it to `filePath`. */
export function writeJsonFile(filePath, data) {
  const serialized = JSON.stringify(data, null, 2);
  fs.writeFileSync(filePath, serialized);
}
/**
 * Resolves once a TCP connection to localhost:`port` succeeds, retrying
 * every 500ms; rejects after `timeout` ms of failed attempts.
 *
 * @param {number} port - TCP port to probe on localhost.
 * @param {number} [timeout=10000] - Total wait budget in milliseconds.
 * @returns {Promise<void>}
 */
export function waitForPort(port, timeout = 10000) {
  const startedAt = Date.now();
  return new Promise((resolve, reject) => {
    const attempt = () => {
      const probe = new net.Socket();
      probe.once('connect', () => {
        probe.end();
        resolve();
      });
      probe.once('error', () => {
        if (Date.now() - startedAt > timeout) {
          reject(new Error(`Timeout waiting for port ${port} to open.`));
        } else {
          setTimeout(attempt, 500);
        }
      });
      probe.connect(port, 'localhost');
    };
    attempt();
  });
}
/**
 * Ensures a release binary from a GitHub repo is installed under BIN_DIR,
 * downloading and extracting it on first use.
 *
 * @param {string} executableName - Name to install as (inside BIN_DIR).
 * @param {string} repo - GitHub "owner/name" to fetch releases from.
 * @param {(version, platform, arch, ext) => string} assetNameCallback -
 *   Builds the expected release-asset filename (non-Jaeger path only).
 * @param {string} binaryNameInArchive - Basename to locate inside the
 *   extracted archive; falls back to executableName when falsy.
 * @param {boolean} [isJaeger=false] - Use Jaeger-specific asset discovery
 *   (scans all releases for a jaeger-2.* asset instead of "latest").
 * @returns {Promise<string|null>} Installed binary path, or null when no
 *   build exists for this platform (Jaeger on windows/arm64).
 * @throws When no matching release asset or archive binary can be found.
 */
export async function ensureBinary(
  executableName,
  repo,
  assetNameCallback,
  binaryNameInArchive,
  isJaeger = false,
) {
  const executablePath = path.join(BIN_DIR, executableName);
  // Cached from a previous run — nothing to do.
  if (fileExists(executablePath)) {
    console.log(`${executableName} already exists at ${executablePath}`);
    return executablePath;
  }
  console.log(`🔍 ${executableName} not found. Downloading from ${repo}...`);
  // Normalize Node's platform/arch names to the ones used in release assets.
  const platform = process.platform === 'win32' ? 'windows' : process.platform;
  const arch = process.arch === 'x64' ? 'amd64' : process.arch;
  const ext = platform === 'windows' ? 'zip' : 'tar.gz';
  if (isJaeger && platform === 'windows' && arch === 'arm64') {
    console.warn(
      `⚠️ Jaeger does not have a release for Windows on ARM64. Skipping.`,
    );
    return null;
  }
  let release;
  let asset;
  if (isJaeger) {
    // Jaeger's "latest" release may be a v1 line; scan releases sorted by
    // semver (descending) for the newest one shipping a jaeger-2.* asset.
    console.log(`🔍 Finding latest Jaeger v2+ asset...`);
    const releases = getJson(`https://api.github.com/repos/${repo}/releases`);
    const sortedReleases = releases
      .filter((r) => !r.prerelease && r.tag_name.startsWith('v'))
      .sort((a, b) => {
        const aVersion = a.tag_name.substring(1).split('.').map(Number);
        const bVersion = b.tag_name.substring(1).split('.').map(Number);
        for (let i = 0; i < Math.max(aVersion.length, bVersion.length); i++) {
          if ((aVersion[i] || 0) > (bVersion[i] || 0)) return -1;
          if ((aVersion[i] || 0) < (bVersion[i] || 0)) return 1;
        }
        return 0;
      });
    for (const r of sortedReleases) {
      const expectedSuffix =
        platform === 'windows'
          ? `-${platform}-${arch}.zip`
          : `-${platform}-${arch}.tar.gz`;
      const foundAsset = r.assets.find(
        (a) =>
          a.name.startsWith('jaeger-2.') && a.name.endsWith(expectedSuffix),
      );
      if (foundAsset) {
        release = r;
        asset = foundAsset;
        console.log(
          `⬇️ Found ${asset.name} in release ${r.tag_name}, downloading...`,
        );
        break;
      }
    }
    if (!asset) {
      throw new Error(
        `Could not find a suitable Jaeger v2 asset for platform ${platform}/${arch}.`,
      );
    }
  } else {
    // Standard path: take the latest release and match the exact asset name.
    release = getJson(`https://api.github.com/repos/${repo}/releases/latest`);
    const version = release.tag_name.startsWith('v')
      ? release.tag_name.substring(1)
      : release.tag_name;
    const assetName = assetNameCallback(version, platform, arch, ext);
    asset = release.assets.find((a) => a.name === assetName);
    if (!asset) {
      throw new Error(
        `Could not find a suitable asset for ${repo} (version ${version}) on platform ${platform}/${arch}. Searched for: ${assetName}`,
      );
    }
  }
  const downloadUrl = asset.browser_download_url;
  // Download + extract in a throwaway temp dir, cleaned up in the finally.
  const tmpDir = fs.mkdtempSync(
    path.join(os.tmpdir(), 'gemini-cli-telemetry-'),
  );
  const archivePath = path.join(tmpDir, asset.name);
  try {
    console.log(`⬇️ Downloading ${asset.name}...`);
    downloadFile(downloadUrl, archivePath);
    console.log(`📦 Extracting ${asset.name}...`);
    // Trust the asset's actual extension over the platform default.
    const actualExt = asset.name.endsWith('.zip') ? 'zip' : 'tar.gz';
    if (actualExt === 'zip') {
      execSync(`unzip -o "${archivePath}" -d "${tmpDir}"`, { stdio: 'pipe' });
    } else {
      execSync(`tar -xzf "${archivePath}" -C "${tmpDir}"`, { stdio: 'pipe' });
    }
    const nameToFind = binaryNameInArchive || executableName;
    const foundBinaryPath = findFile(tmpDir, (file) => {
      if (platform === 'windows') {
        return file === `${nameToFind}.exe`;
      }
      return file === nameToFind;
    });
    if (!foundBinaryPath) {
      throw new Error(
        `Could not find binary "${nameToFind}" in extracted archive at ${tmpDir}. Contents: ${fs.readdirSync(tmpDir).join(', ')}`,
      );
    }
    fs.renameSync(foundBinaryPath, executablePath);
    if (platform !== 'windows') {
      // Make the installed binary executable.
      fs.chmodSync(executablePath, '755');
    }
    console.log(`${executableName} installed at ${executablePath}`);
    return executablePath;
  } finally {
    fs.rmSync(tmpDir, { recursive: true, force: true });
    if (fs.existsSync(archivePath)) {
      fs.unlinkSync(archivePath);
    }
  }
}
/**
 * Toggles telemetry configuration in the workspace settings file.
 *
 * When enabling: turns telemetry on, points it at `oTelEndpoint`/`target`,
 * and forces sandbox off (telemetry collectors run outside the sandbox).
 * When disabling: removes the telemetry keys and optionally restores the
 * sandbox setting captured on a previous enable call.
 *
 * @param {boolean} enable - true to enable telemetry, false to tear it down.
 * @param {string} [oTelEndpoint] - OTLP endpoint written when enabling.
 * @param {string} [target] - Telemetry target written when enabling.
 * @param {*} [originalSandboxSettingToRestore] - Sandbox value to restore
 *   when disabling (as previously returned by this function).
 * @returns {*} The sandbox setting that was in place BEFORE this call, so a
 *   later disable call can restore it.
 */
export function manageTelemetrySettings(
  enable,
  oTelEndpoint = 'http://localhost:4317',
  target = 'local',
  originalSandboxSettingToRestore,
) {
  const workspaceSettings = readJsonFile(WORKSPACE_SETTINGS_FILE);
  // Captured before any mutation; this is the return value.
  const currentSandboxSetting = workspaceSettings.sandbox;
  let settingsModified = false;
  if (typeof workspaceSettings.telemetry !== 'object') {
    workspaceSettings.telemetry = {};
  }
  if (enable) {
    // Each write is guarded so the file is only rewritten (and each message
    // only logged) when something actually changes.
    if (workspaceSettings.telemetry.enabled !== true) {
      workspaceSettings.telemetry.enabled = true;
      settingsModified = true;
      console.log('⚙️ Enabled telemetry in workspace settings.');
    }
    if (workspaceSettings.sandbox !== false) {
      workspaceSettings.sandbox = false;
      settingsModified = true;
      console.log('✅ Disabled sandbox mode for telemetry.');
    }
    if (workspaceSettings.telemetry.otlpEndpoint !== oTelEndpoint) {
      workspaceSettings.telemetry.otlpEndpoint = oTelEndpoint;
      settingsModified = true;
      console.log(`🔧 Set telemetry OTLP endpoint to ${oTelEndpoint}.`);
    }
    if (workspaceSettings.telemetry.target !== target) {
      workspaceSettings.telemetry.target = target;
      settingsModified = true;
      console.log(`🎯 Set telemetry target to ${target}.`);
    }
  } else {
    if (workspaceSettings.telemetry.enabled === true) {
      delete workspaceSettings.telemetry.enabled;
      settingsModified = true;
      console.log('⚙️ Disabled telemetry in workspace settings.');
    }
    if (workspaceSettings.telemetry.otlpEndpoint) {
      delete workspaceSettings.telemetry.otlpEndpoint;
      settingsModified = true;
      console.log('🔧 Cleared telemetry OTLP endpoint.');
    }
    if (workspaceSettings.telemetry.target) {
      delete workspaceSettings.telemetry.target;
      settingsModified = true;
      console.log('🎯 Cleared telemetry target.');
    }
    // Drop the telemetry object entirely once it is empty.
    if (Object.keys(workspaceSettings.telemetry).length === 0) {
      delete workspaceSettings.telemetry;
    }
    if (
      originalSandboxSettingToRestore !== undefined &&
      workspaceSettings.sandbox !== originalSandboxSettingToRestore
    ) {
      workspaceSettings.sandbox = originalSandboxSettingToRestore;
      settingsModified = true;
      console.log('✅ Restored original sandbox setting.');
    }
  }
  if (settingsModified) {
    writeJsonFile(WORKSPACE_SETTINGS_FILE, workspaceSettings);
    console.log('✅ Workspace settings updated.');
  } else {
    console.log(
      enable
        ? '✅ Workspace settings are already configured for telemetry.'
        : '✅ Workspace settings already reflect telemetry disabled.',
    );
  }
  return currentSandboxSetting;
}
/**
 * Installs process-wide shutdown handlers that stop spawned telemetry
 * processes, close log file descriptors, and restore workspace settings.
 * Runs at most once, on exit / SIGINT / SIGTERM / uncaught exception.
 *
 * @param {() => Array} getProcesses - Returns child processes to SIGTERM;
 *   a callback so processes spawned after registration are still covered.
 * @param {() => Array<number>} getLogFileDescriptors - Returns open log fds
 *   to close.
 * @param {*} originalSandboxSetting - Sandbox value to restore via
 *   manageTelemetrySettings(false, ...).
 */
export function registerCleanup(
  getProcesses,
  getLogFileDescriptors,
  originalSandboxSetting,
) {
  // Guard: 'exit' fires after the signal handlers call process.exit, so
  // cleanup would otherwise run twice.
  let cleanedUp = false;
  const cleanup = () => {
    if (cleanedUp) return;
    cleanedUp = true;
    console.log('\n👋 Shutting down...');
    // Revert the telemetry settings written when the collector started.
    manageTelemetrySettings(false, null, originalSandboxSetting);
    const processes = getProcesses ? getProcesses() : [];
    processes.forEach((proc) => {
      if (proc && proc.pid) {
        const name = path.basename(proc.spawnfile);
        try {
          console.log(`🛑 Stopping ${name} (PID: ${proc.pid})...`);
          process.kill(proc.pid, 'SIGTERM');
          console.log(`${name} stopped.`);
        } catch (e) {
          // ESRCH means the process already exited; anything else is real.
          if (e.code !== 'ESRCH') {
            console.error(`Error stopping ${name}: ${e.message}`);
          }
        }
      }
    });
    const logFileDescriptors = getLogFileDescriptors
      ? getLogFileDescriptors()
      : [];
    logFileDescriptors.forEach((fd) => {
      if (fd) {
        try {
          fs.closeSync(fd);
        } catch (_) {
          /* no-op */
        }
      }
    });
  };
  process.on('exit', cleanup);
  // Route signals through process.exit so the 'exit' handler runs cleanup.
  process.on('SIGINT', () => process.exit(0));
  process.on('SIGTERM', () => process.exit(0));
  process.on('uncaughtException', (err) => {
    console.error('Uncaught Exception:', err);
    cleanup();
    process.exit(1);
  });
}

View File

@@ -0,0 +1,111 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest';
import { getReleaseVersion } from '../get-release-version';
import { execSync } from 'child_process';
import * as fs from 'fs';
// Stub child_process.execSync (used by get-release-version for the git hash).
vi.mock('child_process', () => ({
  execSync: vi.fn(),
}));
// Partially mock fs: keep the real module but stub default.readFileSync so
// tests can control the package.json contents the script reads.
vi.mock('fs', async (importOriginal) => {
  const mod = await importOriginal();
  return {
    ...mod,
    default: {
      ...mod.default,
      readFileSync: vi.fn(),
    },
  };
});
// Unit tests for getReleaseVersion: env-driven version calculation for
// nightly and manual releases. Each test restores process.env and timers.
describe('getReleaseVersion', () => {
  const originalEnv = { ...process.env };
  beforeEach(() => {
    vi.resetModules();
    process.env = { ...originalEnv };
    // Fake timers let the nightly test pin the date embedded in the version.
    vi.useFakeTimers();
  });
  afterEach(() => {
    process.env = originalEnv;
    vi.clearAllMocks();
    vi.useRealTimers();
  });
  it('should calculate nightly version when IS_NIGHTLY is true', () => {
    process.env.IS_NIGHTLY = 'true';
    const knownDate = new Date('2025-07-20T10:00:00.000Z');
    vi.setSystemTime(knownDate);
    // package.json version + date + short git hash make up the nightly tag.
    vi.mocked(fs.default.readFileSync).mockReturnValue(
      JSON.stringify({ version: '0.1.0' }),
    );
    vi.mocked(execSync).mockReturnValue('abcdef');
    const { releaseTag, releaseVersion, npmTag } = getReleaseVersion();
    expect(releaseTag).toBe('v0.1.0-nightly.250720.abcdef');
    expect(releaseVersion).toBe('0.1.0-nightly.250720.abcdef');
    expect(npmTag).toBe('nightly');
  });
  it('should use manual version when provided', () => {
    process.env.MANUAL_VERSION = '1.2.3';
    const { releaseTag, releaseVersion, npmTag } = getReleaseVersion();
    expect(releaseTag).toBe('v1.2.3');
    expect(releaseVersion).toBe('1.2.3');
    expect(npmTag).toBe('latest');
  });
  it('should prepend v to manual version if missing', () => {
    process.env.MANUAL_VERSION = '1.2.3';
    const { releaseTag } = getReleaseVersion();
    expect(releaseTag).toBe('v1.2.3');
  });
  it('should handle pre-release versions correctly', () => {
    process.env.MANUAL_VERSION = 'v1.2.3-beta.1';
    const { releaseTag, releaseVersion, npmTag } = getReleaseVersion();
    expect(releaseTag).toBe('v1.2.3-beta.1');
    expect(releaseVersion).toBe('1.2.3-beta.1');
    // npm dist-tag is derived from the first prerelease identifier.
    expect(npmTag).toBe('beta');
  });
  it('should throw an error for invalid version format', () => {
    process.env.MANUAL_VERSION = '1.2';
    expect(() => getReleaseVersion()).toThrow(
      'Error: Version must be in the format vX.Y.Z or vX.Y.Z-prerelease',
    );
  });
  it('should throw an error if no version is provided for non-nightly release', () => {
    expect(() => getReleaseVersion()).toThrow(
      'Error: No version specified and this is not a nightly release.',
    );
  });
  it('should throw an error for versions with build metadata', () => {
    process.env.MANUAL_VERSION = 'v1.2.3+build456';
    expect(() => getReleaseVersion()).toThrow(
      'Error: Versions with build metadata (+) are not supported for releases.',
    );
  });
});
// NOTE(review): execSync is mocked at module level above, so this suite
// asserts on the stub's own return value — it round-trips the mock and never
// executes scripts/get-release-version.js. Consider spawning the real script
// without the child_process mock if end-to-end coverage is intended.
describe('get-release-version script', () => {
  it('should print version JSON to stdout when executed directly', () => {
    const expectedJson = {
      releaseTag: 'v0.1.0-nightly.20250705',
      releaseVersion: '0.1.0-nightly.20250705',
      npmTag: 'nightly',
    };
    execSync.mockReturnValue(JSON.stringify(expectedJson));
    const result = execSync('node scripts/get-release-version.js').toString();
    expect(JSON.parse(result)).toEqual(expectedJson);
  });
});

View File

@@ -0,0 +1,12 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { vi } from 'vitest';
// Mock fs so tests can intercept appendFileSync without touching the disk.
// The factory must be async: vi.importActual returns a Promise, and spreading
// a Promise directly yields no properties — which would silently strip every
// real fs export from the mock.
vi.mock('fs', async () => {
  const actual = await vi.importActual('fs');
  return {
    ...actual,
    appendFileSync: vi.fn(),
  };
});

View File

@@ -0,0 +1,20 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { defineConfig } from 'vitest/config';
// Vitest configuration for the repo's build/release script tests.
export default defineConfig({
  test: {
    globals: true, // expose describe/it/expect without per-file imports
    environment: 'node',
    include: ['scripts/tests/**/*.test.js'],
    setupFiles: ['scripts/tests/test-setup.ts'],
    coverage: {
      provider: 'v8',
      reporter: ['text', 'lcov'],
    },
  },
});

68
scripts/version.js Normal file
View File

@@ -0,0 +1,68 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { execSync } from 'child_process';
import { readFileSync, writeFileSync } from 'fs';
import { resolve } from 'path';
// A script to handle versioning and ensure all related changes are in a single, atomic commit.
/**
 * Echoes a shell command, then executes it with output streamed to this
 * process. Throws (via execSync) when the command exits non-zero.
 *
 * @param {string} cmd - Shell command line to execute.
 */
function run(cmd) {
  console.log(`> ${cmd}`);
  execSync(cmd, { stdio: 'inherit' });
}
/** Reads a UTF-8 JSON file and returns the parsed value. */
function readJson(filePath) {
  const raw = readFileSync(filePath, 'utf-8');
  return JSON.parse(raw);
}
/** Writes `data` as 2-space-indented JSON with a trailing newline. */
function writeJson(filePath, data) {
  const serialized = `${JSON.stringify(data, null, 2)}\n`;
  writeFileSync(filePath, serialized);
}
// 1. Get the version type from the command line arguments.
const versionType = process.argv[2];
if (!versionType) {
  console.error('Error: No version type specified.');
  console.error('Usage: npm run version <patch|minor|major|prerelease>');
  process.exit(1);
}
// 2. Bump the version in the root and all workspace package.json files.
// `npm version` itself rejects anything that is not a valid increment or
// semver string, so no further validation is done here.
run(`npm version ${versionType} --no-git-tag-version --allow-same-version`);
run(
  `npm version ${versionType} --workspaces --no-git-tag-version --allow-same-version`,
);
// 3. Get the new version number from the root package.json.
const rootPackageJsonPath = resolve(process.cwd(), 'package.json');
const newVersion = readJson(rootPackageJsonPath).version;
// Replaces the image tag (the text after the FINAL ":") with the new version.
// Using /:[^/]+$/ instead of /:.*$/ keeps registry ports intact, e.g.
// "localhost:5000/sandbox:0.1.0" -> "localhost:5000/sandbox:1.0.0" (the old
// pattern would have produced "localhost:1.0.0"). Untagged URIs are left
// unchanged since the port colon is always followed by a "/" path segment.
const retagImageUri = (uri) => uri.replace(/:[^/]+$/, `:${newVersion}`);
// 4. Update the sandboxImageUri in the root package.json.
const rootPackageJson = readJson(rootPackageJsonPath);
if (rootPackageJson.config?.sandboxImageUri) {
  rootPackageJson.config.sandboxImageUri = retagImageUri(
    rootPackageJson.config.sandboxImageUri,
  );
  console.log(`Updated sandboxImageUri in root to use version ${newVersion}`);
  writeJson(rootPackageJsonPath, rootPackageJson);
}
// 5. Update the sandboxImageUri in the cli package.json.
const cliPackageJsonPath = resolve(process.cwd(), 'packages/cli/package.json');
const cliPackageJson = readJson(cliPackageJsonPath);
if (cliPackageJson.config?.sandboxImageUri) {
  cliPackageJson.config.sandboxImageUri = retagImageUri(
    cliPackageJson.config.sandboxImageUri,
  );
  console.log(
    `Updated sandboxImageUri in cli package to use version ${newVersion}`,
  );
  writeJson(cliPackageJsonPath, cliPackageJson);
}
// 6. Run `npm install` to update package-lock.json.
run('npm install');
console.log(`Successfully bumped versions to v${newVersion}.`);