import {getLogger} from "@ui5/logger";

const log = getLogger("build:cache:BuildTimings");

// Timing is opt-in via environment variable so that the hot cache paths
// carry zero measurement overhead by default
const isEnabled = process.env.UI5_BUILD_TIMINGS === "true";

// label → {totalMs, count, minMs, maxMs}
const entries = new Map();

/**
 * Start timing an operation.
 * Returns a start token (timestamp) or undefined if timing is disabled.
 *
 * @param {string} label Operation label (e.g. "readIndexCache").
 *   Currently unused here; kept for call-site symmetry with end()
 * @returns {number|undefined} Start timestamp, or undefined when disabled
 */
function start(label) { // eslint-disable-line no-unused-vars
	if (!isEnabled) {
		return undefined;
	}
	return performance.now();
}

/**
 * Record the elapsed time for a previously started operation.
 * No-op when timing is disabled or if startTime is undefined.
 *
 * @param {string} label Operation label (must match the label passed to start())
 * @param {number|undefined} startTime The value returned by start()
 */
function end(label, startTime) {
	if (startTime === undefined) {
		// Either timing is disabled (start() returned undefined) or
		// start() was never called for this operation
		return;
	}
	const elapsed = performance.now() - startTime;
	let entry = entries.get(label);
	if (!entry) {
		// minMs/maxMs seeded so the first sample always wins both comparisons
		entry = {totalMs: 0, count: 0, minMs: Infinity, maxMs: -Infinity};
		entries.set(label, entry);
	}
	entry.totalMs += elapsed;
	entry.count++;
	entry.minMs = Math.min(entry.minMs, elapsed);
	entry.maxMs = Math.max(entry.maxMs, elapsed);
}

/**
 * Format a millisecond value for display.
 * Values >= 1000ms are shown as seconds (e.g. "1.234s").
 * Values < 1000ms are shown with ms suffix (e.g. "12.34ms").
 * Values < 0.01ms are shown as "<0.01ms".
 *
 * @param {number} ms Duration in milliseconds
 * @returns {string} Formatted duration string
 */
function formatMs(ms) {
	if (ms >= 1000) {
		return `${(ms / 1000).toFixed(3)}s`;
	}
	if (ms < 0.01) {
		return "<0.01ms";
	}
	return `${ms.toFixed(2)}ms`;
}

/**
 * Log the timing summary to the build log.
 * Groups entries into Reads, Writes, and Resource I/O categories; any labels
 * not in those groups are reported in an "Other" section.
 * Only logs when timing is enabled and there are recorded entries.
 */
function logSummary() {
	if (!isEnabled || entries.size === 0) {
		return;
	}

	// Column widths: operation name column plus five right-aligned numeric columns
	const COL_OP = 28;
	const COL_NUM = 10;

	const header =
		"Operation".padEnd(COL_OP) +
		"Calls".padStart(COL_NUM) +
		"Total".padStart(COL_NUM) +
		"Avg".padStart(COL_NUM) +
		"Min".padStart(COL_NUM) +
		"Max".padStart(COL_NUM);

	const separator = "─".repeat(header.length);

	const lines = [
		"",
		"Build Cache Timings",
		separator,
		header,
		separator,
	];

	const readLabels = [
		"readBuildManifest",
		"readIndexCache",
		"readTaskMetadata",
		"readResultMetadata",
		"readStageCache",
	];
	const writeLabels = [
		"writeBuildManifest",
		"writeIndexCache",
		"writeTaskMetadata",
		"writeResultMetadata",
		"writeStageCache",
	];
	const resourceLabels = [
		"getResourcePathForStage",
		"writeStageResource",
	];

	let grandTotalMs = 0;
	let grandTotalCalls = 0;

	// Renders one titled section; labels with no samples get a "0 / --" row
	// so the table shape stays stable across runs
	function addSection(title, labels) {
		let sectionTotal = 0;
		let sectionCalls = 0;
		const sectionLines = [];

		for (const label of labels) {
			const entry = entries.get(label);
			if (!entry || entry.count === 0) {
				sectionLines.push(
					` ${label}`.padEnd(COL_OP) +
					"0".padStart(COL_NUM) +
					"--".padStart(COL_NUM) +
					"--".padStart(COL_NUM) +
					"--".padStart(COL_NUM) +
					"--".padStart(COL_NUM)
				);
				continue;
			}
			const avg = entry.totalMs / entry.count;
			sectionLines.push(
				` ${label}`.padEnd(COL_OP) +
				String(entry.count).padStart(COL_NUM) +
				formatMs(entry.totalMs).padStart(COL_NUM) +
				formatMs(avg).padStart(COL_NUM) +
				formatMs(entry.minMs).padStart(COL_NUM) +
				formatMs(entry.maxMs).padStart(COL_NUM)
			);
			sectionTotal += entry.totalMs;
			sectionCalls += entry.count;
		}

		lines.push(title);
		lines.push(...sectionLines);

		if (sectionCalls > 0) {
			// Subtotal only carries calls + total; avg/min/max are per-label stats
			lines.push(
				" Subtotal".padEnd(COL_OP) +
				String(sectionCalls).padStart(COL_NUM) +
				formatMs(sectionTotal).padStart(COL_NUM) +
				"".padStart(COL_NUM) +
				"".padStart(COL_NUM) +
				"".padStart(COL_NUM)
			);
		}
		lines.push("");

		grandTotalMs += sectionTotal;
		grandTotalCalls += sectionCalls;
	}

	addSection("Reads:", readLabels);
	addSection("Writes:", writeLabels);
	addSection("Resource I/O:", resourceLabels);

	// Include any extra labels not categorised above
	const knownLabels = new Set([...readLabels, ...writeLabels, ...resourceLabels]);
	const extraLabels = [...entries.keys()].filter((l) => !knownLabels.has(l));
	if (extraLabels.length) {
		addSection("Other:", extraLabels);
	}

	lines.push(separator);
	lines.push(
		"TOTAL".padEnd(COL_OP) +
		String(grandTotalCalls).padStart(COL_NUM) +
		formatMs(grandTotalMs).padStart(COL_NUM)
	);
	lines.push(separator);

	log.info(lines.join("\n"));
}

/**
 * Reset all recorded timings.
 */
function reset() {
	entries.clear();
}

export default {start, end, logSummary, reset, isEnabled};
for the build cache using file-based storage and a @@ -44,17 +40,13 @@ const CACHE_VERSION = "v0_3"; */ export default class CacheManager { #cas; - #manifestDir; - #stageMetadataDir; - #taskMetadataDir; - #resultMetadataDir; - #indexDir; + #store; /** * Creates a new CacheManager instance * - * Initializes the directory structure for the cache. This constructor is private - - * use CacheManager.create() instead to get a singleton instance. + * Initializes the SQLite metadata store and CAS directory. + * This constructor is private - use CacheManager.create() instead. * * @private * @param {string} cacheDir Base directory for the cache @@ -62,11 +54,7 @@ export default class CacheManager { constructor(cacheDir) { cacheDir = path.join(cacheDir, CACHE_VERSION); this.#cas = new ContentAddressableStorage(path.join(cacheDir, "cas")); - this.#manifestDir = path.join(cacheDir, "buildManifests"); - this.#stageMetadataDir = path.join(cacheDir, "stageMetadata"); - this.#taskMetadataDir = path.join(cacheDir, "taskMetadata"); - this.#resultMetadataDir = path.join(cacheDir, "resultMetadata"); - this.#indexDir = path.join(cacheDir, "index"); + this.#store = new MetadataStore(cacheDir); } /** @@ -103,18 +91,6 @@ export default class CacheManager { return chacheManagerInstances.get(cacheDir); } - /** - * Generates the file path for a build manifest - * - * @param {string} packageName Package/project identifier - * @param {string} buildSignature Build signature hash - * @returns {string} Absolute path to the build manifest file - */ - #getBuildManifestPath(packageName, buildSignature) { - const pkgDir = getPathFromPackageName(packageName); - return path.join(this.#manifestDir, pkgDir, `${buildSignature}.json`); - } - /** * Reads a build manifest from cache * @@ -122,30 +98,24 @@ export default class CacheManager { * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash * @returns {Promise} Parsed manifest object 
or null if not found - * @throws {Error} If file read fails for reasons other than file not existing */ async readBuildManifest(projectId, buildSignature) { + const t = BuildTimings.start("readBuildManifest"); try { - const manifest = await readFile(this.#getBuildManifestPath(projectId, buildSignature), "utf8"); - return JSON.parse(manifest); + return this.#store.getBuildManifest(projectId, buildSignature); } catch (err) { - if (err.code === "ENOENT") { - // Cache miss - return null; - } throw new Error(`Failed to read build manifest for ` + `${projectId} / ${buildSignature}: ${err.message}`, { cause: err, }); + } finally { + BuildTimings.end("readBuildManifest", t); } } /** * Writes a build manifest to cache * - * Creates parent directories if they don't exist. Manifests are stored as - * formatted JSON (2-space indentation) for readability. - * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash @@ -153,59 +123,40 @@ export default class CacheManager { * @returns {Promise} */ async writeBuildManifest(projectId, buildSignature, manifest) { - const manifestPath = this.#getBuildManifestPath(projectId, buildSignature); - await mkdir(path.dirname(manifestPath), {recursive: true}); - await writeFile(manifestPath, JSON.stringify(manifest, null, 2), "utf8"); - } - - /** - * Generates the file path for resource index metadata - * - * @param {string} packageName Package/project identifier - * @param {string} buildSignature Build signature hash - * @param {string} kind "source" or "result" - * @returns {string} Absolute path to the index metadata file - */ - #getIndexCachePath(packageName, buildSignature, kind) { - const pkgDir = getPathFromPackageName(packageName); - return path.join(this.#indexDir, pkgDir, `${kind}-${buildSignature}.json`); + const t = BuildTimings.start("writeBuildManifest"); + try { + this.#store.putBuildManifest(projectId, buildSignature, manifest); + } finally { + 
BuildTimings.end("writeBuildManifest", t); + } } /** * Reads resource index cache from storage * - * The index cache contains the resource tree structure and task metadata, - * enabling efficient change detection and cache validation. - * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash * @param {string} kind "source" or "result" * @returns {Promise} Parsed index cache object or null if not found - * @throws {Error} If file read fails for reasons other than file not existing */ async readIndexCache(projectId, buildSignature, kind) { + const t = BuildTimings.start("readIndexCache"); try { - const metadata = await readFile(this.#getIndexCachePath(projectId, buildSignature, kind), "utf8"); - return JSON.parse(metadata); + return this.#store.getIndexCache(projectId, buildSignature, kind); } catch (err) { - if (err.code === "ENOENT") { - // Cache miss - return null; - } throw new Error(`Failed to read resource index cache for ` + `${projectId} / ${buildSignature}: ${err.message}`, { cause: err, }); + } finally { + BuildTimings.end("readIndexCache", t); } } /** * Writes resource index cache to storage * - * Persists the resource index and associated task metadata for later retrieval. - * Creates parent directories if needed. 
- * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash @@ -214,64 +165,41 @@ export default class CacheManager { * @returns {Promise} */ async writeIndexCache(projectId, buildSignature, kind, index) { - const indexPath = this.#getIndexCachePath(projectId, buildSignature, kind); - await mkdir(path.dirname(indexPath), {recursive: true}); - await writeFile(indexPath, JSON.stringify(index, null, 2), "utf8"); - } - - /** - * Generates the file path for stage metadata - * - * @param {string} packageName Package/project identifier - * @param {string} buildSignature Build signature hash - * @param {string} stageId Stage identifier (e.g., "result" or "task/taskName") - * @param {string} stageSignature Stage signature hash (based on input resources) - * @returns {string} Absolute path to the stage metadata file - */ - #getStageMetadataPath(packageName, buildSignature, stageId, stageSignature) { - const pkgDir = getPathFromPackageName(packageName); - stageId = stageId.replace("/", "_"); - return path.join(this.#stageMetadataDir, pkgDir, buildSignature, stageId, `${stageSignature}.json`); + const t = BuildTimings.start("writeIndexCache"); + try { + this.#store.putIndexCache(projectId, buildSignature, kind, index); + } finally { + BuildTimings.end("writeIndexCache", t); + } } /** * Reads stage metadata from cache * - * Stage metadata contains information about resources produced by a build stage, - * including resource paths and their metadata. 
- * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash * @param {string} stageId Stage identifier (e.g., "result" or "task/taskName") * @param {string} stageSignature Stage signature hash (based on input resources) * @returns {Promise} Parsed stage metadata or null if not found - * @throws {Error} If file read fails for reasons other than file not existing */ async readStageCache(projectId, buildSignature, stageId, stageSignature) { + const t = BuildTimings.start("readStageCache"); try { - const metadata = await readFile( - this.#getStageMetadataPath(projectId, buildSignature, stageId, stageSignature - ), "utf8"); - return JSON.parse(metadata); + return this.#store.getStageMetadata(projectId, buildSignature, stageId, stageSignature); } catch (err) { - if (err.code === "ENOENT") { - // Cache miss - return null; - } throw new Error(`Failed to read stage metadata from cache for ` + `${projectId} / ${buildSignature} / ${stageId} / ${stageSignature}: ${err.message}`, { cause: err, }); + } finally { + BuildTimings.end("readStageCache", t); } } /** * Writes stage metadata to cache * - * Persists metadata about resources produced by a build stage. - * Creates parent directories if needed. 
- * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash @@ -281,63 +209,41 @@ export default class CacheManager { * @returns {Promise} */ async writeStageCache(projectId, buildSignature, stageId, stageSignature, metadata) { - const metadataPath = this.#getStageMetadataPath( - projectId, buildSignature, stageId, stageSignature); - await mkdir(path.dirname(metadataPath), {recursive: true}); - await writeFile(metadataPath, JSON.stringify(metadata, null, 2), "utf8"); - } - - /** - * Generates the file path for task metadata - * - * @param {string} packageName Package/project identifier - * @param {string} buildSignature Build signature hash - * @param {string} taskName Task name - * @param {string} type "project" or "dependency" - * @returns {string} Absolute path to the task metadata file - */ - #getTaskMetadataPath(packageName, buildSignature, taskName, type) { - const pkgDir = getPathFromPackageName(packageName); - return path.join(this.#taskMetadataDir, pkgDir, buildSignature, taskName, `${type}.json`); + const t = BuildTimings.start("writeStageCache"); + try { + this.#store.putStageMetadata(projectId, buildSignature, stageId, stageSignature, metadata); + } finally { + BuildTimings.end("writeStageCache", t); + } } /** * Reads task metadata from cache * - * Task metadata contains resource request graphs and indices for tracking - * which resources a task accessed during execution. 
- * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash * @param {string} taskName Task name * @param {string} type "project" or "dependency" * @returns {Promise} Parsed task metadata or null if not found - * @throws {Error} If file read fails for reasons other than file not existing */ async readTaskMetadata(projectId, buildSignature, taskName, type) { + const t = BuildTimings.start("readTaskMetadata"); try { - const metadata = await readFile( - this.#getTaskMetadataPath(projectId, buildSignature, taskName, type), "utf8"); - return JSON.parse(metadata); + return this.#store.getTaskMetadata(projectId, buildSignature, taskName, type); } catch (err) { - if (err.code === "ENOENT") { - // Cache miss - return null; - } throw new Error(`Failed to read task metadata from cache for ` + `${projectId} / ${buildSignature} / ${taskName} / ${type}: ${err.message}`, { cause: err, }); + } finally { + BuildTimings.end("readTaskMetadata", t); } } /** * Writes task metadata to cache * - * Persists task-specific metadata including resource request graphs and indices. - * Creates parent directories if needed. 
- * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash @@ -347,61 +253,40 @@ export default class CacheManager { * @returns {Promise} */ async writeTaskMetadata(projectId, buildSignature, taskName, type, metadata) { - const metadataPath = this.#getTaskMetadataPath(projectId, buildSignature, taskName, type); - await mkdir(path.dirname(metadataPath), {recursive: true}); - await writeFile(metadataPath, JSON.stringify(metadata, null, 2), "utf8"); - } - - /** - * Generates the file path for result metadata - * - * @param {string} packageName Package/project identifier - * @param {string} buildSignature Build signature hash - * @param {string} stageSignature Stage signature hash (based on input resources) - * @returns {string} Absolute path to the result metadata file - */ - #getResultMetadataPath(packageName, buildSignature, stageSignature) { - const pkgDir = getPathFromPackageName(packageName); - return path.join(this.#resultMetadataDir, pkgDir, buildSignature, `${stageSignature}.json`); + const t = BuildTimings.start("writeTaskMetadata"); + try { + this.#store.putTaskMetadata(projectId, buildSignature, taskName, type, metadata); + } finally { + BuildTimings.end("writeTaskMetadata", t); + } } /** * Reads result metadata from cache * - * Result metadata contains information about the final build output, including - * references to all stage signatures that comprise the result. 
- * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash * @param {string} stageSignature Stage signature hash (based on input resources) * @returns {Promise} Parsed result metadata or null if not found - * @throws {Error} If file read fails for reasons other than file not existing */ async readResultMetadata(projectId, buildSignature, stageSignature) { + const t = BuildTimings.start("readResultMetadata"); try { - const metadata = await readFile( - this.#getResultMetadataPath(projectId, buildSignature, stageSignature - ), "utf8"); - return JSON.parse(metadata); + return this.#store.getResultMetadata(projectId, buildSignature, stageSignature); } catch (err) { - if (err.code === "ENOENT") { - // Cache miss - return null; - } throw new Error(`Failed to read stage metadata from cache for ` + `${projectId} / ${buildSignature} / ${stageSignature}: ${err.message}`, { cause: err, }); + } finally { + BuildTimings.end("readResultMetadata", t); } } /** * Writes result metadata to cache * - * Persists metadata about the final build result, including stage signature mappings. - * Creates parent directories if needed. - * * @public * @param {string} projectId Project identifier (typically package name) * @param {string} buildSignature Build signature hash @@ -410,10 +295,19 @@ export default class CacheManager { * @returns {Promise} */ async writeResultMetadata(projectId, buildSignature, stageSignature, metadata) { - const metadataPath = this.#getResultMetadataPath( - projectId, buildSignature, stageSignature); - await mkdir(path.dirname(metadataPath), {recursive: true}); - await writeFile(metadataPath, JSON.stringify(metadata, null, 2), "utf8"); + const t = BuildTimings.start("writeResultMetadata"); + try { + this.#store.putResultMetadata(projectId, buildSignature, stageSignature, metadata); + } finally { + BuildTimings.end("writeResultMetadata", t); + } + } + + /** + * Close the metadata store. 
Should be called when the build is complete. + */ + close() { + this.#store.close(); } /** @@ -440,13 +334,18 @@ export default class CacheManager { * @throws {Error} If integrity is not provided */ async getResourcePathForStage(integrity) { - if (!integrity) { - throw new Error("Integrity hash must be provided to read from cache"); - } - if (await this.#cas.has(integrity)) { - return this.#cas.contentPath(integrity); + const t = BuildTimings.start("getResourcePathForStage"); + try { + if (!integrity) { + throw new Error("Integrity hash must be provided to read from cache"); + } + if (await this.#cas.has(integrity)) { + return this.#cas.contentPath(integrity); + } + return null; + } finally { + BuildTimings.end("getResourcePathForStage", t); } - return null; } /** @@ -460,8 +359,13 @@ export default class CacheManager { * @returns {Promise} */ async writeStageResource(resource) { - const integrity = await resource.getIntegrity(); - const buffer = await resource.getBuffer(); - await this.#cas.put(integrity, buffer); + const t = BuildTimings.start("writeStageResource"); + try { + const integrity = await resource.getIntegrity(); + const buffer = await resource.getBuffer(); + await this.#cas.put(integrity, buffer); + } finally { + BuildTimings.end("writeStageResource", t); + } } } diff --git a/packages/project/lib/build/cache/MetadataStore.js b/packages/project/lib/build/cache/MetadataStore.js new file mode 100644 index 00000000000..40166408740 --- /dev/null +++ b/packages/project/lib/build/cache/MetadataStore.js @@ -0,0 +1,210 @@ +import {DatabaseSync} from "node:sqlite"; +import path from "node:path"; +import fs from "graceful-fs"; + +/** + * SQLite-based metadata store for build cache. + * + * Provides a key-value interface backed by a single SQLite database file. + * Each metadata category maps to a table with composite TEXT keys and a + * TEXT value column (JSON). + * + * WAL mode is enabled for concurrent read performance. 
+ */ +export default class MetadataStore { + #db; + #stmts; + + /** + * Open (or create) the SQLite database at the given directory. + * + * @param {string} cacheDir Absolute path to the versioned cache directory + */ + constructor(cacheDir) { + const dbDir = path.join(cacheDir, "metadata"); + fs.mkdirSync(dbDir, {recursive: true}); + + const dbPath = path.join(dbDir, "cache.db"); + this.#db = new DatabaseSync(dbPath); + + // Performance tuning + this.#db.exec("PRAGMA journal_mode = WAL"); + this.#db.exec("PRAGMA synchronous = NORMAL"); + + this.#createTables(); + this.#stmts = this.#prepareStatements(); + } + + #createTables() { + this.#db.exec(` + CREATE TABLE IF NOT EXISTS build_manifests ( + project_id TEXT NOT NULL, + build_signature TEXT NOT NULL, + value TEXT NOT NULL, + PRIMARY KEY (project_id, build_signature) + ); + CREATE TABLE IF NOT EXISTS index_cache ( + project_id TEXT NOT NULL, + build_signature TEXT NOT NULL, + kind TEXT NOT NULL, + value TEXT NOT NULL, + PRIMARY KEY (project_id, build_signature, kind) + ); + CREATE TABLE IF NOT EXISTS stage_metadata ( + project_id TEXT NOT NULL, + build_signature TEXT NOT NULL, + stage_id TEXT NOT NULL, + stage_signature TEXT NOT NULL, + value TEXT NOT NULL, + PRIMARY KEY (project_id, build_signature, stage_id, stage_signature) + ); + CREATE TABLE IF NOT EXISTS task_metadata ( + project_id TEXT NOT NULL, + build_signature TEXT NOT NULL, + task_name TEXT NOT NULL, + type TEXT NOT NULL, + value TEXT NOT NULL, + PRIMARY KEY (project_id, build_signature, task_name, type) + ); + CREATE TABLE IF NOT EXISTS result_metadata ( + project_id TEXT NOT NULL, + build_signature TEXT NOT NULL, + stage_signature TEXT NOT NULL, + value TEXT NOT NULL, + PRIMARY KEY (project_id, build_signature, stage_signature) + ); + `); + } + + #prepareStatements() { + return { + getBuildManifest: this.#db.prepare( + "SELECT value FROM build_manifests WHERE project_id = ? AND build_signature = ?" 
+ ), + putBuildManifest: this.#db.prepare( + `INSERT OR REPLACE INTO build_manifests (project_id, build_signature, value) + VALUES (?, ?, ?)` + ), + getIndexCache: this.#db.prepare( + "SELECT value FROM index_cache WHERE project_id = ? AND build_signature = ? AND kind = ?" + ), + putIndexCache: this.#db.prepare( + `INSERT OR REPLACE INTO index_cache (project_id, build_signature, kind, value) + VALUES (?, ?, ?, ?)` + ), + getStageMetadata: this.#db.prepare( + `SELECT value FROM stage_metadata + WHERE project_id = ? AND build_signature = ? AND stage_id = ? AND stage_signature = ?` + ), + putStageMetadata: this.#db.prepare( + `INSERT OR REPLACE INTO stage_metadata + (project_id, build_signature, stage_id, stage_signature, value) + VALUES (?, ?, ?, ?, ?)` + ), + getTaskMetadata: this.#db.prepare( + `SELECT value FROM task_metadata + WHERE project_id = ? AND build_signature = ? AND task_name = ? AND type = ?` + ), + putTaskMetadata: this.#db.prepare( + `INSERT OR REPLACE INTO task_metadata + (project_id, build_signature, task_name, type, value) + VALUES (?, ?, ?, ?, ?)` + ), + getResultMetadata: this.#db.prepare( + `SELECT value FROM result_metadata + WHERE project_id = ? AND build_signature = ? AND stage_signature = ?` + ), + putResultMetadata: this.#db.prepare( + `INSERT OR REPLACE INTO result_metadata + (project_id, build_signature, stage_signature, value) + VALUES (?, ?, ?, ?)` + ), + }; + } + + // --- Build Manifests --- + + getBuildManifest(projectId, buildSignature) { + const row = this.#stmts.getBuildManifest.get(projectId, buildSignature); + return row ? JSON.parse(row.value) : null; + } + + putBuildManifest(projectId, buildSignature, data) { + this.#stmts.putBuildManifest.run(projectId, buildSignature, JSON.stringify(data)); + } + + // --- Index Cache --- + + getIndexCache(projectId, buildSignature, kind) { + const row = this.#stmts.getIndexCache.get(projectId, buildSignature, kind); + return row ? 
JSON.parse(row.value) : null; + } + + putIndexCache(projectId, buildSignature, kind, data) { + this.#stmts.putIndexCache.run(projectId, buildSignature, kind, JSON.stringify(data)); + } + + // --- Stage Metadata --- + + getStageMetadata(projectId, buildSignature, stageId, stageSignature) { + const row = this.#stmts.getStageMetadata.get(projectId, buildSignature, stageId, stageSignature); + return row ? JSON.parse(row.value) : null; + } + + putStageMetadata(projectId, buildSignature, stageId, stageSignature, data) { + this.#stmts.putStageMetadata.run( + projectId, buildSignature, stageId, stageSignature, JSON.stringify(data) + ); + } + + // --- Task Metadata --- + + getTaskMetadata(projectId, buildSignature, taskName, type) { + const row = this.#stmts.getTaskMetadata.get(projectId, buildSignature, taskName, type); + return row ? JSON.parse(row.value) : null; + } + + putTaskMetadata(projectId, buildSignature, taskName, type, data) { + this.#stmts.putTaskMetadata.run( + projectId, buildSignature, taskName, type, JSON.stringify(data) + ); + } + + // --- Result Metadata --- + + getResultMetadata(projectId, buildSignature, stageSignature) { + const row = this.#stmts.getResultMetadata.get(projectId, buildSignature, stageSignature); + return row ? JSON.parse(row.value) : null; + } + + putResultMetadata(projectId, buildSignature, stageSignature, data) { + this.#stmts.putResultMetadata.run( + projectId, buildSignature, stageSignature, JSON.stringify(data) + ); + } + + /** + * Run a function inside a SQLite transaction. + * + * @param {Function} fn Callback executed inside BEGIN/COMMIT + * @returns {*} Return value of fn + */ + transaction(fn) { + this.#db.exec("BEGIN"); + try { + const result = fn(); + this.#db.exec("COMMIT"); + return result; + } catch (err) { + this.#db.exec("ROLLBACK"); + throw err; + } + } + + /** + * Close the database connection. 
+ */ + close() { + this.#db.close(); + } +} diff --git a/packages/project/lib/build/cache/ProjectBuildCache.js b/packages/project/lib/build/cache/ProjectBuildCache.js index a7bc4d884ab..8accb9a3327 100644 --- a/packages/project/lib/build/cache/ProjectBuildCache.js +++ b/packages/project/lib/build/cache/ProjectBuildCache.js @@ -1495,8 +1495,8 @@ export default class ProjectBuildCache { * @param {Object|Array>} resourceMetadata */ #collectKnownIntegrities(resourceMetadata) { - const metadataObjects = Array.isArray(resourceMetadata) - ? resourceMetadata : [resourceMetadata]; + const metadataObjects = Array.isArray(resourceMetadata) ? + resourceMetadata : [resourceMetadata]; for (const metadataObj of metadataObjects) { for (const meta of Object.values(metadataObj)) { if (meta.integrity) { @@ -1532,7 +1532,7 @@ export default class ProjectBuildCache { inode: res.getInode(), lastModified: res.getLastModified(), size: await res.getSize(), - integrity, + integrity: await res.getIntegrity(), }; })); if (log.isLevelEnabled("perf") && casSkipped > 0) { diff --git a/packages/project/test/lib/build/cache/ContentAddressableStorage.js b/packages/project/test/lib/build/cache/ContentAddressableStorage.js index 13867641d2a..1d4a74a35a5 100644 --- a/packages/project/test/lib/build/cache/ContentAddressableStorage.js +++ b/packages/project/test/lib/build/cache/ContentAddressableStorage.js @@ -10,15 +10,15 @@ const readFile = promisify(fs.readFile); const TEST_DIR = path.join(import.meta.dirname, "..", "..", "..", "tmp", "ContentAddressableStorage"); -test.beforeEach(async (t) => { - t.context.basePath = path.join(TEST_DIR, `cas-${Date.now()}-${Math.random().toString(36).slice(2)}`); - t.context.cas = new ContentAddressableStorage(t.context.basePath); -}); - test.after.always(async () => { await rimraf(TEST_DIR); }); +test.beforeEach((t) => { + t.context.basePath = path.join(TEST_DIR, `cas-${Date.now()}-${Math.random().toString(36).slice(2)}`); + t.context.cas = new 
ContentAddressableStorage(t.context.basePath); +}); + test("contentPath: Computes deterministic path from integrity", (t) => { const cas = t.context.cas; @@ -66,7 +66,6 @@ test("has: Returns false when content does not exist", async (t) => { test("has: Returns true after content is stored", async (t) => { const cas = t.context.cas; const content = Buffer.from("test content"); - const integrity = "sha256-6DvEUQhlMraqbfGBGR2fhsJaNhr0K0VhMupLPEYrmIY="; // Compute real integrity for test content const ssri = await import("ssri"); diff --git a/scripts/run-cache-timings-benchmark.mjs b/scripts/run-cache-timings-benchmark.mjs new file mode 100644 index 00000000000..09a28c62081 --- /dev/null +++ b/scripts/run-cache-timings-benchmark.mjs @@ -0,0 +1,621 @@ +#!/usr/bin/env node + +/** + * Cache Timings Benchmark Runner + * + * Runs warm-cache builds with UI5_BUILD_TIMINGS=true across all cache option + * branches and 3 test projects, then generates per-project comparison tables. + * + * Usage: + * node scripts/run-cache-timings-benchmark.mjs [--warmup N] [--runs N] [--output DIR] + * + * Options: + * --warmup N Number of warmup runs before measuring (default: 1) + * --runs N Number of measured runs to average (default: 3) + * --output DIR Directory for results (default: ~/Desktop/cache-timings) + */ + +import {execSync} from "node:child_process"; +import path from "node:path"; +import fs from "node:fs"; +import os from "node:os"; + +// ── Configuration ────────────────────────────────────────────────────────── + +const REPO_DIR = path.resolve(import.meta.dirname, ".."); +const CLI_BIN = path.join(REPO_DIR, "packages/cli/bin/ui5.cjs"); + +const PROJECTS = [ + {name: "openui5-sample-app", dir: path.join(os.homedir(), "SAPDevelop/openui5-sample-app")}, + {name: "sap.ui.core", dir: path.join(os.homedir(), "SAPDevelop/openui5/src/sap.ui.core")}, + {name: "sap.m", dir: path.join(os.homedir(), "SAPDevelop/openui5/src/sap.m")}, +]; + +const BRANCHES = [ + {key: "baseline", name: 
"Baseline (pretty JSON)", branch: "feat/incremental-build-tests"}, + {key: "option_a", name: "Option A (minified JSON)", branch: "feat/cache-option-a"}, + {key: "option_b", name: "Option B (MessagePack)", branch: "feat/cache-option-b"}, + {key: "option_c", name: "Option C (SQLite+JSON)", branch: "feat/cache-option-c"}, + {key: "option_d", name: "Option D (SQLite+MsgPack)", branch: "feat/cache-option-d"}, + {key: "option_e", name: "Option E (LevelDB+MsgPack)", branch: "feat/cache-option-e"}, + {key: "option_f", name: "Option F (Streaming JSON)", branch: "feat/cache-option-f"}, + {key: "option_g", name: "Option G (JSONL Protocol)", branch: "feat/cache-option-g"}, + {key: "option_h", name: "Option H (Pure SQLite)", branch: "feat/cache-option-h"}, + {key: "option_i", name: "Option I (Pure LevelDB)", branch: "feat/cache-option-i"}, + {key: "option_j", name: "Option J (Incremental Managers)", branch: "feat/cache-option-j"}, + {key: "option_k", name: "Option K (Flat JSON Store)", branch: "feat/cache-option-k"}, + {key: "option_l", name: "Option L (JSONL+jq Streaming)", branch: "feat/cache-option-l"}, +]; + +// Timing labels we want to extract (in display order) +const TIMING_LABELS = [ + "readBuildManifest", + "readIndexCache", + "readTaskMetadata", + "readResultMetadata", + "readStageCache", + "writeBuildManifest", + "writeIndexCache", + "writeTaskMetadata", + "writeResultMetadata", + "writeStageCache", + "getResourcePathForStage", + "writeStageResource", +]; + +// ── CLI argument parsing ─────────────────────────────────────────────────── + +const args = process.argv.slice(2); +let warmupRuns = 1; +let measuredRuns = 3; +let outputDir = path.join(os.homedir(), "Desktop/cache-timings"); + +for (let i = 0; i < args.length; i++) { + if (args[i] === "--warmup" && args[i + 1]) { + warmupRuns = parseInt(args[++i], 10); + } else if (args[i] === "--runs" && args[i + 1]) { + measuredRuns = parseInt(args[++i], 10); + } else if (args[i] === "--output" && args[i + 1]) { + outputDir 
= path.resolve(args[++i]); + } else if (args[i] === "--help") { + console.log(`Usage: node ${path.basename(import.meta.url)} [--warmup N] [--runs N] [--output DIR]`); + process.exit(0); + } +} + +// ── Helpers ──────────────────────────────────────────────────────────────── + +function git(cmd) { + return execSync(`git ${cmd}`, {cwd: REPO_DIR, encoding: "utf8", stdio: ["pipe", "pipe", "pipe"]}).trim(); +} + +/** + * Run a build and capture the combined stdout+stderr output. + * UI5 logger writes to stderr, so we need both streams. + */ +function runBuildCapture(projectDir) { + try { + // Use shell to redirect stderr to stdout so we get everything + const result = execSync( + `UI5_BUILD_TIMINGS=true node "${CLI_BIN}" build 2>&1`, + { + cwd: projectDir, + encoding: "utf8", + timeout: 600_000, + shell: true, + } + ); + return result; + } catch (err) { + // Build may "fail" after success if timings cause issues — still capture output + if (err.stdout) { + return err.stdout; + } + throw err; + } +} + +/** + * Parse "Build succeeded in X.XX s" or "Build succeeded in XXX ms" from build output. + * Returns duration in milliseconds. + */ +function parseBuildTime(output) { + // Pattern: "Build succeeded in 5.37 s" or "Build succeeded in 123 ms" + const match = output.match(/Build succeeded in ([\d.]+)\s*(s|ms)/); + if (!match) { + return null; + } + const value = parseFloat(match[1]); + return match[2] === "s" ? value * 1000 : value; +} + +/** + * Parse BuildTimings output from the build log. 
+ * Returns a Map of label -> {calls, totalMs, avgMs, minMs, maxMs}
+ */
+function parseTimings(output) {
+	const timings = new Map();
+
+	// Match lines like:
+	//   readTaskMetadata 20 283.70ms 14.18ms 2.00ms 44.56ms
+	//   readIndexCache 1 14.36ms 14.36ms 14.36ms 14.36ms
+	//   getResourcePathForStage 9625 47848.234s 4.971s 4.959s 4.991s
+	// Also handles "0 -- -- -- --"
+	const lineRegex = /^\s{2}(\w+)\s+(\d+)\s+([\d.]+(?:ms|s)|--)\s+([\d.]+(?:ms|s)|--)\s+([\d.]+(?:ms|s)|--)\s+([\d.]+(?:ms|s)|--)/gm;
+
+	let match;
+	while ((match = lineRegex.exec(output)) !== null) {
+		const [, label, callsStr, totalStr, avgStr, minStr, maxStr] = match;
+
+		// Skip "Subtotal" lines
+		if (label === "Subtotal") continue;
+
+		const calls = parseInt(callsStr, 10);
+		if (calls === 0) {
+			timings.set(label, {calls: 0, totalMs: 0, avgMs: 0, minMs: 0, maxMs: 0});
+			continue;
+		}
+
+		timings.set(label, {
+			calls,
+			totalMs: parseValue(totalStr),
+			avgMs: parseValue(avgStr),
+			minMs: parseValue(minStr),
+			maxMs: parseValue(maxStr),
+		});
+	}
+
+	return timings;
+}
+
+/**
+ * Parse a timing value string like "14.36ms", "4.971s", or "--"
+ * Returns value in milliseconds.
+ */
+function parseValue(str) {
+	if (str === "--") return 0;
+	if (str.endsWith("ms")) return parseFloat(str);
+	if (str.endsWith("s")) return parseFloat(str) * 1000;
+	return parseFloat(str);
+}
+
+/**
+ * Format milliseconds to a human-readable string for the table.
+ */ +function fmtMs(ms) { + if (ms === 0 || ms == null) return "--"; + if (ms >= 1000) return `${(ms / 1000).toFixed(3)}s`; + if (ms < 0.01) return "<0.01ms"; + return `${ms.toFixed(1)}ms`; +} + +function fmtPct(value) { + if (value == null || Number.isNaN(value)) return "--"; + return `${value.toFixed(1)}%`; +} + +function percentile(values, p) { + if (!values.length) return 0; + const sorted = [...values].sort((a, b) => a - b); + const index = Math.ceil((p / 100) * sorted.length) - 1; + return sorted[Math.max(0, Math.min(index, sorted.length - 1))]; +} + +function median(values) { + if (!values.length) return 0; + const sorted = [...values].sort((a, b) => a - b); + const mid = Math.floor(sorted.length / 2); + if (sorted.length % 2 === 0) { + return (sorted[mid - 1] + sorted[mid]) / 2; + } + return sorted[mid]; +} + +/** + * Aggregate multiple timing results. + * Input: array of {buildTimeMs, timings: Map} + */ +function aggregateResults(results) { + if (results.length === 0) return null; + + const buildTimes = results.map((r) => r.buildTimeMs || 0); + const avgBuildTime = buildTimes.reduce((sum, t) => sum + t, 0) / results.length; + const medianBuildTime = median(buildTimes); + const p95BuildTime = percentile(buildTimes, 95); + + const avgTimings = new Map(); + for (const label of TIMING_LABELS) { + const entries = results + .map((r) => r.timings.get(label)) + .filter(Boolean); + + if (entries.length === 0) { + avgTimings.set(label, {calls: 0, totalMs: 0, avgMs: 0, minMs: 0, maxMs: 0}); + continue; + } + + avgTimings.set(label, { + calls: Math.round(entries.reduce((s, e) => s + e.calls, 0) / entries.length), + totalMs: entries.reduce((s, e) => s + e.totalMs, 0) / entries.length, + avgMs: entries.reduce((s, e) => s + e.avgMs, 0) / entries.length, + minMs: Math.min(...entries.map((e) => e.minMs)), + maxMs: Math.max(...entries.map((e) => e.maxMs)), + }); + } + + // Compute read subtotal and write subtotal + const readLabels = ["readBuildManifest", "readIndexCache", 
"readTaskMetadata", "readResultMetadata", "readStageCache"]; + const writeLabels = ["writeBuildManifest", "writeIndexCache", "writeTaskMetadata", "writeResultMetadata", "writeStageCache"]; + const resourceLabels = ["getResourcePathForStage", "writeStageResource"]; + + const readTotals = results.map((r) => readLabels.reduce((s, l) => s + (r.timings.get(l)?.totalMs || 0), 0)); + const writeTotals = results.map((r) => writeLabels.reduce((s, l) => s + (r.timings.get(l)?.totalMs || 0), 0)); + const resourceTotals = results.map((r) => resourceLabels.reduce((s, l) => s + (r.timings.get(l)?.totalMs || 0), 0)); + const metadataTotals = readTotals.map((readTotal, i) => readTotal + writeTotals[i] + resourceTotals[i]); + const metadataShares = metadataTotals.map((metadataTotal, i) => { + const buildTime = buildTimes[i]; + if (!buildTime) return 0; + return (metadataTotal / buildTime) * 100; + }); + + const readTotal = readTotals.reduce((s, n) => s + n, 0) / results.length; + const writeTotal = writeTotals.reduce((s, n) => s + n, 0) / results.length; + const resourceTotal = resourceTotals.reduce((s, n) => s + n, 0) / results.length; + const metadataTotal = metadataTotals.reduce((s, n) => s + n, 0) / results.length; + + return { + buildTimeMs: avgBuildTime, + buildTimeMedianMs: medianBuildTime, + buildTimeP95Ms: p95BuildTime, + timings: avgTimings, + readTotalMs: readTotal, + writeTotalMs: writeTotal, + resourceTotalMs: resourceTotal, + metadataTotalMs: metadataTotal, + metadataSharePct: metadataShares.reduce((s, n) => s + n, 0) / results.length, + metadataShareP95Pct: percentile(metadataShares, 95), + }; +} + +// ── Markdown table generation ────────────────────────────────────────────── + +function generateProjectTable(projectName, dataByBranch) { + const branchNames = BRANCHES.map((b) => b.name); + + // Header + const lines = [ + `## ${projectName}`, + "", + ]; + + // === Summary Table === + lines.push("### Summary"); + lines.push(""); + lines.push(`| Metric | 
${branchNames.join(" | ")} |`); + lines.push(`| --- | ${branchNames.map(() => "---:").join(" | ")} |`); + + // Build time row + lines.push( + `| **Build Time (avg)** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmtMs(d.buildTimeMs) : "N/A"; + }).join(" | ")} |` + ); + + lines.push( + `| **Build Time (median)** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmtMs(d.buildTimeMedianMs) : "N/A"; + }).join(" | ")} |` + ); + + lines.push( + `| **Build Time (p95)** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmtMs(d.buildTimeP95Ms) : "N/A"; + }).join(" | ")} |` + ); + + // Read total row + lines.push( + `| **Reads Total** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmtMs(d.readTotalMs) : "N/A"; + }).join(" | ")} |` + ); + + // Write total row + lines.push( + `| **Writes Total** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmtMs(d.writeTotalMs) : "N/A"; + }).join(" | ")} |` + ); + + // Resource I/O total row + lines.push( + `| **Resource I/O Total** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmtMs(d.resourceTotalMs) : "N/A"; + }).join(" | ")} |` + ); + + lines.push( + `| **Metadata Total** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmtMs(d.metadataTotalMs) : "N/A"; + }).join(" | ")} |` + ); + + lines.push( + `| **Metadata Share (avg)** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmtPct(d.metadataSharePct) : "N/A"; + }).join(" | ")} |` + ); + + lines.push( + `| **Metadata Share (p95)** | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + return d ? 
fmtPct(d.metadataShareP95Pct) : "N/A"; + }).join(" | ")} |` + ); + + lines.push(""); + + // === Detailed Reads Table === + lines.push("### Read Operations (Total ms)"); + lines.push(""); + lines.push(`| Operation | ${branchNames.join(" | ")} |`); + lines.push(`| --- | ${branchNames.map(() => "---:").join(" | ")} |`); + + const readOps = ["readBuildManifest", "readIndexCache", "readTaskMetadata", "readResultMetadata", "readStageCache"]; + for (const op of readOps) { + lines.push( + `| ${op} | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + if (!d) return "N/A"; + const t = d.timings.get(op); + if (!t || t.calls === 0) return "--"; + return `${fmtMs(t.totalMs)} (${t.calls}×)`; + }).join(" | ")} |` + ); + } + lines.push(""); + + // === Detailed Writes Table === + lines.push("### Write Operations (Total ms)"); + lines.push(""); + lines.push(`| Operation | ${branchNames.join(" | ")} |`); + lines.push(`| --- | ${branchNames.map(() => "---:").join(" | ")} |`); + + const writeOps = ["writeBuildManifest", "writeIndexCache", "writeTaskMetadata", "writeResultMetadata", "writeStageCache"]; + for (const op of writeOps) { + lines.push( + `| ${op} | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + if (!d) return "N/A"; + const t = d.timings.get(op); + if (!t || t.calls === 0) return "--"; + return `${fmtMs(t.totalMs)} (${t.calls}×)`; + }).join(" | ")} |` + ); + } + lines.push(""); + + // === Resource I/O Table === + lines.push("### Resource I/O (Total ms)"); + lines.push(""); + lines.push(`| Operation | ${branchNames.join(" | ")} |`); + lines.push(`| --- | ${branchNames.map(() => "---:").join(" | ")} |`); + + const resourceOps = ["getResourcePathForStage", "writeStageResource"]; + for (const op of resourceOps) { + lines.push( + `| ${op} | ${BRANCHES.map((b) => { + const d = dataByBranch.get(b.key); + if (!d) return "N/A"; + const t = d.timings.get(op); + if (!t || t.calls === 0) return "--"; + return `${fmtMs(t.totalMs)} (${t.calls}×)`; + }).join(" | 
")} |` + ); + } + lines.push(""); + + return lines.join("\n"); +} + +// ── Main ─────────────────────────────────────────────────────────────────── + +async function main() { + const startBranch = git("branch --show-current"); + console.log(`Starting branch: ${startBranch}`); + console.log(`Warmup runs: ${warmupRuns}, Measured runs: ${measuredRuns}`); + console.log(`Output directory: ${outputDir}`); + console.log(); + + // Verify all project directories exist + for (const project of PROJECTS) { + if (!fs.existsSync(project.dir)) { + console.error(`Project directory not found: ${project.dir}`); + process.exit(1); + } + } + + // Ensure clean working tree + const status = git("status --porcelain"); + if (status) { + console.error("Working tree is not clean. Please commit or stash changes first."); + console.error(status); + process.exit(1); + } + + // Data structure: projectName -> branchKey -> averaged result + const allResults = new Map(); + for (const project of PROJECTS) { + allResults.set(project.name, new Map()); + } + + // Raw results for JSON export + const rawResults = []; + + const totalSteps = BRANCHES.length * PROJECTS.length * (warmupRuns + measuredRuns); + let currentStep = 0; + + for (const branchInfo of BRANCHES) { + console.log(`\n${"═".repeat(70)}`); + console.log(`Switching to: ${branchInfo.branch} (${branchInfo.name})`); + console.log(`${"═".repeat(70)}`); + + try { + git(`checkout ${branchInfo.branch}`); + } catch (err) { + console.error(` Failed to checkout ${branchInfo.branch}: ${err.message}`); + continue; + } + + for (const project of PROJECTS) { + console.log(`\n ── ${project.name} ──`); + + // Warmup runs (not measured) + for (let w = 0; w < warmupRuns; w++) { + currentStep++; + const pct = ((currentStep / totalSteps) * 100).toFixed(0); + process.stdout.write(` [${pct}%] Warmup ${w + 1}/${warmupRuns}...`); + try { + runBuildCapture(project.dir); + console.log(" done"); + } catch (err) { + console.log(` FAILED: ${err.message}`); + } + } + + 
// Measured runs + const runResults = []; + for (let r = 0; r < measuredRuns; r++) { + currentStep++; + const pct = ((currentStep / totalSteps) * 100).toFixed(0); + process.stdout.write(` [${pct}%] Run ${r + 1}/${measuredRuns}...`); + try { + const output = runBuildCapture(project.dir); + const buildTimeMs = parseBuildTime(output); + const timings = parseTimings(output); + + const readLabels = ["readBuildManifest", "readIndexCache", "readTaskMetadata", "readResultMetadata", "readStageCache"]; + const writeLabels = ["writeBuildManifest", "writeIndexCache", "writeTaskMetadata", "writeResultMetadata", "writeStageCache"]; + const resourceLabels = ["getResourcePathForStage", "writeStageResource"]; + const readTotalMs = readLabels.reduce((s, l) => s + (timings.get(l)?.totalMs || 0), 0); + const writeTotalMs = writeLabels.reduce((s, l) => s + (timings.get(l)?.totalMs || 0), 0); + const resourceTotalMs = resourceLabels.reduce((s, l) => s + (timings.get(l)?.totalMs || 0), 0); + const metadataTotalMs = readTotalMs + writeTotalMs + resourceTotalMs; + const metadataSharePct = buildTimeMs ? 
(metadataTotalMs / buildTimeMs) * 100 : 0; + + if (buildTimeMs !== null) { + runResults.push({buildTimeMs, timings}); + console.log(` ${fmtMs(buildTimeMs)} (${timings.size} metrics, metadata ${fmtPct(metadataSharePct)})`); + } else { + console.log(" no build time found in output"); + } + + // Store raw result + rawResults.push({ + branch: branchInfo.key, + branchName: branchInfo.name, + project: project.name, + run: r + 1, + buildTimeMs, + readTotalMs, + writeTotalMs, + resourceTotalMs, + metadataTotalMs, + metadataSharePct, + timings: Object.fromEntries( + [...timings.entries()].map(([k, v]) => [k, v]) + ), + }); + } catch (err) { + console.log(` FAILED: ${err.message}`); + } + } + + // Aggregate the measured runs + if (runResults.length > 0) { + const avg = aggregateResults(runResults); + allResults.get(project.name).set(branchInfo.key, avg); + console.log(` Build avg/median/p95: ${fmtMs(avg.buildTimeMs)} / ${fmtMs(avg.buildTimeMedianMs)} / ${fmtMs(avg.buildTimeP95Ms)}` + + `, metadata share avg/p95: ${fmtPct(avg.metadataSharePct)} / ${fmtPct(avg.metadataShareP95Pct)}`); + } + } + } + + // Return to original branch + try { + git(`checkout ${startBranch}`); + console.log(`\nRestored branch: ${startBranch}`); + } catch { + console.warn(`Warning: Could not restore branch ${startBranch}`); + } + + // ── Generate output ──────────────────────────────────────────────────── + + fs.mkdirSync(outputDir, {recursive: true}); + const timestamp = new Date().toISOString().replace(/[:.]/g, "-"); + + // 1. 
Per-project markdown tables + const reportLines = [ + `# Cache Timings Benchmark`, + ``, + `**Generated:** ${new Date().toISOString()}`, + `**Warmup runs:** ${warmupRuns} | **Measured runs:** ${measuredRuns}`, + ``, + ]; + + for (const project of PROJECTS) { + const dataByBranch = allResults.get(project.name); + reportLines.push(generateProjectTable(project.name, dataByBranch)); + reportLines.push("---"); + reportLines.push(""); + } + + const reportPath = path.join(outputDir, `cache-timings-${timestamp}.md`); + fs.writeFileSync(reportPath, reportLines.join("\n"), "utf8"); + console.log(`\nReport written to: ${reportPath}`); + + // 2. Raw JSON data + const jsonPath = path.join(outputDir, `cache-timings-${timestamp}.json`); + fs.writeFileSync(jsonPath, JSON.stringify(rawResults, null, 2), "utf8"); + console.log(`Raw data written to: ${jsonPath}`); + + // 3. Print summary to console + console.log("\n" + "═".repeat(70)); + console.log("SUMMARY"); + console.log("═".repeat(70)); + + for (const project of PROJECTS) { + const dataByBranch = allResults.get(project.name); + console.log(`\n${project.name}:`); + + const header = " " + "Option".padEnd(30) + "Avg".padStart(10) + "Median".padStart(10) + "P95".padStart(10) + "Meta%".padStart(10); + console.log(header); + console.log(" " + "─".repeat(header.length - 2)); + + for (const b of BRANCHES) { + const d = dataByBranch.get(b.key); + if (!d) { + console.log(` ${b.name.padEnd(30)}${"N/A".padStart(10)}${"N/A".padStart(10)}${"N/A".padStart(10)}${"N/A".padStart(10)}`); + continue; + } + console.log( + ` ${b.name.padEnd(30)}${fmtMs(d.buildTimeMs).padStart(10)}` + + `${fmtMs(d.buildTimeMedianMs).padStart(10)}${fmtMs(d.buildTimeP95Ms).padStart(10)}` + + `${fmtPct(d.metadataSharePct).padStart(10)}` + ); + } + } +} + +main().catch((err) => { + console.error("Fatal error:", err); + process.exit(1); +}); diff --git a/scripts/run-serve-timings-benchmark.mjs b/scripts/run-serve-timings-benchmark.mjs new file mode 100644 index 
00000000000..3e7b243dc5a --- /dev/null +++ b/scripts/run-serve-timings-benchmark.mjs @@ -0,0 +1,684 @@ +#!/usr/bin/env node + +/** + * Serve Timings Benchmark Runner + * + * Measures warm-cache startup performance of `ui5 serve` across all cache option + * branches. For each run the script: + * 1. Spawns `ui5 serve --port 0` with UI5_BUILD_TIMINGS=true + * 2. Waits for the "URL: http://localhost:" line (server listening) + * 3. Makes an HTTP GET to trigger on-demand build of the root project + * 4. Waits for the "Build succeeded in N ms" line + * 5. Captures build time and cache timing metrics + * 6. Kills the child process + * + * Usage: + * node scripts/run-serve-timings-benchmark.mjs [options] + * + * Options: + * --warmup N Number of warmup runs before measuring (default: 1) + * --runs N Number of measured runs to average (default: 3) + * --output DIR Directory for results (default: ~/Desktop/serve-timings) + * --branches KEYS Comma-separated branch keys to test (default: all) + * --projects NAMES Comma-separated project names to test (default: all) + * --timeout MS Max time to wait for build per run (default: 120000) + */ + +import {spawn, execSync} from "node:child_process"; +import path from "node:path"; +import fs from "node:fs"; +import os from "node:os"; +import http from "node:http"; + +// ── Configuration ────────────────────────────────────────────────────────── + +const REPO_DIR = path.resolve(import.meta.dirname, ".."); +const CLI_BIN = path.join(REPO_DIR, "packages/cli/bin/ui5.cjs"); + +const PROJECTS = [ + {name: "openui5-sample-app", dir: path.join(os.homedir(), "SAPDevelop/openui5-sample-app")}, + {name: "sap.ui.core", dir: path.join(os.homedir(), "SAPDevelop/openui5/src/sap.ui.core")}, + {name: "sap.m", dir: path.join(os.homedir(), "SAPDevelop/openui5/src/sap.m")}, +]; + +const BRANCHES = [ + {key: "baseline", name: "Baseline (pretty JSON)", branch: "feat/incremental-build-tests"}, + {key: "option_a", name: "Option A (minified JSON)", branch: 
"feat/cache-option-a"}, + {key: "option_b", name: "Option B (MessagePack)", branch: "feat/cache-option-b"}, + {key: "option_c", name: "Option C (SQLite+JSON)", branch: "feat/cache-option-c"}, + {key: "option_d", name: "Option D (SQLite+MsgPack)", branch: "feat/cache-option-d"}, + {key: "option_e", name: "Option E (LevelDB+MsgPack)", branch: "feat/cache-option-e"}, + {key: "option_f", name: "Option F (Streaming JSON)", branch: "feat/cache-option-f"}, + {key: "option_g", name: "Option G (JSONL Protocol)", branch: "feat/cache-option-g"}, + {key: "option_h", name: "Option H (Pure SQLite)", branch: "feat/cache-option-h"}, + {key: "option_i", name: "Option I (Pure LevelDB)", branch: "feat/cache-option-i"}, + {key: "option_j", name: "Option J (Incremental Managers)", branch: "feat/cache-option-j"}, + {key: "option_k", name: "Option K (Flat JSON Store)", branch: "feat/cache-option-k"}, + {key: "option_l", name: "Option L (JSONL+jq Streaming)", branch: "feat/cache-option-l"}, +]; + +const TIMING_LABELS = [ + "readBuildManifest", + "readIndexCache", + "readTaskMetadata", + "readResultMetadata", + "readStageCache", + "writeBuildManifest", + "writeIndexCache", + "writeTaskMetadata", + "writeResultMetadata", + "writeStageCache", + "getResourcePathForStage", + "writeStageResource", +]; + +// ── CLI argument parsing ─────────────────────────────────────────────────── + +const args = process.argv.slice(2); +let warmupRuns = 1; +let measuredRuns = 3; +let outputDir = path.join(os.homedir(), "Desktop/serve-timings"); +let branchFilter = null; +let projectFilter = null; +let timeoutMs = 120_000; + +for (let i = 0; i < args.length; i++) { + if (args[i] === "--warmup" && args[i + 1]) { + warmupRuns = parseInt(args[++i], 10); + } else if (args[i] === "--runs" && args[i + 1]) { + measuredRuns = parseInt(args[++i], 10); + } else if (args[i] === "--output" && args[i + 1]) { + outputDir = path.resolve(args[++i]); + } else if (args[i] === "--branches" && args[i + 1]) { + branchFilter = new 
Set(args[++i].split(",")); + } else if (args[i] === "--projects" && args[i + 1]) { + projectFilter = new Set(args[++i].split(",")); + } else if (args[i] === "--timeout" && args[i + 1]) { + timeoutMs = parseInt(args[++i], 10); + } else if (args[i] === "--help") { + console.log([ + `Usage: node ${path.basename(import.meta.url)} [options]`, + "", + "Options:", + " --warmup N Number of warmup runs (default: 1)", + " --runs N Number of measured runs (default: 3)", + " --output DIR Output directory (default: ~/Desktop/serve-timings)", + " --branches KEYS Comma-separated branch keys (default: all)", + " --projects NAMES Comma-separated project names (default: all)", + " --timeout MS Max wait time per run in ms (default: 120000)", + ].join("\n")); + process.exit(0); + } +} + +const selectedBranches = branchFilter + ? BRANCHES.filter((b) => branchFilter.has(b.key)) + : BRANCHES; + +const selectedProjects = projectFilter + ? PROJECTS.filter((p) => projectFilter.has(p.name)) + : PROJECTS; + +// ── Helpers ──────────────────────────────────────────────────────────────── + +function git(cmd) { + return execSync(`git ${cmd}`, { + cwd: REPO_DIR, encoding: "utf8", stdio: ["pipe", "pipe", "pipe"], + }).trim(); +} + +/** + * Make an HTTP GET request and return the status code. + * Used to trigger on-demand build by requesting a resource. + */ +function httpGet(url) { + return new Promise((resolve, reject) => { + const req = http.get(url, (res) => { + res.resume(); // Drain the response body + resolve(res.statusCode); + }); + req.on("error", reject); + req.setTimeout(timeoutMs, () => { + req.destroy(new Error("HTTP request timed out")); + }); + }); +} + +/** + * Spawn `ui5 serve`, wait for server to listen, trigger a build via HTTP, + * capture output until "Build succeeded", then kill the process. 
+ * + * Returns: {buildTimeMs, serverReadyMs, totalMs, output, timings} + * - serverReadyMs: time from spawn to server listening + * - buildTimeMs: extracted from "Build succeeded in N ms" + * - totalMs: time from spawn to "Build succeeded" + */ +function runServeCapture(projectDir) { + return new Promise((resolve, reject) => { + const startTime = performance.now(); + let output = ""; + let port = null; + let buildSucceeded = false; + let settled = false; + + const child = spawn("node", [CLI_BIN, "serve", "--port", "0"], { + cwd: projectDir, + env: { + ...process.env, + UI5_BUILD_TIMINGS: "true", + // Force colour off so log parsing is reliable + NO_COLOR: "1", + FORCE_COLOR: "0", + }, + stdio: ["pipe", "pipe", "pipe"], + }); + + const timer = setTimeout(() => { + if (!settled) { + settled = true; + child.kill("SIGKILL"); + reject(new Error( + `Timed out after ${timeoutMs}ms. Output so far:\n${output.slice(-800)}` + )); + } + }, timeoutMs); + + let serverReadyMs = null; + + function processLine(line) { + output += line + "\n"; + + // Detect "URL: http://localhost:" → extract port and record server-ready time + const urlMatch = line.match(/URL:\s*https?:\/\/localhost:(\d+)/); + if (urlMatch && !port) { + port = parseInt(urlMatch[1], 10); + serverReadyMs = performance.now() - startTime; + + // Trigger on-demand build by requesting a resource from the root project + httpGet(`http://localhost:${port}/index.html`).catch(() => { + // Request may fail if the server shuts down before response — that's OK. + }); + } + + // Detect "Build succeeded in N ms" or "Build succeeded in N.NN s" + const buildMatch = line.match(/Build succeeded in ([\d.]+)\s*(s|ms)/); + if (buildMatch && !buildSucceeded) { + buildSucceeded = true; + const totalMs = performance.now() - startTime; + const value = parseFloat(buildMatch[1]); + const buildTimeMs = buildMatch[2] === "s" ? 
value * 1000 : value; + + // Wait a brief moment to capture any trailing timing-table output + setTimeout(() => { + if (!settled) { + settled = true; + clearTimeout(timer); + killChild(child); + resolve({ + buildTimeMs, + serverReadyMs, + totalMs, + output, + timings: parseTimings(output), + }); + } + }, 500); + } + } + + child.stdout.on("data", (data) => { + for (const line of data.toString().split("\n")) { + if (line.trim()) processLine(line.trim()); + } + }); + + child.stderr.on("data", (data) => { + for (const line of data.toString().split("\n")) { + if (line.trim()) processLine(line.trim()); + } + }); + + child.on("error", (err) => { + if (!settled) { + settled = true; + clearTimeout(timer); + reject(err); + } + }); + + child.on("exit", (code) => { + if (!settled) { + settled = true; + clearTimeout(timer); + reject(new Error( + `Process exited with code ${code} before build succeeded.\nOutput:\n${output.slice(-1000)}` + )); + } + }); + }); +} + +function killChild(child) { + try { + child.kill("SIGTERM"); + } catch { + // Already dead + } + // Force kill after 1 second if SIGTERM didn't work + setTimeout(() => { + try { + child.kill("SIGKILL"); + } catch { + // Already dead + } + }, 1000); +} + +/** + * Parse BuildTimings output from the serve log. 
+ * Returns a Map of label -> {calls, totalMs, avgMs, minMs, maxMs} + */ +function parseTimings(output) { + const timings = new Map(); + + const lineRegex = + /^\s{2}(\w+)\s+(\d+)\s+([\d.]+(?:ms|s)|--)\s+([\d.]+(?:ms|s)|--)\s+([\d.]+(?:ms|s)|--)\s+([\d.]+(?:ms|s)|--)/gm; + + let match; + while ((match = lineRegex.exec(output)) !== null) { + const [, label, callsStr, totalStr, avgStr, minStr, maxStr] = match; + if (label === "Subtotal") continue; + + const calls = parseInt(callsStr, 10); + if (calls === 0) { + timings.set(label, {calls: 0, totalMs: 0, avgMs: 0, minMs: 0, maxMs: 0}); + continue; + } + + timings.set(label, { + calls, + totalMs: parseValue(totalStr), + avgMs: parseValue(avgStr), + minMs: parseValue(minStr), + maxMs: parseValue(maxStr), + }); + } + + return timings; +} + +function parseValue(str) { + if (str === "--") return 0; + if (str.endsWith("ms")) return parseFloat(str); + if (str.endsWith("s")) return parseFloat(str) * 1000; + return parseFloat(str); +} + +function fmtMs(ms) { + if (ms === 0 || ms == null) return "--"; + if (ms >= 1000) return `${(ms / 1000).toFixed(3)}s`; + if (ms < 0.01) return "<0.01ms"; + return `${ms.toFixed(1)}ms`; +} + +function fmtPct(value) { + if (value == null || Number.isNaN(value)) return "--"; + return `${value.toFixed(1)}%`; +} + +function median(values) { + if (!values.length) return 0; + const sorted = [...values].sort((a, b) => a - b); + const mid = Math.floor(sorted.length / 2); + return sorted.length % 2 === 0 ? 
(sorted[mid - 1] + sorted[mid]) / 2 : sorted[mid]; +} + +function percentile(values, p) { + if (!values.length) return 0; + const sorted = [...values].sort((a, b) => a - b); + const index = Math.ceil((p / 100) * sorted.length) - 1; + return sorted[Math.max(0, Math.min(index, sorted.length - 1))]; +} + +function aggregateResults(results) { + if (results.length === 0) return null; + + const buildTimes = results.map((r) => r.buildTimeMs || 0); + const serverReadyTimes = results.map((r) => r.serverReadyMs || 0); + const totalTimes = results.map((r) => r.totalMs || 0); + + const avgTimings = new Map(); + for (const label of TIMING_LABELS) { + const entries = results.map((r) => r.timings.get(label)).filter(Boolean); + if (entries.length === 0) { + avgTimings.set(label, {calls: 0, totalMs: 0, avgMs: 0, minMs: 0, maxMs: 0}); + continue; + } + avgTimings.set(label, { + calls: Math.round(entries.reduce((s, e) => s + e.calls, 0) / entries.length), + totalMs: entries.reduce((s, e) => s + e.totalMs, 0) / entries.length, + avgMs: entries.reduce((s, e) => s + e.avgMs, 0) / entries.length, + minMs: Math.min(...entries.map((e) => e.minMs)), + maxMs: Math.max(...entries.map((e) => e.maxMs)), + }); + } + + const readLabels = [ + "readBuildManifest", "readIndexCache", "readTaskMetadata", + "readResultMetadata", "readStageCache", + ]; + const writeLabels = [ + "writeBuildManifest", "writeIndexCache", "writeTaskMetadata", + "writeResultMetadata", "writeStageCache", + ]; + const resourceLabels = ["getResourcePathForStage", "writeStageResource"]; + + const readTotals = results + .map((r) => readLabels.reduce((s, l) => s + (r.timings.get(l)?.totalMs || 0), 0)); + const writeTotals = results + .map((r) => writeLabels.reduce((s, l) => s + (r.timings.get(l)?.totalMs || 0), 0)); + const resourceTotals = results + .map((r) => resourceLabels.reduce((s, l) => s + (r.timings.get(l)?.totalMs || 0), 0)); + const metadataTotals = readTotals.map((r, i) => r + writeTotals[i] + resourceTotals[i]); + 
const metadataShares = metadataTotals + .map((mt, i) => buildTimes[i] ? (mt / buildTimes[i]) * 100 : 0); + + return { + buildTimeMs: buildTimes.reduce((s, t) => s + t, 0) / results.length, + buildTimeMedianMs: median(buildTimes), + buildTimeP95Ms: percentile(buildTimes, 95), + serverReadyMs: serverReadyTimes.reduce((s, t) => s + t, 0) / results.length, + serverReadyMedianMs: median(serverReadyTimes), + totalMs: totalTimes.reduce((s, t) => s + t, 0) / results.length, + totalMedianMs: median(totalTimes), + timings: avgTimings, + readTotalMs: readTotals.reduce((s, n) => s + n, 0) / results.length, + writeTotalMs: writeTotals.reduce((s, n) => s + n, 0) / results.length, + resourceTotalMs: resourceTotals.reduce((s, n) => s + n, 0) / results.length, + metadataTotalMs: metadataTotals.reduce((s, n) => s + n, 0) / results.length, + metadataSharePct: metadataShares.reduce((s, n) => s + n, 0) / results.length, + metadataShareP95Pct: percentile(metadataShares, 95), + }; +} + +// ── Markdown table generation ────────────────────────────────────────────── + +function generateProjectTable(projectName, dataByBranch, branches) { + const branchNames = branches.map((b) => b.name); + const lines = [`## ${projectName}`, ""]; + + // === Summary Table === + lines.push("### Summary", ""); + lines.push(`| Metric | ${branchNames.join(" | ")} |`); + lines.push(`| --- | ${branchNames.map(() => "---:").join(" | ")} |`); + + const metrics = [ + ["**Build Time (avg)**", (d) => fmtMs(d.buildTimeMs)], + ["**Build Time (median)**", (d) => fmtMs(d.buildTimeMedianMs)], + ["**Build Time (p95)**", (d) => fmtMs(d.buildTimeP95Ms)], + ["**Server Ready (avg)**", (d) => fmtMs(d.serverReadyMs)], + ["**Server Ready (median)**", (d) => fmtMs(d.serverReadyMedianMs)], + ["**Total (spawn→build done)**", (d) => fmtMs(d.totalMs)], + ["**Total (median)**", (d) => fmtMs(d.totalMedianMs)], + ["**Reads Total**", (d) => fmtMs(d.readTotalMs)], + ["**Writes Total**", (d) => fmtMs(d.writeTotalMs)], + ["**Resource I/O 
Total**", (d) => fmtMs(d.resourceTotalMs)], + ["**Metadata Total**", (d) => fmtMs(d.metadataTotalMs)], + ["**Metadata Share (avg)**", (d) => fmtPct(d.metadataSharePct)], + ["**Metadata Share (p95)**", (d) => fmtPct(d.metadataShareP95Pct)], + ]; + + for (const [label, fmt] of metrics) { + lines.push(`| ${label} | ${branches.map((b) => { + const d = dataByBranch.get(b.key); + return d ? fmt(d) : "N/A"; + }).join(" | ")} |`); + } + lines.push(""); + + // === Detailed Operations Tables === + const sections = [ + { + title: "Read Operations (Total ms)", + ops: ["readBuildManifest", "readIndexCache", "readTaskMetadata", + "readResultMetadata", "readStageCache"], + }, + { + title: "Write Operations (Total ms)", + ops: ["writeBuildManifest", "writeIndexCache", "writeTaskMetadata", + "writeResultMetadata", "writeStageCache"], + }, + { + title: "Resource I/O (Total ms)", + ops: ["getResourcePathForStage", "writeStageResource"], + }, + ]; + + for (const {title, ops} of sections) { + lines.push(`### ${title}`, ""); + lines.push(`| Operation | ${branchNames.join(" | ")} |`); + lines.push(`| --- | ${branchNames.map(() => "---:").join(" | ")} |`); + + for (const op of ops) { + lines.push(`| ${op} | ${branches.map((b) => { + const d = dataByBranch.get(b.key); + if (!d) return "N/A"; + const t = d.timings.get(op); + if (!t || t.calls === 0) return "--"; + return `${fmtMs(t.totalMs)} (${t.calls}×)`; + }).join(" | ")} |`); + } + lines.push(""); + } + + return lines.join("\n"); +} + +// ── Main ─────────────────────────────────────────────────────────────────── + +async function main() { + const startBranch = git("branch --show-current"); + console.log(`Starting branch: ${startBranch}`); + console.log(`Warmup runs: ${warmupRuns}, Measured runs: ${measuredRuns}, Timeout: ${timeoutMs}ms`); + console.log(`Branches: ${selectedBranches.map((b) => b.key).join(", ")}`); + console.log(`Projects: ${selectedProjects.map((p) => p.name).join(", ")}`); + console.log(`Output directory: 
${outputDir}`); + console.log(); + + // Verify project directories exist + for (const project of selectedProjects) { + if (!fs.existsSync(project.dir)) { + console.error(`Project directory not found: ${project.dir}`); + process.exit(1); + } + } + + // Ensure no uncommitted modifications (new/staged files for *this* script are OK) + const status = git("status --porcelain"); + const dirtyLines = status.split("\n").filter((l) => { + if (!l) return false; + // Allow untracked, newly added, or added-then-modified files + if (l.startsWith("?? ") || l.startsWith("A ") || l.startsWith("AM ")) return false; + return true; + }); + if (dirtyLines.length) { + console.error("Working tree has uncommitted modifications. Please commit or stash changes first."); + console.error(dirtyLines.join("\n")); + process.exit(1); + } + + // Data: projectName -> branchKey -> aggregated result + const allResults = new Map(); + for (const project of selectedProjects) { + allResults.set(project.name, new Map()); + } + + const rawResults = []; + const totalSteps = selectedBranches.length * selectedProjects.length * (warmupRuns + measuredRuns); + let currentStep = 0; + + for (const branchInfo of selectedBranches) { + console.log(`\n${"═".repeat(70)}`); + console.log(`Switching to: ${branchInfo.branch} (${branchInfo.name})`); + console.log(`${"═".repeat(70)}`); + + try { + git(`checkout ${branchInfo.branch}`); + } catch (err) { + console.error(` Failed to checkout ${branchInfo.branch}: ${err.message}`); + continue; + } + + for (const project of selectedProjects) { + console.log(`\n ── ${project.name} ──`); + + // Warmup: run `ui5 build` first to ensure cache is populated, + // then a serve warmup cycle + for (let w = 0; w < warmupRuns; w++) { + currentStep++; + const pct = ((currentStep / totalSteps) * 100).toFixed(0); + + process.stdout.write(` [${pct}%] Warmup ${w + 1}/${warmupRuns} (build)...`); + try { + execSync( + `UI5_BUILD_TIMINGS=true node "${CLI_BIN}" build 2>&1`, + {cwd: project.dir, 
encoding: "utf8", timeout: 600_000, shell: true} + ); + console.log(" done"); + } catch (err) { + console.log(` FAILED: ${(err.message || "").slice(0, 100)}`); + } + + process.stdout.write(` [${pct}%] Warmup ${w + 1}/${warmupRuns} (serve)...`); + try { + await runServeCapture(project.dir); + console.log(" done"); + } catch (err) { + console.log(` FAILED: ${(err.message || "").slice(0, 100)}`); + } + } + + // Measured runs + const runResults = []; + for (let r = 0; r < measuredRuns; r++) { + currentStep++; + const pct = ((currentStep / totalSteps) * 100).toFixed(0); + process.stdout.write(` [${pct}%] Run ${r + 1}/${measuredRuns}...`); + try { + const result = await runServeCapture(project.dir); + runResults.push(result); + console.log( + ` build=${fmtMs(result.buildTimeMs)}` + + ` srvReady=${fmtMs(result.serverReadyMs)}` + + ` total=${fmtMs(result.totalMs)}` + ); + + rawResults.push({ + branch: branchInfo.key, + branchName: branchInfo.name, + project: project.name, + run: r + 1, + buildTimeMs: result.buildTimeMs, + serverReadyMs: result.serverReadyMs, + totalMs: result.totalMs, + timings: Object.fromEntries([...result.timings.entries()]), + }); + } catch (err) { + console.log(` FAILED: ${(err.message || "").slice(0, 100)}`); + } + } + + // Aggregate + if (runResults.length > 0) { + const avg = aggregateResults(runResults); + allResults.get(project.name).set(branchInfo.key, avg); + console.log( + ` Summary: build avg/med/p95: ${fmtMs(avg.buildTimeMs)} / ` + + `${fmtMs(avg.buildTimeMedianMs)} / ${fmtMs(avg.buildTimeP95Ms)} ` + + `| srvReady: ${fmtMs(avg.serverReadyMs)} ` + + `| total: ${fmtMs(avg.totalMs)} ` + + `| meta%: ${fmtPct(avg.metadataSharePct)}` + ); + } + } + } + + // Restore original branch + try { + git(`checkout ${startBranch}`); + console.log(`\nRestored branch: ${startBranch}`); + } catch { + console.warn(`Warning: Could not restore branch ${startBranch}`); + } + + // ── Generate output ──────────────────────────────────────────────────── + + 
fs.mkdirSync(outputDir, {recursive: true}); + const timestamp = new Date().toISOString().replace(/[:.]/g, "-"); + + // Markdown report + const reportLines = [ + "# Serve Timings Benchmark", + "", + `**Generated:** ${new Date().toISOString()}`, + `**Warmup runs:** ${warmupRuns} | **Measured runs:** ${measuredRuns}`, + "", + "Measures time from `ui5 serve` spawn → HTTP request → `Build succeeded`.", + "", + ]; + + for (const project of selectedProjects) { + const dataByBranch = allResults.get(project.name); + reportLines.push(generateProjectTable(project.name, dataByBranch, selectedBranches)); + reportLines.push("---", ""); + } + + const reportPath = path.join(outputDir, `serve-timings-${timestamp}.md`); + fs.writeFileSync(reportPath, reportLines.join("\n"), "utf8"); + console.log(`\nReport written to: ${reportPath}`); + + // Raw JSON + const jsonPath = path.join(outputDir, `serve-timings-${timestamp}.json`); + fs.writeFileSync(jsonPath, JSON.stringify(rawResults, null, 2), "utf8"); + console.log(`Raw data written to: ${jsonPath}`); + + // Console summary + console.log("\n" + "═".repeat(80)); + console.log("SUMMARY (ui5 serve — warm cache)"); + console.log("═".repeat(80)); + + for (const project of selectedProjects) { + const dataByBranch = allResults.get(project.name); + console.log(`\n${project.name}:`); + + const header = " " + "Option".padEnd(38) + + "Build".padStart(10) + "SrvReady".padStart(10) + + "Total".padStart(10) + "Meta%".padStart(8); + console.log(header); + console.log(" " + "─".repeat(header.length - 2)); + + for (const b of selectedBranches) { + const d = dataByBranch.get(b.key); + if (!d) { + console.log( + ` ${b.name.padEnd(38)}` + + `${"N/A".padStart(10)}${"N/A".padStart(10)}` + + `${"N/A".padStart(10)}${"N/A".padStart(8)}` + ); + continue; + } + console.log( + ` ${b.name.padEnd(38)}` + + `${fmtMs(d.buildTimeMedianMs).padStart(10)}` + + `${fmtMs(d.serverReadyMedianMs).padStart(10)}` + + `${fmtMs(d.totalMedianMs).padStart(10)}` + + 
`${fmtPct(d.metadataSharePct).padStart(8)}` + ); + } + } +} + +main().catch((err) => { + console.error("Fatal error:", err); + process.exit(1); +});