From 198094cba3404c68e43cb3fb34f99ed6b7e010ad Mon Sep 17 00:00:00 2001 From: Haowen Chen Date: Wed, 29 Nov 2023 11:37:42 +0800 Subject: [PATCH 1/3] Re-arrange code and update dependencies to support browser --- package-lock.json | 29 ++- package.json | 3 +- src/Benchmark.ts | 310 +++++++++++++++++++++++++++++ src/index.ts | 483 ++-------------------------------------------- src/measure.ts | 253 ++++++++++++++++++++++++ 5 files changed, 605 insertions(+), 473 deletions(-) create mode 100644 src/Benchmark.ts create mode 100644 src/measure.ts diff --git a/package-lock.json b/package-lock.json index b7db839..56fbeb6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,8 @@ "version": "0.9.0", "license": "MIT", "dependencies": { - "browser-process-hrtime": "^1.0.0", + "browser-hrtime": "^1.1.8", + "eventemitter3": "^4.0.7", "mathjs": "^10.5.2" }, "devDependencies": { @@ -1883,10 +1884,16 @@ "node": ">=8" } }, + "node_modules/browser-hrtime": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/browser-hrtime/-/browser-hrtime-1.1.8.tgz", + "integrity": "sha512-kzXheikaJsBtzUBlyVtPIY5r0soQePzjwVwT4IlDpU2RvfB5Py52gpU98M77rgqMCheoSSZvrcrdj3t6cZ3suA==" + }, "node_modules/browser-process-hrtime": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", - "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==" + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true }, "node_modules/browserslist": { "version": "4.20.3", @@ -2574,6 +2581,11 @@ "node": ">=0.10.0" } }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" + }, "node_modules/exec-sh": { "version": "0.3.6", "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.6.tgz", @@ -9610,10 +9622,16 @@ "fill-range": "^7.0.1" } }, + "browser-hrtime": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/browser-hrtime/-/browser-hrtime-1.1.8.tgz", + "integrity": "sha512-kzXheikaJsBtzUBlyVtPIY5r0soQePzjwVwT4IlDpU2RvfB5Py52gpU98M77rgqMCheoSSZvrcrdj3t6cZ3suA==" + }, "browser-process-hrtime": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", - "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==" + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true }, "browserslist": { "version": "4.20.3", @@ -10136,6 +10154,11 @@ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true }, + "eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" + }, "exec-sh": { "version": "0.3.6", "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.6.tgz", diff --git a/package.json b/package.json index 4e83691..e69eb57 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,8 @@ "test": "npx jest --verbose --silent --coverage --runInBand" }, "dependencies": { - "browser-process-hrtime": "^1.0.0", + 
"browser-process-hrtime": "^1.0.0", +
"browser-hrtime": "^1.1.8", + "eventemitter3": "^4.0.7", "mathjs": "^10.5.2" }, "devDependencies": { diff --git a/src/Benchmark.ts b/src/Benchmark.ts new file mode 100644 index 0000000..a4e91a0 --- /dev/null +++ b/src/Benchmark.ts @@ -0,0 +1,310 @@ +import { EventEmitter } from "eventemitter3"; +import * as mathjs from "mathjs"; +import { FOOTER, HEADER } from "./etc"; +import { defaultMeasureOptions, measure, Measurement, MeasureOptions, verifyMeasurement } from "./measure"; + +export declare interface BenchmarkEventEmitter { + emit( + event: "record", + description: Array, + measurement: Measurement + ): boolean; + on( + event: "record", + listener: (description: Array, measurement: Measurement) => void + ): this; + once( + event: "record", + listener: (description: Array, measurement: Measurement) => void + ): this; +} + +export class BenchmarkEventEmitter extends EventEmitter<"record"> {} + + +/** + * Raw data collected from [[Benchmark.record]]. + */ +export interface BenchmarkData { + /** + * Description passed to [[Benchmark.record]]. + */ + [description: string]: { + /** + * Durations of all measured iterations, in milliseconds. + */ + durations: Array, + + /** + * Total duration of the benchmark, i.e. for throughput. + * This is nullable for compatibility with older serialized data from the Jest reporter. + */ + totalDuration?: number; + + /** + * Nested test data, such as when passing `["A", "B"]` as the + * description to [[Benchmark.record]]. + */ + children: BenchmarkData, + }; +} + +/** + * Used for filtering benchmark data. + */ +export enum Criteria { + Fastest, + Slowest, +} + +/** + * Aggregator for performance results of various tests. + */ +export class Benchmark { + /** + * Raw data collected from [[Benchmark.record]]. + */ + data: BenchmarkData = {}; + + /** + * Event emitter. + * + * * `record` is emitted after [[Benchmark.record]] finishes all iterations. + * + * Refer to [[BenchmarkEventEmitter.on]] for the event callback signatures. + */ + events: BenchmarkEventEmitter = new BenchmarkEventEmitter(); + + /** + * Measure the time it takes for a function to execute. + * In addition to returning the measurement itself, this method also + * stores the result in [[Benchmark.data]] for later use/reporting. + * + * With this overload, since no description is provided, the data will not + * be recorded directly. However, a `record` event will still be emitted, + * allowing any listeners (such as reporters) to act on it. + * + * @param fn - Function to measure. If it returns a promise, + * then it will be `await`ed automatically as part of the iteration. + * @param options - Options to customize the measurement. + */ + async record( + fn: () => any, + options?: Partial> + ): Promise; + /** + * Measure the time it takes for a function to execute. + * In addition to returning the measurement itself, this method also + * stores the result in [[Benchmark.data]] for later use/reporting, + * and [[Benchmark.events]] emits a `record` event for any listeners. + * + * @param description - Name of what is being tested. + * This can be a series of names for nested categories. + * Must not be empty. + * @param fn - Function to measure. If it returns a promise, + * then it will be `await`ed automatically as part of the iteration. + * @param options - Options to customize the measurement. 
+ */ + async record( + description: string | Array, + fn: () => any, + options?: Partial> + ): Promise; + async record(a: any, b: any, c?: any): Promise { + let description: string | Array; + let descriptionSpecified = false; + let fn: () => any; + let options: Partial; + + if (typeof a === "function") { + description = []; + fn = a; + options = b || {}; + } else { + description = a; + descriptionSpecified = true; + fn = b; + options = c || {}; + } + + const mergedOptions = { ...defaultMeasureOptions, ...options }; + + if (descriptionSpecified && description.length === 0) { + throw new Error("The description must not be empty"); + } + if (typeof description === "string") { + description = [description]; + } + + const measurement = await measure(fn, { + ...mergedOptions, + verify: false, + }); + + if (description.length > 0) { + this.incorporate(description, measurement); + } + this.events.emit("record", description, measurement); + verifyMeasurement(measurement, { ...mergedOptions, verify: true }); + return measurement; + } + + /** + * Add a measurement directly to [[Benchmark.data]]. + * + * @param description - Name of what is being tested. + * Must not be empty. + * @param measurement - Measurement to add to the benchmark data. + */ + incorporate(description: Array, measurement: Measurement): void { + if (description.length === 0) { + throw new Error("The description must not be empty"); + } + this.addBenchmarkDurations( + this.data, + description, + measurement.durations, + measurement.totalDuration + ); + } + + private addBenchmarkDurations( + data: BenchmarkData, + categories: Array, + durations: Array, + totalDuration: number + ): void { + if (!(categories[0] in data)) { + data[categories[0]] = { + durations: [], + children: {}, + totalDuration: 0, + }; + } + + if (categories.length === 1) { + data[categories[0]].durations = + data[categories[0]].durations.concat(durations); + data[categories[0]].totalDuration = + (data[categories[0]].totalDuration ?? 0) + totalDuration; + } else { + this.addBenchmarkDurations( + data[categories[0]].children, + categories.slice(1), + durations, + totalDuration + ); + } + } + + private reportLevel(level: BenchmarkData, depth: number): Array { + let lines: Array = []; + for (const [description, info] of Object.entries(level)) { + const showMeasurement = info.durations.length > 0; + const showChildren = Object.keys(info.children).length > 0; + lines.push(`${" ".repeat(depth)}${description}:`); + if (showMeasurement) { + const measurement = new Measurement( + info.durations, + info.totalDuration + ); + const mean = round(measurement.mean); + const moe = round(measurement.marginOfError); + const iterations = measurement.durations.length; + const totalDuration = round(measurement.totalDuration); + lines.push( + `${" ".repeat( + depth + 1 + )}${mean} ms (+/- ${moe} ms) from ${iterations} iterations (${totalDuration} ms total)` + ); + } + if (showMeasurement && showChildren) { + lines.push(""); + } + if (showChildren) { + lines = lines.concat( + this.reportLevel(info.children, depth + 1) + ); + } + } + return lines; + } + + /** + * Create a report of all the benchmark results. 
+ */ + report(): string { + const lines = this.reportLevel(this.data, 0); + if (lines.length === 0) { + return ""; + } else { + return [HEADER, ...lines, FOOTER].join("\n"); + } + } + + private getMeasurementsAtLevel( + level: BenchmarkData, + descriptions: Array + ): Array { + let measurements: Array = []; + for (const [description, info] of Object.entries(level)) { + const localDescriptions = [...descriptions, description]; + if (info.durations.length > 0) { + const measurement = new Measurement( + info.durations, + info.totalDuration + ); + measurement.description = localDescriptions; + measurements.push(measurement); + } + measurements = measurements.concat( + this.getMeasurementsAtLevel(info.children, localDescriptions) + ); + } + return measurements; + } + + /** + * Get a list of [[Measurement]] based on [[Benchmark.data]]. + */ + get measurements(): Array { + return this.getMeasurementsAtLevel(this.data, []); + } + + /** + * Find the measurement that meets some criteria. + * In the case of a tie, the first one found wins. + * + * @param criteria - Criteria by which to select a measurement. + * @param value - Callback to select a specific field of each measurement for comparison. + * The default uses the mean plus the margin of error. + * @returns the matching measurement, or null if no measurements have been taken + */ + find( + criteria: Criteria, + value: (m: Measurement) => number = (m) => m.mean + m.marginOfError + ): Measurement | null { + let candidate = null; + for (const measurement of this.measurements) { + if (candidate === null) { + candidate = measurement; + } else if ( + criteria === Criteria.Fastest && + value(measurement) < value(candidate) + ) { + candidate = measurement; + } else if ( + criteria === Criteria.Slowest && + value(measurement) > value(candidate) + ) { + candidate = measurement; + } + } + return candidate; + } +} + + +function round(value: number, places: number = 5): number { + return mathjs.round(value, places) as number; +} diff --git a/src/index.ts b/src/index.ts index d0f44fe..69b4bae 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,479 +1,24 @@ -import hrtime from "browser-process-hrtime"; -import { EventEmitter } from "events"; -import * as mathjs from "mathjs"; -import { FOOTER, HEADER } from "./etc"; + import { JestReporter, KarmaReporter, MochaReporter } from "./reporters"; export { JestReporter, KarmaReporter, MochaReporter }; -export declare interface BenchmarkEventEmitter { - emit(event: "record", description: Array, measurement: Measurement): boolean; - on(event: "record", listener: (description: Array, measurement: Measurement) => void): this; - once(event: "record", listener: (description: Array, measurement: Measurement) => void): this; -} - -export class BenchmarkEventEmitter extends EventEmitter { } - -/** - * Base error for benchmark failures, such as a function taking too long - * to execute. - */ -export class PerformanceError extends Error { - constructor(message?: string) { - super(message); - Object.setPrototypeOf(this, new.target.prototype); - } -} - -/** - * Performance measurement result from running a benchmark. - */ -export class Measurement { - /** - * Optional name of the measurement, for use in reporting. - */ - description: Array = []; - - /** - * Total duration of the benchmark, i.e. for throughput. - * This includes time spent on any configured `beforeEach`/`afterEach` callbacks. - * When `serial` is false, this number will be lower. 
- */ - totalDuration: number; - - /** - * - * @param durations - Durations measured, in milliseconds. - * The list must not be empty. - * @param totalDuration - Duration of the entire measurement, in milliseconds. - */ - constructor(public durations: Array, totalDuration?: number) { - if (durations.length === 0) { - throw new Error("The list of durations must not be empty"); - } - this.totalDuration = totalDuration ?? mathjs.sum(durations); - } - - /** - * Mean of all durations measured, in milliseconds. - */ - get mean(): number { - return mathjs.mean(this.durations); - } - - /** - * Minimum duration measured, in milliseconds. - */ - get min(): number { - return mathjs.min(this.durations); - } - - /** - * Maximum duration measured, in milliseconds. - */ - get max(): number { - return mathjs.max(this.durations); - } - - /** - * Standard deviation of all durations measured, in milliseconds. - */ - get standardDeviation(): number { - return mathjs.std(...this.durations); - } - - /** - * Margin of error at 95% confidence level, in milliseconds. - */ - get marginOfError(): number { - return mathjs.sqrt(mathjs.variance(...this.durations) / this.durations.length) * 1.96; - } -} - -/** - * Options for Benchmark.measure(). - */ -export interface MeasureOptions { - /** - * The number of times to call the function and measure its duration. - * @default 100 - */ - iterations: number; - - /** - * Whether to wait for each iteration to finish before starting the next. - * @default true - */ - serial: boolean; - - /** - * If the mean measured duration exceeds this many milliseconds, - * throw a [[PerformanceError]]. - */ - meanUnder?: number; - - /** - * If the minimum measured duration exceeds this many milliseconds, - * throw a [[PerformanceError]]. - */ - minUnder?: number; - - /** - * If the maximum measured duration exceeds this many milliseconds, - * throw a [[PerformanceError]]. - */ - maxUnder?: number; - - /** - * If the margin of error at 95% confidence level exceeds this many milliseconds, - * throw a [[PerformanceError]]. - */ - marginOfErrorUnder?: number; - - /** - * If the standard deviation of all durations measured exceeds this many milliseconds, - * throw a [[PerformanceError]]. - */ - standardDeviationUnder?: number; - - /** - * Callback to invoke before each iteration. - */ - beforeEach?: () => any; - - /** - * Callback to invoke after each iteration. - */ - afterEach?: () => any; - - /** - * Whether to make use of the options like `meanUnder` and `minUnder`. - * @default true - */ - verify: boolean; -} - -/** - * Default options for Benchmark.measure(). - */ -const defaultMeasureOptions: MeasureOptions = { - iterations: 100, - serial: true, - verify: true, -}; - -/** - * Raw data collected from [[Benchmark.record]]. - */ -export interface BenchmarkData { - /** - * Description passed to [[Benchmark.record]]. - */ - [description: string]: { - /** - * Durations of all measured iterations, in milliseconds. - */ - durations: Array, - - /** - * Total duration of the benchmark, i.e. for throughput. - * This is nullable for compatibility with older serialized data from the Jest reporter. - */ - totalDuration?: number; - - /** - * Nested test data, such as when passing `["A", "B"]` as the - * description to [[Benchmark.record]]. 
- */ - children: BenchmarkData, - }; -} - -async function maybePromise(fn: () => any): Promise { - const ret = fn(); - if (ret instanceof Promise) { - await ret; - } -} - -function round(value: number, places: number = 5): number { - return mathjs.round(value, places) as number; -} - -/** - * Measure the time it takes for a function to execute. - * - * @param fn - Function to measure. - * @param options - Options to customize the measurement. - */ -export async function measure(fn: () => any, options: Partial = {}): Promise { - const mergedOptions = { ...defaultMeasureOptions, ...options }; - const durations: Array = []; - let calls: Array = []; - - for (let i = 0; i < mergedOptions.iterations; i++) { - calls.push(async () => { - if (mergedOptions.beforeEach !== undefined) { - await maybePromise(mergedOptions.beforeEach); - } - - const startTime = hrtime(); - await maybePromise(fn); - const [durationSec, durationNano] = hrtime(startTime); - durations.push(durationSec * 1e3 + durationNano / 1e6); - - if (mergedOptions.afterEach !== undefined) { - await maybePromise(mergedOptions.afterEach); - } - }); - } - - const measureStart = hrtime(); - - if (mergedOptions.serial) { - for (const call of calls) { - await call(); - } - } else { - await Promise.all(calls.map(x => x())); - } - - const [measureSec, measureNano] = hrtime(measureStart); - const totalDuration = measureSec * 1e3 + measureNano / 1e6; - - const measurement = new Measurement(durations, totalDuration); - verifyMeasurement(measurement, mergedOptions); - return measurement; -} - -function verifyMeasurement(measurement: Measurement, options: MeasureOptions): void { - if (!options.verify) { - return; - } - if (options.meanUnder !== undefined) { - if (measurement.mean > options.meanUnder) { - throw new PerformanceError(`Mean time of ${measurement.mean} ms exceeded threshold of ${options.meanUnder} ms`); - } - } - if (options.minUnder !== undefined) { - if (measurement.min > options.minUnder) { - throw new PerformanceError(`Minimum time of ${measurement.min} ms exceeded threshold of ${options.minUnder} ms`); - } - } - if (options.maxUnder !== undefined) { - if (measurement.max > options.maxUnder) { - throw new PerformanceError(`Maximum time of ${measurement.max} ms exceeded threshold of ${options.maxUnder} ms`); - } - } - if (options.marginOfErrorUnder !== undefined) { - if (measurement.marginOfError > options.marginOfErrorUnder) { - throw new PerformanceError(`Margin of error time of ${measurement.marginOfError} ms exceeded threshold of ${options.marginOfErrorUnder} ms`); - } - } - if (options.standardDeviationUnder !== undefined) { - if (measurement.standardDeviation > options.standardDeviationUnder) { - throw new PerformanceError(`Standard deviation time of ${measurement.standardDeviation} ms exceeded threshold of ${options.standardDeviationUnder} ms`); - } - } -} - -/** - * Used for filtering benchmark data. - */ -export enum Criteria { - Fastest, - Slowest, -} - -/** - * Aggregator for performance results of various tests. - */ -export class Benchmark { - /** - * Raw data collected from [[Benchmark.record]]. - */ - data: BenchmarkData = {}; - - /** - * Event emitter. - * - * * `record` is emitted after [[Benchmark.record]] finishes all iterations. - * - * Refer to [[BenchmarkEventEmitter.on]] for the event callback signatures. - */ - events: BenchmarkEventEmitter = new BenchmarkEventEmitter(); - - /** - * Measure the time it takes for a function to execute. 
- * In addition to returning the measurement itself, this method also - * stores the result in [[Benchmark.data]] for later use/reporting. - * - * With this overload, since no description is provided, the data will not - * be recorded directly. However, a `record` event will still be emitted, - * allowing any listeners (such as reporters) to act on it. - * - * @param fn - Function to measure. If it returns a promise, - * then it will be `await`ed automatically as part of the iteration. - * @param options - Options to customize the measurement. - */ - async record(fn: () => any, options?: Partial>): Promise; - /** - * Measure the time it takes for a function to execute. - * In addition to returning the measurement itself, this method also - * stores the result in [[Benchmark.data]] for later use/reporting, - * and [[Benchmark.events]] emits a `record` event for any listeners. - * - * @param description - Name of what is being tested. - * This can be a series of names for nested categories. - * Must not be empty. - * @param fn - Function to measure. If it returns a promise, - * then it will be `await`ed automatically as part of the iteration. - * @param options - Options to customize the measurement. - */ - async record(description: string | Array, fn: () => any, options?: Partial>): Promise; - async record(a: any, b: any, c?: any): Promise { - let description: string | Array; - let descriptionSpecified = false; - let fn: () => any; - let options: Partial; - - if (typeof a === "function") { - description = []; - fn = a; - options = b || {}; - } else { - description = a; - descriptionSpecified = true; - fn = b; - options = c || {}; - } - - const mergedOptions = { ...defaultMeasureOptions, ...options }; - - if ((descriptionSpecified && description.length === 0)) { - throw new Error("The description must not be empty"); - } - if (typeof description === "string") { - description = [description]; - } - - const measurement = await measure(fn, { ...mergedOptions, verify: false }); - - if (description.length > 0) { - this.incorporate(description, measurement); - } - this.events.emit("record", description, measurement); - verifyMeasurement(measurement, { ...mergedOptions, verify: true }); - return measurement; - } - - /** - * Add a measurement directly to [[Benchmark.data]]. - * - * @param description - Name of what is being tested. - * Must not be empty. - * @param measurement - Measurement to add to the benchmark data. - */ - incorporate(description: Array, measurement: Measurement): void { - if ((description.length === 0)) { - throw new Error("The description must not be empty"); - } - this.addBenchmarkDurations(this.data, description, measurement.durations, measurement.totalDuration); - } - - private addBenchmarkDurations(data: BenchmarkData, categories: Array, durations: Array, totalDuration: number): void { - if (!(categories[0] in data)) { - data[categories[0]] = { durations: [], children: {}, totalDuration: 0 }; - } - - if (categories.length === 1) { - data[categories[0]].durations = data[categories[0]].durations.concat(durations); - data[categories[0]].totalDuration = (data[categories[0]].totalDuration ?? 
0) + totalDuration; - } else { - this.addBenchmarkDurations(data[categories[0]].children, categories.slice(1), durations, totalDuration); - } - } - - private reportLevel(level: BenchmarkData, depth: number): Array { - let lines: Array = []; - for (const [description, info] of Object.entries(level)) { - const showMeasurement = info.durations.length > 0; - const showChildren = Object.keys(info.children).length > 0; - lines.push(`${" ".repeat(depth)}${description}:`); - if (showMeasurement) { - const measurement = new Measurement(info.durations, info.totalDuration); - const mean = round(measurement.mean); - const moe = round(measurement.marginOfError); - const iterations = measurement.durations.length; - const totalDuration = round(measurement.totalDuration); - lines.push(`${" ".repeat(depth + 1)}${mean} ms (+/- ${moe} ms) from ${iterations} iterations (${totalDuration} ms total)`); - } - if (showMeasurement && showChildren) { - lines.push(""); - } - if (showChildren) { - lines = lines.concat(this.reportLevel(info.children, depth + 1)); - } - } - return lines; - } - - /** - * Create a report of all the benchmark results. - */ - report(): string { - const lines = this.reportLevel(this.data, 0); - if (lines.length === 0) { - return ""; - } else { - return [HEADER, ...lines, FOOTER].join("\n"); - } - } +import { Benchmark } from "./Benchmark"; - private getMeasurementsAtLevel(level: BenchmarkData, descriptions: Array): Array { - let measurements: Array = []; - for (const [description, info] of Object.entries(level)) { - const localDescriptions = [...descriptions, description]; - if (info.durations.length > 0) { - const measurement = new Measurement(info.durations, info.totalDuration); - measurement.description = localDescriptions; - measurements.push(measurement); - } - measurements = measurements.concat(this.getMeasurementsAtLevel(info.children, localDescriptions)); - } - return measurements; - } +export { + Benchmark, + Criteria, + BenchmarkEventEmitter, + BenchmarkData, +} from "./Benchmark"; - /** - * Get a list of [[Measurement]] based on [[Benchmark.data]]. - */ - get measurements(): Array { - return this.getMeasurementsAtLevel(this.data, []); - } +export { + MeasureOptions, + PerformanceError, + Measurement, + measure, +} from "./measure"; - /** - * Find the measurement that meets some criteria. - * In the case of a tie, the first one found wins. - * - * @param criteria - Criteria by which to select a measurement. - * @param value - Callback to select a specific field of each measurement for comparison. - * The default uses the mean plus the margin of error. - * @returns the matching measurement, or null if no measurements have been taken - */ - find(criteria: Criteria, value: (m: Measurement) => number = m => m.mean + m.marginOfError): Measurement | null { - let candidate = null; - for (const measurement of this.measurements) { - if (candidate === null) { - candidate = measurement; - } else if (criteria === Criteria.Fastest && value(measurement) < value(candidate)) { - candidate = measurement; - } else if (criteria === Criteria.Slowest && value(measurement) > value(candidate)) { - candidate = measurement; - } - } - return candidate; - } -} /** * Default [[Benchmark]] instance for shared usage throughout your tests. diff --git a/src/measure.ts b/src/measure.ts new file mode 100644 index 0000000..59ad3b5 --- /dev/null +++ b/src/measure.ts @@ -0,0 +1,253 @@ +import hrtime from "browser-hrtime"; +import * as mathjs from "mathjs"; + +/** + * Options for Benchmark.measure(). 
+ */ +export interface MeasureOptions { + /** + * The number of times to call the function and measure its duration. + * @default 100 + */ + iterations: number; + + /** + * Whether to wait for each iteration to finish before starting the next. + * @default true + */ + serial: boolean; + + /** + * If the mean measured duration exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + meanUnder?: number; + + /** + * If the minimum measured duration exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + minUnder?: number; + + /** + * If the maximum measured duration exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + maxUnder?: number; + + /** + * If the margin of error at 95% confidence level exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + marginOfErrorUnder?: number; + + /** + * If the standard deviation of all durations measured exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + standardDeviationUnder?: number; + + /** + * Callback to invoke before each iteration. + */ + beforeEach?: () => any; + + /** + * Callback to invoke after each iteration. + */ + afterEach?: () => any; + + /** + * Whether to make use of the options like `meanUnder` and `minUnder`. + * @default true + */ + verify: boolean; +} + + + +/** + * Base error for benchmark failures, such as a function taking too long + * to execute. + */ +export class PerformanceError extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +/** + * Performance measurement result from running a benchmark. + */ +export class Measurement { + /** + * Optional name of the measurement, for use in reporting. + */ + description: Array = []; + + /** + * Total duration of the benchmark, i.e. for throughput. + * This includes time spent on any configured `beforeEach`/`afterEach` callbacks. + * When `serial` is false, this number will be lower. + */ + totalDuration: number; + + /** + * + * @param durations - Durations measured, in milliseconds. + * The list must not be empty. + * @param totalDuration - Duration of the entire measurement, in milliseconds. + */ + constructor(public durations: Array, totalDuration?: number) { + if (durations.length === 0) { + throw new Error("The list of durations must not be empty"); + } + this.totalDuration = totalDuration ?? mathjs.sum(durations); + } + + /** + * Mean of all durations measured, in milliseconds. + */ + get mean(): number { + return mathjs.mean(this.durations); + } + + /** + * Minimum duration measured, in milliseconds. + */ + get min(): number { + return mathjs.min(this.durations); + } + + /** + * Maximum duration measured, in milliseconds. + */ + get max(): number { + return mathjs.max(this.durations); + } + + /** + * Standard deviation of all durations measured, in milliseconds. + */ + get standardDeviation(): number { + return mathjs.std(...this.durations); + } + + /** + * Margin of error at 95% confidence level, in milliseconds. + */ + get marginOfError(): number { + return mathjs.sqrt(mathjs.variance(...this.durations) / this.durations.length) * 1.96; + } +} + +/** + * Default options for Benchmark.measure(). + */ +export const defaultMeasureOptions: MeasureOptions = { + iterations: 100, + serial: true, + verify: true, +}; + +/** + * Measure the time it takes for a function to execute. + * + * @param fn - Function to measure. + * @param options - Options to customize the measurement. 
+ */ +export async function measure( + fn: () => any, + options: Partial = {} +): Promise { + const mergedOptions = { ...defaultMeasureOptions, ...options }; + const durations: Array = []; + let calls: Array = []; + + for (let i = 0; i < mergedOptions.iterations; i++) { + calls.push(async () => { + if (mergedOptions.beforeEach !== undefined) { + await maybePromise(mergedOptions.beforeEach); + } + + const startTime = hrtime(); + await maybePromise(fn); + const [durationSec, durationNano] = hrtime(startTime); + durations.push(durationSec * 1e3 + durationNano / 1e6); + + if (mergedOptions.afterEach !== undefined) { + await maybePromise(mergedOptions.afterEach); + } + }); + } + + const measureStart = hrtime(); + + if (mergedOptions.serial) { + for (const call of calls) { + await call(); + } + } else { + await Promise.all(calls.map((x) => x())); + } + + const [measureSec, measureNano] = hrtime(measureStart); + const totalDuration = measureSec * 1e3 + measureNano / 1e6; + + const measurement = new Measurement(durations, totalDuration); + verifyMeasurement(measurement, mergedOptions); + return measurement; +} + +export function verifyMeasurement( + measurement: Measurement, + options: MeasureOptions +): void { + if (!options.verify) { + return; + } + if (options.meanUnder !== undefined) { + if (measurement.mean > options.meanUnder) { + throw new PerformanceError( + `Mean time of ${measurement.mean} ms exceeded threshold of ${options.meanUnder} ms` + ); + } + } + if (options.minUnder !== undefined) { + if (measurement.min > options.minUnder) { + throw new PerformanceError( + `Minimum time of ${measurement.min} ms exceeded threshold of ${options.minUnder} ms` + ); + } + } + if (options.maxUnder !== undefined) { + if (measurement.max > options.maxUnder) { + throw new PerformanceError( + `Maximum time of ${measurement.max} ms exceeded threshold of ${options.maxUnder} ms` + ); + } + } + if (options.marginOfErrorUnder !== undefined) { + if (measurement.marginOfError > options.marginOfErrorUnder) { + throw new PerformanceError( + `Margin of error time of ${measurement.marginOfError} ms exceeded threshold of ${options.marginOfErrorUnder} ms` + ); + } + } + if (options.standardDeviationUnder !== undefined) { + if (measurement.standardDeviation > options.standardDeviationUnder) { + throw new PerformanceError( + `Standard deviation time of ${measurement.standardDeviation} ms exceeded threshold of ${options.standardDeviationUnder} ms` + ); + } + } +} + + +async function maybePromise(fn: () => any): Promise { + const ret = fn(); + if (ret instanceof Promise) { + await ret; + } +} From 49225bbd756ae4d3b8eac7057f0b444268c9be26 Mon Sep 17 00:00:00 2001 From: Haowen Chen Date: Mon, 4 Dec 2023 11:05:57 +0800 Subject: [PATCH 2/3] Revert "Re-arrange code and update dependencies to support browser" --- package-lock.json | 29 +-- package.json | 3 +- src/Benchmark.ts | 310 ----------------------------- src/index.ts | 483 ++++++++++++++++++++++++++++++++++++++++++++-- src/measure.ts | 253 ------------------------ 5 files changed, 473 insertions(+), 605 deletions(-) delete mode 100644 src/Benchmark.ts delete mode 100644 src/measure.ts diff --git a/package-lock.json b/package-lock.json index 56fbeb6..b7db839 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,8 +9,7 @@ "version": "0.9.0", "license": "MIT", "dependencies": { - "browser-hrtime": "^1.1.8", - "eventemitter3": "^4.0.7", + "browser-process-hrtime": "^1.0.0", "mathjs": "^10.5.2" }, "devDependencies": { @@ -1884,16 +1883,10 @@ "node": ">=8" } }, - 
"node_modules/browser-hrtime": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/browser-hrtime/-/browser-hrtime-1.1.8.tgz", - "integrity": "sha512-kzXheikaJsBtzUBlyVtPIY5r0soQePzjwVwT4IlDpU2RvfB5Py52gpU98M77rgqMCheoSSZvrcrdj3t6cZ3suA==" - }, "node_modules/browser-process-hrtime": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", - "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", - "dev": true + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==" }, "node_modules/browserslist": { "version": "4.20.3", @@ -2581,11 +2574,6 @@ "node": ">=0.10.0" } }, - "node_modules/eventemitter3": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", - "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" - }, "node_modules/exec-sh": { "version": "0.3.6", "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.6.tgz", @@ -9622,16 +9610,10 @@ "fill-range": "^7.0.1" } }, - "browser-hrtime": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/browser-hrtime/-/browser-hrtime-1.1.8.tgz", - "integrity": "sha512-kzXheikaJsBtzUBlyVtPIY5r0soQePzjwVwT4IlDpU2RvfB5Py52gpU98M77rgqMCheoSSZvrcrdj3t6cZ3suA==" - }, "browser-process-hrtime": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", - "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", - "dev": true + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==" }, "browserslist": { "version": "4.20.3", @@ -10154,11 +10136,6 @@ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true }, - "eventemitter3": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", - "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" - }, "exec-sh": { "version": "0.3.6", "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.6.tgz", diff --git a/package.json b/package.json index e69eb57..4e83691 100644 --- a/package.json +++ b/package.json @@ -23,8 +23,7 @@ "test": "npx jest --verbose --silent --coverage --runInBand" }, "dependencies": { - "browser-hrtime": "^1.1.8", - "eventemitter3": "^4.0.7", + "browser-process-hrtime": "^1.0.0", "mathjs": "^10.5.2" }, "devDependencies": { diff --git a/src/Benchmark.ts b/src/Benchmark.ts deleted file mode 100644 index a4e91a0..0000000 --- a/src/Benchmark.ts +++ /dev/null @@ -1,310 +0,0 @@ -import { EventEmitter } from "eventemitter3"; -import * as mathjs from "mathjs"; -import { FOOTER, HEADER } from "./etc"; -import { defaultMeasureOptions, measure, Measurement, MeasureOptions, verifyMeasurement } from "./measure"; - -export declare interface BenchmarkEventEmitter { - emit( - event: "record", - description: Array, - measurement: Measurement - ): boolean; - on( - event: "record", - listener: (description: Array, measurement: Measurement) => void - ): this; - once( - event: "record", - listener: (description: Array, measurement: Measurement) => void - ): this; -} - -export class BenchmarkEventEmitter extends EventEmitter<"record"> {} - - -/** - * Raw data 
collected from [[Benchmark.record]]. - */ -export interface BenchmarkData { - /** - * Description passed to [[Benchmark.record]]. - */ - [description: string]: { - /** - * Durations of all measured iterations, in milliseconds. - */ - durations: Array, - - /** - * Total duration of the benchmark, i.e. for throughput. - * This is nullable for compatibility with older serialized data from the Jest reporter. - */ - totalDuration?: number; - - /** - * Nested test data, such as when passing `["A", "B"]` as the - * description to [[Benchmark.record]]. - */ - children: BenchmarkData, - }; -} - -/** - * Used for filtering benchmark data. - */ -export enum Criteria { - Fastest, - Slowest, -} - -/** - * Aggregator for performance results of various tests. - */ -export class Benchmark { - /** - * Raw data collected from [[Benchmark.record]]. - */ - data: BenchmarkData = {}; - - /** - * Event emitter. - * - * * `record` is emitted after [[Benchmark.record]] finishes all iterations. - * - * Refer to [[BenchmarkEventEmitter.on]] for the event callback signatures. - */ - events: BenchmarkEventEmitter = new BenchmarkEventEmitter(); - - /** - * Measure the time it takes for a function to execute. - * In addition to returning the measurement itself, this method also - * stores the result in [[Benchmark.data]] for later use/reporting. - * - * With this overload, since no description is provided, the data will not - * be recorded directly. However, a `record` event will still be emitted, - * allowing any listeners (such as reporters) to act on it. - * - * @param fn - Function to measure. If it returns a promise, - * then it will be `await`ed automatically as part of the iteration. - * @param options - Options to customize the measurement. - */ - async record( - fn: () => any, - options?: Partial> - ): Promise; - /** - * Measure the time it takes for a function to execute. - * In addition to returning the measurement itself, this method also - * stores the result in [[Benchmark.data]] for later use/reporting, - * and [[Benchmark.events]] emits a `record` event for any listeners. - * - * @param description - Name of what is being tested. - * This can be a series of names for nested categories. - * Must not be empty. - * @param fn - Function to measure. If it returns a promise, - * then it will be `await`ed automatically as part of the iteration. - * @param options - Options to customize the measurement. - */ - async record( - description: string | Array, - fn: () => any, - options?: Partial> - ): Promise; - async record(a: any, b: any, c?: any): Promise { - let description: string | Array; - let descriptionSpecified = false; - let fn: () => any; - let options: Partial; - - if (typeof a === "function") { - description = []; - fn = a; - options = b || {}; - } else { - description = a; - descriptionSpecified = true; - fn = b; - options = c || {}; - } - - const mergedOptions = { ...defaultMeasureOptions, ...options }; - - if (descriptionSpecified && description.length === 0) { - throw new Error("The description must not be empty"); - } - if (typeof description === "string") { - description = [description]; - } - - const measurement = await measure(fn, { - ...mergedOptions, - verify: false, - }); - - if (description.length > 0) { - this.incorporate(description, measurement); - } - this.events.emit("record", description, measurement); - verifyMeasurement(measurement, { ...mergedOptions, verify: true }); - return measurement; - } - - /** - * Add a measurement directly to [[Benchmark.data]]. 
- * - * @param description - Name of what is being tested. - * Must not be empty. - * @param measurement - Measurement to add to the benchmark data. - */ - incorporate(description: Array, measurement: Measurement): void { - if (description.length === 0) { - throw new Error("The description must not be empty"); - } - this.addBenchmarkDurations( - this.data, - description, - measurement.durations, - measurement.totalDuration - ); - } - - private addBenchmarkDurations( - data: BenchmarkData, - categories: Array, - durations: Array, - totalDuration: number - ): void { - if (!(categories[0] in data)) { - data[categories[0]] = { - durations: [], - children: {}, - totalDuration: 0, - }; - } - - if (categories.length === 1) { - data[categories[0]].durations = - data[categories[0]].durations.concat(durations); - data[categories[0]].totalDuration = - (data[categories[0]].totalDuration ?? 0) + totalDuration; - } else { - this.addBenchmarkDurations( - data[categories[0]].children, - categories.slice(1), - durations, - totalDuration - ); - } - } - - private reportLevel(level: BenchmarkData, depth: number): Array { - let lines: Array = []; - for (const [description, info] of Object.entries(level)) { - const showMeasurement = info.durations.length > 0; - const showChildren = Object.keys(info.children).length > 0; - lines.push(`${" ".repeat(depth)}${description}:`); - if (showMeasurement) { - const measurement = new Measurement( - info.durations, - info.totalDuration - ); - const mean = round(measurement.mean); - const moe = round(measurement.marginOfError); - const iterations = measurement.durations.length; - const totalDuration = round(measurement.totalDuration); - lines.push( - `${" ".repeat( - depth + 1 - )}${mean} ms (+/- ${moe} ms) from ${iterations} iterations (${totalDuration} ms total)` - ); - } - if (showMeasurement && showChildren) { - lines.push(""); - } - if (showChildren) { - lines = lines.concat( - this.reportLevel(info.children, depth + 1) - ); - } - } - return lines; - } - - /** - * Create a report of all the benchmark results. - */ - report(): string { - const lines = this.reportLevel(this.data, 0); - if (lines.length === 0) { - return ""; - } else { - return [HEADER, ...lines, FOOTER].join("\n"); - } - } - - private getMeasurementsAtLevel( - level: BenchmarkData, - descriptions: Array - ): Array { - let measurements: Array = []; - for (const [description, info] of Object.entries(level)) { - const localDescriptions = [...descriptions, description]; - if (info.durations.length > 0) { - const measurement = new Measurement( - info.durations, - info.totalDuration - ); - measurement.description = localDescriptions; - measurements.push(measurement); - } - measurements = measurements.concat( - this.getMeasurementsAtLevel(info.children, localDescriptions) - ); - } - return measurements; - } - - /** - * Get a list of [[Measurement]] based on [[Benchmark.data]]. - */ - get measurements(): Array { - return this.getMeasurementsAtLevel(this.data, []); - } - - /** - * Find the measurement that meets some criteria. - * In the case of a tie, the first one found wins. - * - * @param criteria - Criteria by which to select a measurement. - * @param value - Callback to select a specific field of each measurement for comparison. - * The default uses the mean plus the margin of error. 
- * @returns the matching measurement, or null if no measurements have been taken - */ - find( - criteria: Criteria, - value: (m: Measurement) => number = (m) => m.mean + m.marginOfError - ): Measurement | null { - let candidate = null; - for (const measurement of this.measurements) { - if (candidate === null) { - candidate = measurement; - } else if ( - criteria === Criteria.Fastest && - value(measurement) < value(candidate) - ) { - candidate = measurement; - } else if ( - criteria === Criteria.Slowest && - value(measurement) > value(candidate) - ) { - candidate = measurement; - } - } - return candidate; - } -} - - -function round(value: number, places: number = 5): number { - return mathjs.round(value, places) as number; -} diff --git a/src/index.ts b/src/index.ts index 69b4bae..d0f44fe 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,24 +1,479 @@ - +import hrtime from "browser-process-hrtime"; +import { EventEmitter } from "events"; +import * as mathjs from "mathjs"; +import { FOOTER, HEADER } from "./etc"; import { JestReporter, KarmaReporter, MochaReporter } from "./reporters"; export { JestReporter, KarmaReporter, MochaReporter }; -import { Benchmark } from "./Benchmark"; +export declare interface BenchmarkEventEmitter { + emit(event: "record", description: Array, measurement: Measurement): boolean; + on(event: "record", listener: (description: Array, measurement: Measurement) => void): this; + once(event: "record", listener: (description: Array, measurement: Measurement) => void): this; +} + +export class BenchmarkEventEmitter extends EventEmitter { } + +/** + * Base error for benchmark failures, such as a function taking too long + * to execute. + */ +export class PerformanceError extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +/** + * Performance measurement result from running a benchmark. + */ +export class Measurement { + /** + * Optional name of the measurement, for use in reporting. + */ + description: Array = []; + + /** + * Total duration of the benchmark, i.e. for throughput. + * This includes time spent on any configured `beforeEach`/`afterEach` callbacks. + * When `serial` is false, this number will be lower. + */ + totalDuration: number; + + /** + * + * @param durations - Durations measured, in milliseconds. + * The list must not be empty. + * @param totalDuration - Duration of the entire measurement, in milliseconds. + */ + constructor(public durations: Array, totalDuration?: number) { + if (durations.length === 0) { + throw new Error("The list of durations must not be empty"); + } + this.totalDuration = totalDuration ?? mathjs.sum(durations); + } + + /** + * Mean of all durations measured, in milliseconds. + */ + get mean(): number { + return mathjs.mean(this.durations); + } + + /** + * Minimum duration measured, in milliseconds. + */ + get min(): number { + return mathjs.min(this.durations); + } + + /** + * Maximum duration measured, in milliseconds. + */ + get max(): number { + return mathjs.max(this.durations); + } + + /** + * Standard deviation of all durations measured, in milliseconds. + */ + get standardDeviation(): number { + return mathjs.std(...this.durations); + } + + /** + * Margin of error at 95% confidence level, in milliseconds. + */ + get marginOfError(): number { + return mathjs.sqrt(mathjs.variance(...this.durations) / this.durations.length) * 1.96; + } +} + +/** + * Options for Benchmark.measure(). 
+ */ +export interface MeasureOptions { + /** + * The number of times to call the function and measure its duration. + * @default 100 + */ + iterations: number; + + /** + * Whether to wait for each iteration to finish before starting the next. + * @default true + */ + serial: boolean; + + /** + * If the mean measured duration exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + meanUnder?: number; + + /** + * If the minimum measured duration exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + minUnder?: number; + + /** + * If the maximum measured duration exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + maxUnder?: number; + + /** + * If the margin of error at 95% confidence level exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + marginOfErrorUnder?: number; + + /** + * If the standard deviation of all durations measured exceeds this many milliseconds, + * throw a [[PerformanceError]]. + */ + standardDeviationUnder?: number; + + /** + * Callback to invoke before each iteration. + */ + beforeEach?: () => any; + + /** + * Callback to invoke after each iteration. + */ + afterEach?: () => any; + + /** + * Whether to make use of the options like `meanUnder` and `minUnder`. + * @default true + */ + verify: boolean; +} + +/** + * Default options for Benchmark.measure(). + */ +const defaultMeasureOptions: MeasureOptions = { + iterations: 100, + serial: true, + verify: true, +}; + +/** + * Raw data collected from [[Benchmark.record]]. + */ +export interface BenchmarkData { + /** + * Description passed to [[Benchmark.record]]. + */ + [description: string]: { + /** + * Durations of all measured iterations, in milliseconds. + */ + durations: Array, + + /** + * Total duration of the benchmark, i.e. for throughput. + * This is nullable for compatibility with older serialized data from the Jest reporter. + */ + totalDuration?: number; + + /** + * Nested test data, such as when passing `["A", "B"]` as the + * description to [[Benchmark.record]]. + */ + children: BenchmarkData, + }; +} + +async function maybePromise(fn: () => any): Promise { + const ret = fn(); + if (ret instanceof Promise) { + await ret; + } +} + +function round(value: number, places: number = 5): number { + return mathjs.round(value, places) as number; +} + +/** + * Measure the time it takes for a function to execute. + * + * @param fn - Function to measure. + * @param options - Options to customize the measurement. 
+ */ +export async function measure(fn: () => any, options: Partial = {}): Promise { + const mergedOptions = { ...defaultMeasureOptions, ...options }; + const durations: Array = []; + let calls: Array = []; + + for (let i = 0; i < mergedOptions.iterations; i++) { + calls.push(async () => { + if (mergedOptions.beforeEach !== undefined) { + await maybePromise(mergedOptions.beforeEach); + } + + const startTime = hrtime(); + await maybePromise(fn); + const [durationSec, durationNano] = hrtime(startTime); + durations.push(durationSec * 1e3 + durationNano / 1e6); + + if (mergedOptions.afterEach !== undefined) { + await maybePromise(mergedOptions.afterEach); + } + }); + } + + const measureStart = hrtime(); + + if (mergedOptions.serial) { + for (const call of calls) { + await call(); + } + } else { + await Promise.all(calls.map(x => x())); + } + + const [measureSec, measureNano] = hrtime(measureStart); + const totalDuration = measureSec * 1e3 + measureNano / 1e6; + + const measurement = new Measurement(durations, totalDuration); + verifyMeasurement(measurement, mergedOptions); + return measurement; +} + +function verifyMeasurement(measurement: Measurement, options: MeasureOptions): void { + if (!options.verify) { + return; + } + if (options.meanUnder !== undefined) { + if (measurement.mean > options.meanUnder) { + throw new PerformanceError(`Mean time of ${measurement.mean} ms exceeded threshold of ${options.meanUnder} ms`); + } + } + if (options.minUnder !== undefined) { + if (measurement.min > options.minUnder) { + throw new PerformanceError(`Minimum time of ${measurement.min} ms exceeded threshold of ${options.minUnder} ms`); + } + } + if (options.maxUnder !== undefined) { + if (measurement.max > options.maxUnder) { + throw new PerformanceError(`Maximum time of ${measurement.max} ms exceeded threshold of ${options.maxUnder} ms`); + } + } + if (options.marginOfErrorUnder !== undefined) { + if (measurement.marginOfError > options.marginOfErrorUnder) { + throw new PerformanceError(`Margin of error time of ${measurement.marginOfError} ms exceeded threshold of ${options.marginOfErrorUnder} ms`); + } + } + if (options.standardDeviationUnder !== undefined) { + if (measurement.standardDeviation > options.standardDeviationUnder) { + throw new PerformanceError(`Standard deviation time of ${measurement.standardDeviation} ms exceeded threshold of ${options.standardDeviationUnder} ms`); + } + } +} + +/** + * Used for filtering benchmark data. + */ +export enum Criteria { + Fastest, + Slowest, +} + +/** + * Aggregator for performance results of various tests. + */ +export class Benchmark { + /** + * Raw data collected from [[Benchmark.record]]. + */ + data: BenchmarkData = {}; + + /** + * Event emitter. + * + * * `record` is emitted after [[Benchmark.record]] finishes all iterations. + * + * Refer to [[BenchmarkEventEmitter.on]] for the event callback signatures. + */ + events: BenchmarkEventEmitter = new BenchmarkEventEmitter(); + + /** + * Measure the time it takes for a function to execute. + * In addition to returning the measurement itself, this method also + * stores the result in [[Benchmark.data]] for later use/reporting. + * + * With this overload, since no description is provided, the data will not + * be recorded directly. However, a `record` event will still be emitted, + * allowing any listeners (such as reporters) to act on it. + * + * @param fn - Function to measure. If it returns a promise, + * then it will be `await`ed automatically as part of the iteration. 
+ * @param options - Options to customize the measurement. + */ + async record(fn: () => any, options?: Partial>): Promise; + /** + * Measure the time it takes for a function to execute. + * In addition to returning the measurement itself, this method also + * stores the result in [[Benchmark.data]] for later use/reporting, + * and [[Benchmark.events]] emits a `record` event for any listeners. + * + * @param description - Name of what is being tested. + * This can be a series of names for nested categories. + * Must not be empty. + * @param fn - Function to measure. If it returns a promise, + * then it will be `await`ed automatically as part of the iteration. + * @param options - Options to customize the measurement. + */ + async record(description: string | Array, fn: () => any, options?: Partial>): Promise; + async record(a: any, b: any, c?: any): Promise { + let description: string | Array; + let descriptionSpecified = false; + let fn: () => any; + let options: Partial; + + if (typeof a === "function") { + description = []; + fn = a; + options = b || {}; + } else { + description = a; + descriptionSpecified = true; + fn = b; + options = c || {}; + } + + const mergedOptions = { ...defaultMeasureOptions, ...options }; + + if ((descriptionSpecified && description.length === 0)) { + throw new Error("The description must not be empty"); + } + if (typeof description === "string") { + description = [description]; + } + + const measurement = await measure(fn, { ...mergedOptions, verify: false }); + + if (description.length > 0) { + this.incorporate(description, measurement); + } + this.events.emit("record", description, measurement); + verifyMeasurement(measurement, { ...mergedOptions, verify: true }); + return measurement; + } + + /** + * Add a measurement directly to [[Benchmark.data]]. + * + * @param description - Name of what is being tested. + * Must not be empty. + * @param measurement - Measurement to add to the benchmark data. + */ + incorporate(description: Array, measurement: Measurement): void { + if ((description.length === 0)) { + throw new Error("The description must not be empty"); + } + this.addBenchmarkDurations(this.data, description, measurement.durations, measurement.totalDuration); + } + + private addBenchmarkDurations(data: BenchmarkData, categories: Array, durations: Array, totalDuration: number): void { + if (!(categories[0] in data)) { + data[categories[0]] = { durations: [], children: {}, totalDuration: 0 }; + } + + if (categories.length === 1) { + data[categories[0]].durations = data[categories[0]].durations.concat(durations); + data[categories[0]].totalDuration = (data[categories[0]].totalDuration ?? 
0) + totalDuration; + } else { + this.addBenchmarkDurations(data[categories[0]].children, categories.slice(1), durations, totalDuration); + } + } + + private reportLevel(level: BenchmarkData, depth: number): Array { + let lines: Array = []; + for (const [description, info] of Object.entries(level)) { + const showMeasurement = info.durations.length > 0; + const showChildren = Object.keys(info.children).length > 0; + lines.push(`${" ".repeat(depth)}${description}:`); + if (showMeasurement) { + const measurement = new Measurement(info.durations, info.totalDuration); + const mean = round(measurement.mean); + const moe = round(measurement.marginOfError); + const iterations = measurement.durations.length; + const totalDuration = round(measurement.totalDuration); + lines.push(`${" ".repeat(depth + 1)}${mean} ms (+/- ${moe} ms) from ${iterations} iterations (${totalDuration} ms total)`); + } + if (showMeasurement && showChildren) { + lines.push(""); + } + if (showChildren) { + lines = lines.concat(this.reportLevel(info.children, depth + 1)); + } + } + return lines; + } + + /** + * Create a report of all the benchmark results. + */ + report(): string { + const lines = this.reportLevel(this.data, 0); + if (lines.length === 0) { + return ""; + } else { + return [HEADER, ...lines, FOOTER].join("\n"); + } + } -export { - Benchmark, - Criteria, - BenchmarkEventEmitter, - BenchmarkData, -} from "./Benchmark"; + private getMeasurementsAtLevel(level: BenchmarkData, descriptions: Array): Array { + let measurements: Array = []; + for (const [description, info] of Object.entries(level)) { + const localDescriptions = [...descriptions, description]; + if (info.durations.length > 0) { + const measurement = new Measurement(info.durations, info.totalDuration); + measurement.description = localDescriptions; + measurements.push(measurement); + } + measurements = measurements.concat(this.getMeasurementsAtLevel(info.children, localDescriptions)); + } + return measurements; + } -export { - MeasureOptions, - PerformanceError, - Measurement, - measure, -} from "./measure"; + /** + * Get a list of [[Measurement]] based on [[Benchmark.data]]. + */ + get measurements(): Array { + return this.getMeasurementsAtLevel(this.data, []); + } + /** + * Find the measurement that meets some criteria. + * In the case of a tie, the first one found wins. + * + * @param criteria - Criteria by which to select a measurement. + * @param value - Callback to select a specific field of each measurement for comparison. + * The default uses the mean plus the margin of error. + * @returns the matching measurement, or null if no measurements have been taken + */ + find(criteria: Criteria, value: (m: Measurement) => number = m => m.mean + m.marginOfError): Measurement | null { + let candidate = null; + for (const measurement of this.measurements) { + if (candidate === null) { + candidate = measurement; + } else if (criteria === Criteria.Fastest && value(measurement) < value(candidate)) { + candidate = measurement; + } else if (criteria === Criteria.Slowest && value(measurement) > value(candidate)) { + candidate = measurement; + } + } + return candidate; + } +} /** * Default [[Benchmark]] instance for shared usage throughout your tests. diff --git a/src/measure.ts b/src/measure.ts deleted file mode 100644 index 59ad3b5..0000000 --- a/src/measure.ts +++ /dev/null @@ -1,253 +0,0 @@ -import hrtime from "browser-hrtime"; -import * as mathjs from "mathjs"; - -/** - * Options for Benchmark.measure(). 
diff --git a/src/measure.ts b/src/measure.ts
new file mode 100644
index 0000000..59ad3b5
--- /dev/null
+++ b/src/measure.ts
@@ -0,0 +1,253 @@
+import hrtime from "browser-hrtime";
+import * as mathjs from "mathjs";
+
+/**
+ * Options for Benchmark.measure().
+ */
+export interface MeasureOptions {
+    /**
+     * The number of times to call the function and measure its duration.
+     * @default 100
+     */
+    iterations: number;
+
+    /**
+     * Whether to wait for each iteration to finish before starting the next.
+     * @default true
+     */
+    serial: boolean;
+
+    /**
+     * If the mean measured duration exceeds this many milliseconds,
+     * throw a [[PerformanceError]].
+     */
+    meanUnder?: number;
+
+    /**
+     * If the minimum measured duration exceeds this many milliseconds,
+     * throw a [[PerformanceError]].
+     */
+    minUnder?: number;
+
+    /**
+     * If the maximum measured duration exceeds this many milliseconds,
+     * throw a [[PerformanceError]].
+     */
+    maxUnder?: number;
+
+    /**
+     * If the margin of error at 95% confidence level exceeds this many milliseconds,
+     * throw a [[PerformanceError]].
+     */
+    marginOfErrorUnder?: number;
+
+    /**
+     * If the standard deviation of all durations measured exceeds this many milliseconds,
+     * throw a [[PerformanceError]].
+     */
+    standardDeviationUnder?: number;
+
+    /**
+     * Callback to invoke before each iteration.
+     */
+    beforeEach?: () => any;
+
+    /**
+     * Callback to invoke after each iteration.
+     */
+    afterEach?: () => any;
+
+    /**
+     * Whether to make use of the options like `meanUnder` and `minUnder`.
+     * @default true
+     */
+    verify: boolean;
+}
+
+/**
+ * Base error for benchmark failures, such as a function taking too long
+ * to execute.
+ */
+export class PerformanceError extends Error {
+    constructor(message?: string) {
+        super(message);
+        Object.setPrototypeOf(this, new.target.prototype);
+    }
+}
+
+/**
+ * Performance measurement result from running a benchmark.
+ */
+export class Measurement {
+    /**
+     * Optional name of the measurement, for use in reporting.
+     */
+    description: Array<string> = [];
+
+    /**
+     * Total duration of the benchmark, i.e. for throughput.
+     * This includes time spent on any configured `beforeEach`/`afterEach` callbacks.
+     * When `serial` is false, this will be lower than the sum of the
+     * individual durations, since iterations overlap in time.
+     */
+    totalDuration: number;
+
+    /**
+     * @param durations - Durations measured, in milliseconds.
+     *     The list must not be empty.
+     * @param totalDuration - Duration of the entire measurement, in milliseconds.
+     */
+    constructor(public durations: Array<number>, totalDuration?: number) {
+        if (durations.length === 0) {
+            throw new Error("The list of durations must not be empty");
+        }
+        this.totalDuration = totalDuration ?? mathjs.sum(durations);
+    }
+
+    /**
+     * Mean of all durations measured, in milliseconds.
+     */
+    get mean(): number {
+        return mathjs.mean(this.durations);
+    }
+
+    /**
+     * Minimum duration measured, in milliseconds.
+     */
+    get min(): number {
+        return mathjs.min(this.durations);
+    }
+
+    /**
+     * Maximum duration measured, in milliseconds.
+     */
+    get max(): number {
+        return mathjs.max(this.durations);
+    }
+
+    /**
+     * Standard deviation of all durations measured, in milliseconds.
+     */
+    get standardDeviation(): number {
+        return mathjs.std(...this.durations);
+    }
+
+    /**
+     * Margin of error at 95% confidence level, in milliseconds.
+     */
+    get marginOfError(): number {
+        return mathjs.sqrt(mathjs.variance(...this.durations) / this.durations.length) * 1.96;
+    }
+}
+
+/**
+ * Default options for Benchmark.measure().
+ */
+export const defaultMeasureOptions: MeasureOptions = {
+    iterations: 100,
+    serial: true,
+    verify: true,
+};
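To make the statistics above concrete, a small sketch with invented duration values, showing what the getters return; the import path assumes the package entry point:

```ts
import { Measurement } from "kelonio";

// Five invented durations, in milliseconds.
const m = new Measurement([10, 11, 9, 12, 10]);

console.log(m.mean);              // 10.4
console.log(m.min, m.max);        // 9 12
console.log(m.standardDeviation); // sample standard deviation of the durations
// Margin of error at 95% confidence: sqrt(variance / n) * 1.96
console.log(m.marginOfError);
console.log(m.totalDuration);     // defaults to the sum of durations: 52
```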
+/**
+ * Measure the time it takes for a function to execute.
+ *
+ * @param fn - Function to measure.
+ * @param options - Options to customize the measurement.
+ */
+export async function measure(
+    fn: () => any,
+    options: Partial<MeasureOptions> = {}
+): Promise<Measurement> {
+    const mergedOptions = { ...defaultMeasureOptions, ...options };
+    const durations: Array<number> = [];
+    let calls: Array<() => Promise<void>> = [];
+
+    for (let i = 0; i < mergedOptions.iterations; i++) {
+        calls.push(async () => {
+            if (mergedOptions.beforeEach !== undefined) {
+                await maybePromise(mergedOptions.beforeEach);
+            }
+
+            const startTime = hrtime();
+            await maybePromise(fn);
+            const [durationSec, durationNano] = hrtime(startTime);
+            durations.push(durationSec * 1e3 + durationNano / 1e6);
+
+            if (mergedOptions.afterEach !== undefined) {
+                await maybePromise(mergedOptions.afterEach);
+            }
+        });
+    }
+
+    const measureStart = hrtime();
+
+    if (mergedOptions.serial) {
+        for (const call of calls) {
+            await call();
+        }
+    } else {
+        await Promise.all(calls.map((x) => x()));
+    }
+
+    const [measureSec, measureNano] = hrtime(measureStart);
+    const totalDuration = measureSec * 1e3 + measureNano / 1e6;
+
+    const measurement = new Measurement(durations, totalDuration);
+    verifyMeasurement(measurement, mergedOptions);
+    return measurement;
+}
+
+export function verifyMeasurement(
+    measurement: Measurement,
+    options: MeasureOptions
+): void {
+    if (!options.verify) {
+        return;
+    }
+    if (options.meanUnder !== undefined) {
+        if (measurement.mean > options.meanUnder) {
+            throw new PerformanceError(
+                `Mean time of ${measurement.mean} ms exceeded threshold of ${options.meanUnder} ms`
+            );
+        }
+    }
+    if (options.minUnder !== undefined) {
+        if (measurement.min > options.minUnder) {
+            throw new PerformanceError(
+                `Minimum time of ${measurement.min} ms exceeded threshold of ${options.minUnder} ms`
+            );
+        }
+    }
+    if (options.maxUnder !== undefined) {
+        if (measurement.max > options.maxUnder) {
+            throw new PerformanceError(
+                `Maximum time of ${measurement.max} ms exceeded threshold of ${options.maxUnder} ms`
+            );
+        }
+    }
+    if (options.marginOfErrorUnder !== undefined) {
+        if (measurement.marginOfError > options.marginOfErrorUnder) {
+            throw new PerformanceError(
+                `Margin of error time of ${measurement.marginOfError} ms exceeded threshold of ${options.marginOfErrorUnder} ms`
+            );
+        }
+    }
+    if (options.standardDeviationUnder !== undefined) {
+        if (measurement.standardDeviation > options.standardDeviationUnder) {
+            throw new PerformanceError(
+                `Standard deviation time of ${measurement.standardDeviation} ms exceeded threshold of ${options.standardDeviationUnder} ms`
+            );
+        }
+    }
+}
+
+async function maybePromise(fn: () => any): Promise<void> {
+    const ret = fn();
+    if (ret instanceof Promise) {
+        await ret;
+    }
+}
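A sketch of how `measure` and its verification thresholds combine; the async workload here is invented, and the import path assumes the package entry point:

```ts
import { measure, PerformanceError } from "kelonio";

async function main(): Promise<void> {
    try {
        const measurement = await measure(
            async () => { await new Promise<void>((resolve) => setTimeout(resolve, 1)); },
            { iterations: 20, meanUnder: 5, maxUnder: 50 }
        );
        console.log(`mean: ${measurement.mean} ms`);
    } catch (err) {
        if (err instanceof PerformanceError) {
            console.error(`Too slow: ${err.message}`);
        } else {
            throw err;
        }
    }
}

main();
```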
 src/reporters/utils.ts
diff --git a/src/etc.ts b/src/etc.ts
deleted file mode 100644
index 9fd7f89..0000000
--- a/src/etc.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-import * as fsModule from "fs";
-
-export const STATE_FILE = ".kelonio.state.json";
-const HEADER_SIDE = "- ".repeat(17).trim();
-export const HEADER = `${HEADER_SIDE} Performance ${HEADER_SIDE}`;
-export const FOOTER = "- ".repeat(40).trim();
-
-let fs: typeof fsModule;
-let canUseFs = true;
-try {
-    fs = require("fs");
-} catch {
-    canUseFs = false;
-}
-
-export class BenchmarkFileState {
-    constructor() {
-        if (!canUseFs) {
-            throw new Error("Unable to access file system");
-        }
-    }
-
-    exists(): boolean {
-        return fs.existsSync(STATE_FILE);
-    }
-
-    read(): any {
-        return JSON.parse(fs.readFileSync(STATE_FILE, "utf-8"));
-    }
-
-    write(data: object): void {
-        fs.writeFileSync(STATE_FILE, JSON.stringify(data), "utf-8");
-    }
-
-    append(data: object): void {
-        let previousData;
-        try {
-            previousData = this.read();
-        } catch {
-            previousData = {};
-        }
-        this.write({ ...previousData, ...data });
-    }
-
-    delete(): void {
-        try { fs.unlinkSync(STATE_FILE); } catch { }
-    }
-}
diff --git a/src/index.ts b/src/index.ts
index d0f44fe..111bcde 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,10 +1,7 @@
 import hrtime from "browser-process-hrtime";
 import { EventEmitter } from "events";
 import * as mathjs from "mathjs";
-import { FOOTER, HEADER } from "./etc";
-import { JestReporter, KarmaReporter, MochaReporter } from "./reporters";
-
-export { JestReporter, KarmaReporter, MochaReporter };
+import { FOOTER, HEADER } from "./reporters/utils";
 
 export declare interface BenchmarkEventEmitter {
     emit(event: "record", description: Array<string>, measurement: Measurement): boolean;
diff --git a/src/plugin/jestReporter.ts b/src/plugin/jestReporter.ts
index c5528b9..db312d9 100644
--- a/src/plugin/jestReporter.ts
+++ b/src/plugin/jestReporter.ts
@@ -1,3 +1,3 @@
-import { JestReporter } from "..";
+import { JestReporter } from "../reporters/jest";
 
 export = JestReporter;
diff --git a/src/plugin/jestReporterSetup.ts b/src/plugin/jestReporterSetup.ts
index 8b59329..f035de4 100644
--- a/src/plugin/jestReporterSetup.ts
+++ b/src/plugin/jestReporterSetup.ts
@@ -1,3 +1,3 @@
-import { JestReporter } from "..";
+import { JestReporter } from "../reporters/jest";
 
 JestReporter.initializeKelonio();
diff --git a/src/plugin/karmaReporter.ts b/src/plugin/karmaReporter.ts
index 2990082..fe29566 100644
--- a/src/plugin/karmaReporter.ts
+++ b/src/plugin/karmaReporter.ts
@@ -1,4 +1,4 @@
-import { KarmaReporter } from "..";
+import { KarmaReporter } from "../reporters/karma";
 
 module.exports = {
     "reporter:kelonio": ["type", KarmaReporter],
diff --git a/src/plugin/karmaReporterSetup.ts b/src/plugin/karmaReporterSetup.ts
index 20c0cb6..7c49736 100644
--- a/src/plugin/karmaReporterSetup.ts
+++ b/src/plugin/karmaReporterSetup.ts
@@ -1,3 +1,3 @@
-import { KarmaReporter } from "..";
+import { KarmaReporter } from "../reporters/karma";
 
 KarmaReporter.initializeKelonio();
diff --git a/src/plugin/mochaReporter.ts b/src/plugin/mochaReporter.ts
index 48ca13b..eafebab 100644
--- a/src/plugin/mochaReporter.ts
+++ b/src/plugin/mochaReporter.ts
@@ -1,3 +1,3 @@
-import { MochaReporter } from "..";
+import { MochaReporter } from "../reporters/mocha";
 
 export = MochaReporter;
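For orientation, a hedged sketch of how these plugin entry points are typically wired into a Jest config; the `out/` paths are assumptions about the package's compiled layout:

```ts
// jest.config.ts (sketch; running a TS config requires ts-node)
export default {
    reporters: ["default", "kelonio/out/plugin/jestReporter"],
    setupFilesAfterEnv: ["kelonio/out/plugin/jestReporterSetup"],
};
```

The setup file registers the `record` listener that serializes results, and the reporter reads them back at the end of the run.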
-import { benchmark, Benchmark, Measurement } from ".";
-import { BenchmarkFileState } from "./etc";
-
-const MOCHA_EVENT_TEST_BEGIN = "test";
-const MOCHA_EVENT_RUN_END = "end";
-
-type KarmaLoggedRecord = { description: Array<string>, durations: Array<number>, totalDuration: number };
-type Extension = {
-    extraReport?: (benchmark: Benchmark) => string | void;
-};
-type ExtensionLookup = { module: string, extension: string };
-
-interface MochaReporterOptions {
-    reporterOptions: {
-        inferDescriptions?: boolean;
-        printReportAtEnd?: boolean;
-        extensions?: Array<ExtensionLookup>;
-    };
-}
-
-interface KarmaReporterOptions {
-    inferBrowsers?: boolean;
-    printReportAtEnd?: boolean;
-    extensions?: Array<ExtensionLookup>;
-}
-
-interface JestReporterOptions {
-    keepStateAtStart?: boolean;
-    keepStateAtEnd?: boolean;
-    printReportAtEnd?: boolean;
-    extensions?: Array<ExtensionLookup>;
-}
-
-function handleExtraReports(lookups: Array<ExtensionLookup> | undefined, benchmark: Benchmark, print: (report: string) => void): void {
-    for (const lookup of (lookups ?? [])) {
-        const extension: Extension | undefined = require(lookup.module)?.[lookup.extension];
-        const report = extension?.extraReport?.(benchmark);
-        if (report) {
-            print(report);
-        }
-    }
-}
-
-export class JestReporter implements jest.Reporter {
-    options: JestReporterOptions = { keepStateAtStart: false, keepStateAtEnd: false, printReportAtEnd: true };
-
-    constructor(testData?: any, options?: JestReporterOptions) {
-        if (options) {
-            this.options = { ...this.options, ...options };
-        }
-    }
-
-    static initializeKelonio(): void {
-        const state = new BenchmarkFileState();
-        benchmark.events.on("record", (description, measurement) => {
-            const b = new Benchmark();
-            if (state.exists()) {
-                b.data = state.read();
-            }
-            b.incorporate(description, measurement);
-            state.write(b.data);
-        });
-    }
-
-    onRunStart(): void {
-        const state = new BenchmarkFileState();
-        if (this.options.keepStateAtStart) {
-            state.append({});
-        } else {
-            state.write({});
-        }
-    }
-
-    onRunComplete(): void {
-        const state = new BenchmarkFileState();
-        if (!state.exists()) {
-            throw new Error(
-                "The Kelonio reporter for Jest requires benchmark serialization."
-                + " Make sure to call `JestReporter.initializeKelonio()`."
-            );
-        }
-
-        const b = new Benchmark();
-        b.data = state.read();
-
-        if (this.options.printReportAtEnd) {
-            console.log(`\n${b.report()}`);
-            handleExtraReports(this.options.extensions, b, console.log);
-        }
-
-        if (!this.options.keepStateAtEnd) {
-            state.delete();
-        }
-    }
-}
-
-export class KarmaReporter {
-    protected onBrowserLog: (browser: string, log: string, type: string) => void;
-    protected onRunComplete: () => void;
-
-    static initializeKelonio(): void {
-        benchmark.events.on("record", (description, measurement) => {
-            (<any>window).__karma__.log("kelonio", [JSON.stringify({ description, durations: measurement.durations, totalDuration: measurement.totalDuration })]);
-        });
-    }
-
-    constructor(baseReporterDecorator: any, config: { kelonioReporter?: KarmaReporterOptions }, logger: unknown, helper: unknown, formatError: unknown) {
-        baseReporterDecorator(this);
-        const activeConfig = { ...{ inferBrowsers: true, printReportAtEnd: true }, ...config.kelonioReporter };
-        const b = new Benchmark();
-
-        this.onBrowserLog = (browser: string, log: string, type: string) => {
-            if (type === "kelonio") {
-                const parsed: KarmaLoggedRecord = JSON.parse(log.slice(1, -1));
-                const browserDescription = activeConfig.inferBrowsers ? [browser] : [];
-                b.incorporate([...browserDescription, ...parsed.description], new Measurement(parsed.durations, parsed.totalDuration));
-            }
-        };
-
-        this.onRunComplete = () => {
-            if (activeConfig.printReportAtEnd) {
-                (<any>this).write(`${b.report()}\n`);
-                handleExtraReports(activeConfig.extensions, b, msg => (<any>this).write(`${msg}\n`));
-            }
-        };
-    }
-}
-
-export class MochaReporter {
-    constructor(runner: Mocha.Runner, options: MochaReporterOptions) {
-        const b = new Benchmark();
-        let baseDescription: Array<string> = [];
-        const inferDescriptions = options.reporterOptions.inferDescriptions ?? true;
-        const printReportAtEnd = options.reporterOptions.printReportAtEnd ?? true;
-        const extensions = options.reporterOptions.extensions ?? [];
-
-        benchmark.events.on("record", (description, measurement) => {
-            b.incorporate(baseDescription.concat(description), measurement);
-        });
-
-        if (inferDescriptions) {
-            runner.on(MOCHA_EVENT_TEST_BEGIN, test => {
-                baseDescription = test.titlePath();
-            });
-        }
-
-        runner.once(MOCHA_EVENT_RUN_END, () => {
-            if (printReportAtEnd) {
-                console.log(`\n${b.report()}`);
-                handleExtraReports(extensions, b, console.log);
-            }
-        });
-    }
-}
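The `extensions` option above loads a module by name and calls its `extraReport` hook. A hedged sketch of such an extension module; the module name and the metric it prints are invented:

```ts
// my-kelonio-extension.ts (hypothetical module)
import { Benchmark, Criteria } from "kelonio";

export const myExtension = {
    extraReport(benchmark: Benchmark): string | void {
        // Append one extra line to the report: the slowest measurement.
        const slowest = benchmark.find(Criteria.Slowest);
        if (slowest) {
            return `Slowest: ${slowest.description.join(" > ")} (${slowest.mean} ms mean)`;
        }
    },
};

// Referenced from reporter options as:
// { extensions: [{ module: "my-kelonio-extension", extension: "myExtension" }] }
```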
diff --git a/src/reporters/jest.ts b/src/reporters/jest.ts
new file mode 100644
index 0000000..c98fe5e
--- /dev/null
+++ b/src/reporters/jest.ts
@@ -0,0 +1,111 @@
+import * as fsModule from "fs";
+import { Benchmark, benchmark } from "../index";
+import { ExtensionLookup, handleExtraReports } from "./utils";
+
+export const STATE_FILE = ".kelonio.state.json";
+
+let fs: typeof fsModule;
+let canUseFs = true;
+try {
+    fs = require("fs");
+} catch {
+    canUseFs = false;
+}
+
+export class BenchmarkFileState {
+    constructor() {
+        if (!canUseFs) {
+            throw new Error("Unable to access file system");
+        }
+    }
+
+    exists(): boolean {
+        return fs.existsSync(STATE_FILE);
+    }
+
+    read(): any {
+        return JSON.parse(fs.readFileSync(STATE_FILE, "utf-8"));
+    }
+
+    write(data: object): void {
+        fs.writeFileSync(STATE_FILE, JSON.stringify(data), "utf-8");
+    }
+
+    append(data: object): void {
+        let previousData;
+        try {
+            previousData = this.read();
+        } catch {
+            previousData = {};
+        }
+        this.write({ ...previousData, ...data });
+    }
+
+    delete(): void {
+        try {
+            fs.unlinkSync(STATE_FILE);
+        } catch {}
+    }
+}
+
+interface JestReporterOptions {
+    keepStateAtStart?: boolean;
+    keepStateAtEnd?: boolean;
+    printReportAtEnd?: boolean;
+    extensions?: Array<ExtensionLookup>;
+}
+
+export class JestReporter implements jest.Reporter {
+    options: JestReporterOptions = {
+        keepStateAtStart: false,
+        keepStateAtEnd: false,
+        printReportAtEnd: true,
+    };
+
+    constructor(testData?: any, options?: JestReporterOptions) {
+        if (options) {
+            this.options = { ...this.options, ...options };
+        }
+    }
+
+    static initializeKelonio(): void {
+        const state = new BenchmarkFileState();
+        benchmark.events.on("record", (description, measurement) => {
+            const b = new Benchmark();
+            if (state.exists()) {
+                b.data = state.read();
+            }
+            b.incorporate(description, measurement);
+            state.write(b.data);
+        });
+    }
+
+    onRunStart(): void {
+        const state = new BenchmarkFileState();
+        if (this.options.keepStateAtStart) {
+            state.append({});
+        } else {
+            state.write({});
+        }
+    }
+
+    onRunComplete(): void {
+        const state = new BenchmarkFileState();
+        if (!state.exists()) {
+            throw new Error(
+                "The Kelonio reporter for Jest requires benchmark serialization."
+                + " Make sure to call `JestReporter.initializeKelonio()`."
+            );
+        }
+
+        const b = new Benchmark();
+        b.data = state.read();
+
+        if (this.options.printReportAtEnd) {
+            console.log(`\n${b.report()}`);
+            handleExtraReports(this.options.extensions, b, console.log);
+        }
+
+        if (!this.options.keepStateAtEnd) {
+            state.delete();
+        }
+    }
+}
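A hedged sketch of passing the reporter options above through Jest's tuple form; as before, the plugin paths are assumptions about the compiled layout:

```ts
// jest.config.ts (sketch)
export default {
    reporters: [
        "default",
        // Keep the state file after the run, e.g. to aggregate across runs.
        ["kelonio/out/plugin/jestReporter", { keepStateAtEnd: true }],
    ],
    setupFilesAfterEnv: ["kelonio/out/plugin/jestReporterSetup"],
};
```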
diff --git a/src/reporters/karma.ts b/src/reporters/karma.ts
new file mode 100644
index 0000000..105ecdd
--- /dev/null
+++ b/src/reporters/karma.ts
@@ -0,0 +1,68 @@
+import { benchmark, Benchmark, Measurement } from "..";
+import { ExtensionLookup, handleExtraReports } from "./utils";
+
+type KarmaLoggedRecord = { description: Array<string>, durations: Array<number>, totalDuration: number };
+
+interface KarmaReporterOptions {
+    inferBrowsers?: boolean;
+    printReportAtEnd?: boolean;
+    extensions?: Array<ExtensionLookup>;
+}
+
+export class KarmaReporter {
+    protected onBrowserLog: (
+        browser: string,
+        log: string,
+        type: string
+    ) => void;
+    protected onRunComplete: () => void;
+
+    static initializeKelonio(): void {
+        benchmark.events.on("record", (description, measurement) => {
+            (<any>window).__karma__.log("kelonio", [
+                JSON.stringify({
+                    description,
+                    durations: measurement.durations,
+                    totalDuration: measurement.totalDuration,
+                }),
+            ]);
+        });
+    }
+
+    constructor(
+        baseReporterDecorator: any,
+        config: { kelonioReporter?: KarmaReporterOptions },
+        logger: unknown,
+        helper: unknown,
+        formatError: unknown
+    ) {
+        baseReporterDecorator(this);
+        const activeConfig = {
+            ...{ inferBrowsers: true, printReportAtEnd: true },
+            ...config.kelonioReporter,
+        };
+        const b = new Benchmark();
+
+        this.onBrowserLog = (browser: string, log: string, type: string) => {
+            if (type === "kelonio") {
+                const parsed: KarmaLoggedRecord = JSON.parse(log.slice(1, -1));
+                const browserDescription = activeConfig.inferBrowsers
+                    ? [browser]
+                    : [];
+                b.incorporate(
+                    [...browserDescription, ...parsed.description],
+                    new Measurement(parsed.durations, parsed.totalDuration)
+                );
+            }
+        };
+
+        this.onRunComplete = () => {
+            if (activeConfig.printReportAtEnd) {
+                (<any>this).write(`${b.report()}\n`);
+                handleExtraReports(activeConfig.extensions, b, (msg) =>
+                    (<any>this).write(`${msg}\n`)
+                );
+            }
+        };
+    }
+}
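For completeness, a sketch of how this reporter might be registered in a Karma config; the plugin path and the placement of the browser-side setup file in `files` are assumptions, not confirmed by this patch:

```ts
// karma.conf.js (sketch)
module.exports = (config: any) => {
    config.set({
        // Load the kelonio setup first so benchmark events are forwarded to Karma.
        files: ["node_modules/kelonio/out/plugin/karmaReporterSetup.js", "tests/**/*.js"],
        plugins: ["karma-*", require("kelonio/out/plugin/karmaReporter")],
        reporters: ["progress", "kelonio"],
        kelonioReporter: { inferBrowsers: true, printReportAtEnd: true },
    });
};
```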
diff --git a/src/reporters/mocha.ts b/src/reporters/mocha.ts
new file mode 100644
index 0000000..560b1d4
--- /dev/null
+++ b/src/reporters/mocha.ts
@@ -0,0 +1,40 @@
+import { Benchmark, benchmark } from "..";
+import { ExtensionLookup, handleExtraReports } from "./utils";
+
+const MOCHA_EVENT_TEST_BEGIN = "test";
+const MOCHA_EVENT_RUN_END = "end";
+
+interface MochaReporterOptions {
+    reporterOptions: {
+        inferDescriptions?: boolean;
+        printReportAtEnd?: boolean;
+        extensions?: Array<ExtensionLookup>;
+    };
+}
+
+export class MochaReporter {
+    constructor(runner: Mocha.Runner, options: MochaReporterOptions) {
+        const b = new Benchmark();
+        let baseDescription: Array<string> = [];
+        const inferDescriptions = options.reporterOptions.inferDescriptions ?? true;
+        const printReportAtEnd = options.reporterOptions.printReportAtEnd ?? true;
+        const extensions = options.reporterOptions.extensions ?? [];
+
+        benchmark.events.on("record", (description, measurement) => {
+            b.incorporate(baseDescription.concat(description), measurement);
+        });
+
+        if (inferDescriptions) {
+            runner.on(MOCHA_EVENT_TEST_BEGIN, (test) => {
+                baseDescription = test.titlePath();
+            });
+        }
+
+        runner.once(MOCHA_EVENT_RUN_END, () => {
+            if (printReportAtEnd) {
+                console.log(`\n${b.report()}`);
+                handleExtraReports(extensions, b, console.log);
+            }
+        });
+    }
+}
diff --git a/src/reporters/utils.ts b/src/reporters/utils.ts
new file mode 100644
index 0000000..2e31d80
--- /dev/null
+++ b/src/reporters/utils.ts
@@ -0,0 +1,26 @@
+import { Benchmark } from "..";
+
+const HEADER_SIDE = "- ".repeat(17).trim();
+export const HEADER = `${HEADER_SIDE} Performance ${HEADER_SIDE}`;
+export const FOOTER = "- ".repeat(40).trim();
+
+export type Extension = {
+    extraReport?: (benchmark: Benchmark) => string | void;
+};
+export type ExtensionLookup = { module: string; extension: string };
+
+export function handleExtraReports(
+    lookups: Array<ExtensionLookup> | undefined,
+    benchmark: Benchmark,
+    print: (report: string) => void
+): void {
+    for (const lookup of lookups ?? []) {
+        const extension: Extension | undefined = require(lookup.module)?.[lookup.extension];
+        const report = extension?.extraReport?.(benchmark);
+        if (report) {
+            print(report);
+        }
+    }
+}
diff --git a/tests/index.test.ts b/tests/index.test.ts
index 655fb39..433f36f 100644
--- a/tests/index.test.ts
+++ b/tests/index.test.ts
@@ -1,6 +1,6 @@
 import stripIndent from "strip-indent";
 import { Benchmark, Criteria, measure, Measurement, PerformanceError } from "../src";
-import { FOOTER, HEADER } from "../src/etc";
+import { FOOTER, HEADER } from "../src/reporters/utils";
 
 // Using `await util.promisify(setTimeout)(500)` leads to this error in some tests:
 // "Async callback was not invoked within the 5000ms timeout"
diff --git a/tests/reporters.test.ts b/tests/reporters.test.ts
index a6cb859..443bcdf 100644
--- a/tests/reporters.test.ts
+++ b/tests/reporters.test.ts
@@ -1,8 +1,11 @@
 import { EventEmitter } from "events";
 import fs from "fs";
 import stripIndent from "strip-indent";
-import { benchmark, JestReporter, KarmaReporter, Measurement, MochaReporter } from "../src";
-import { FOOTER, HEADER, STATE_FILE } from "../src/etc";
+import { benchmark, Measurement } from "../src";
+import { JestReporter, STATE_FILE } from "../src/reporters/jest";
+import { KarmaReporter } from "../src/reporters/karma";
+import { MochaReporter } from "../src/reporters/mocha";
+import { FOOTER, HEADER } from "../src/reporters/utils";
 
 function makeMochaReporter(reporterOptions: any = {}): [EventEmitter, MochaReporter] {
     const runner = new EventEmitter();
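Finally, a hedged end-to-end sketch of recording inside a Mocha test with inferred descriptions; the test body is invented, and the reporter flag assumes the compiled plugin path:

```ts
// Run with: mocha --reporter kelonio/out/plugin/mochaReporter
import { benchmark } from "kelonio";

describe("string handling", () => {
    it("concatenates quickly", async () => {
        // With inferDescriptions (the default), results are grouped under
        // ["string handling", "concatenates quickly"] automatically.
        await benchmark.record(() => "a" + "b", { iterations: 1000, meanUnder: 1 });
    });
});
```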