
Commit 26509ca

implement example apps benchmarking (#117)
Co-authored-by: Victor Berchet <[email protected]>

Parent: 4254f00

11 files changed, +557 -19 lines

benchmarking/.gitignore (+1)

results/

benchmarking/README.md (+17)

# Benchmarking

This directory contains a script for running full end-to-end benchmarks against the example applications.

What the script does:

- takes all the example applications from the [`./examples` directory](../examples/)
  (excluding the ones specified in the `exampleAppsNotToBenchmark` set in [`./src/cloudflare.ts`](./src/cloudflare.ts))
- in parallel, for each application:
  - builds the application by running its `build:worker` script
  - deploys the application to production (with `wrangler deploy`)
  - takes the production deployment URL
  - benchmarks the application's response time by fetching from the deployment URL a number of times

> [!NOTE]
> This is a first cut at benchmarking our solution; later we can take the script in this directory,
> generalize it, and make it more reusable if we want.
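
For illustration, the response-time measurement described in the README boils down to wrapping a `fetch` in `performance.now()` timestamps (this is what `src/benchmarking.ts`, included below, does per iteration). A minimal standalone sketch of one sample; `timeOneFetch` and the URL are hypothetical names used only for this example, and the real script additionally adds random delays, repeats the call, and computes percentiles:

```ts
// Minimal sketch of a single response-time sample, mirroring src/benchmarking.ts.
async function timeOneFetch(url: string): Promise<number> {
  const start = performance.now();
  const resp = await fetch(url);
  const end = performance.now();

  if (!resp.ok) {
    throw new Error(`Failed to fetch from "${url}"`);
  }

  return end - start; // elapsed milliseconds for this single request
}

// usage (placeholder workers.dev URL, not a real deployment):
// const ms = await timeOneFetch("https://my-app.example.workers.dev");
```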

benchmarking/package.json (+14)

{
  "name": "@opennextjs-cloudflare/benchmarking",
  "private": true,
  "type": "module",
  "devDependencies": {
    "tsx": "catalog:",
    "@tsconfig/strictest": "catalog:",
    "@types/node": "catalog:",
    "ora": "^8.1.0"
  },
  "scripts": {
    "benchmark": "tsx src/index.ts"
  }
}

benchmarking/src/benchmarking.ts (+132)

import nodeTimesPromises from "node:timers/promises";
import nodeFsPromises from "node:fs/promises";
import nodePath from "node:path";
import { getPercentile } from "./utils";

export type FetchBenchmark = {
  iterationsMs: number[];
  averageMs: number;
  p90Ms: number;
};

export type BenchmarkingResults = {
  name: string;
  path: string;
  fetchBenchmark: FetchBenchmark;
}[];

type BenchmarkFetchOptions = {
  numberOfIterations?: number;
  maxRandomDelayMs?: number;
  fetch: (deploymentUrl: string) => Promise<Response>;
};

const defaultOptions: Required<Omit<BenchmarkFetchOptions, "fetch">> = {
  numberOfIterations: 20,
  maxRandomDelayMs: 15_000,
};

/**
 * Benchmarks the response time of an application end-to-end by:
 * - building the application
 * - deploying it
 * - and fetching from it (multiple times)
 *
 * @param options.build function implementing how the application is to be built
 * @param options.deploy function implementing how the application is deployed (returning the url of the deployment)
 * @param options.fetch function indicating how to fetch from the application (in case a specific route needs to be hit, cookies need to be applied, etc...)
 * @returns the benchmarking results for the application
 */
export async function benchmarkApplicationResponseTime({
  build,
  deploy,
  fetch,
}: {
  build: () => Promise<void>;
  deploy: () => Promise<string>;
  fetch: (deploymentUrl: string) => Promise<Response>;
}): Promise<FetchBenchmark> {
  await build();
  const deploymentUrl = await deploy();
  return benchmarkFetch(deploymentUrl, { fetch });
}

/**
 * Benchmarks a fetch operation by running it multiple times and computing the average time (in milliseconds) that such a fetch operation takes.
 *
 * @param url The url to fetch from
 * @param options options for the benchmarking
 * @returns the computed average alongside all the single call times
 */
async function benchmarkFetch(url: string, options: BenchmarkFetchOptions): Promise<FetchBenchmark> {
  const benchmarkFetchCall = async () => {
    const preTimeMs = performance.now();
    const resp = await options.fetch(url);
    const postTimeMs = performance.now();

    if (!resp.ok) {
      throw new Error(`Error: Failed to fetch from "${url}"`);
    }

    return postTimeMs - preTimeMs;
  };

  const resolvedOptions = { ...defaultOptions, ...options };

  const iterationsMs = await Promise.all(
    new Array(resolvedOptions.numberOfIterations).fill(null).map(async () => {
      // let's add a random delay before we make the fetch
      await nodeTimesPromises.setTimeout(Math.round(Math.random() * resolvedOptions.maxRandomDelayMs));

      return benchmarkFetchCall();
    })
  );

  const averageMs = iterationsMs.reduce((sum, time) => sum + time) / iterationsMs.length;

  const p90Ms = getPercentile(iterationsMs, 90);

  return {
    iterationsMs,
    averageMs,
    p90Ms,
  };
}

/**
 * Saves benchmarking results in a local json file
 *
 * @param results the benchmarking results to save
 * @returns the path to the created json file
 */
export async function saveResultsToDisk(results: BenchmarkingResults): Promise<string> {
  const date = new Date();

  const fileName = `${toSimpleDateString(date)}.json`;

  const outputFile = nodePath.resolve(`./results/${fileName}`);

  await nodeFsPromises.mkdir(nodePath.dirname(outputFile), { recursive: true });

  const resultStr = JSON.stringify(results, null, 2);
  await nodeFsPromises.writeFile(outputFile, resultStr);

  return outputFile;
}

/**
 * Takes a date and converts it to a simple format that can be used as
 * a filename (which is human readable and doesn't contain special
 * characters)
 *
 * The format being: `YYYY-MM-DD_hh-mm-ss`
 *
 * @param date the date to convert
 * @returns a string representing the date
 */
function toSimpleDateString(date: Date): string {
  const isoString = date.toISOString();
  const isoDate = isoString.split(".")[0]!;

  return isoDate.replace("T", "_").replaceAll(":", "-");
}
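
As the doc comment on `benchmarkApplicationResponseTime` notes, the `fetch` option exists so a caller can hit a specific route or send cookies. A sketch of such a call, assuming the Cloudflare helpers from `./cloudflare` below; the app directory, `/dashboard` route, and cookie value are hypothetical and only illustrate that case:

```ts
import { benchmarkApplicationResponseTime } from "./benchmarking";
import * as cloudflare from "./cloudflare";

// Hypothetical example app; the route and cookie are illustrative only.
const appDir = "../examples/my-app";

const result = await benchmarkApplicationResponseTime({
  build: () => cloudflare.buildApp(appDir),
  deploy: () => cloudflare.deployBuiltApp(appDir),
  fetch: (deploymentUrl) =>
    fetch(`${deploymentUrl}/dashboard`, {
      headers: { cookie: "session=example" },
    }),
});

console.log(`average: ${result.averageMs}ms, p90: ${result.p90Ms}ms`);
```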

benchmarking/src/cloudflare.ts (+123)

import nodeFsPromises from "node:fs/promises";
import nodeFs from "node:fs";
import nodePath from "node:path";
import nodeChildProcess from "node:child_process";

await ensureWranglerSetup();

/**
 * Collects the names and absolute paths of the apps (in this repository) that we want to benchmark
 *
 * @returns Array of objects containing the app's name and absolute path
 */
export async function collectAppPathsToBenchmark(): Promise<
  {
    name: string;
    path: string;
  }[]
> {
  const allExampleNames = await nodeFsPromises.readdir("../examples");

  /**
   * Example applications that we don't want to benchmark
   *
   * Currently we only want to skip the `vercel-commerce` example, and that's simply
   * because it requires a Shopify-specific setup and secrets.
   */
  const exampleAppsNotToBenchmark = new Set(["vercel-commerce"]);

  const examplePaths = allExampleNames
    .filter((exampleName) => !exampleAppsNotToBenchmark.has(exampleName))
    .map((exampleName) => ({
      name: exampleName,
      path: nodePath.resolve(`../examples/${exampleName}`),
    }));

  return examplePaths;
}

/**
 * Builds an application using its "build:worker" script
 * (an error is thrown if the application doesn't have such a script)
 *
 * @param dir Path to the application to build
 */
export async function buildApp(dir: string): Promise<void> {
  const packageJsonPath = `${dir}/package.json`;
  if (!nodeFs.existsSync(packageJsonPath)) {
    throw new Error(`Error: package.json for app at "${dir}" not found`);
  }

  const packageJsonContent = JSON.parse(await nodeFsPromises.readFile(packageJsonPath, "utf8"));

  const buildScript = "build:worker";

  if (!packageJsonContent.scripts?.[buildScript]) {
    throw new Error(`Error: package.json for app at "${dir}" does not include a "${buildScript}" script`);
  }

  const command = `pnpm ${buildScript}`;

  return new Promise((resolve, reject) => {
    nodeChildProcess.exec(command, { cwd: dir }, (error) => {
      if (error) {
        return reject(error);
      }
      return resolve();
    });
  });
}

/**
 * Deploys a built application using wrangler
 *
 * @param dir Path to the application to deploy
 * @returns the url of the deployed application
 */
export async function deployBuiltApp(dir: string): Promise<string> {
  return new Promise<string>((resolve, reject) => {
    nodeChildProcess.exec("pnpm exec wrangler deploy", { cwd: dir }, (error, stdout) => {
      if (error) {
        return reject(error);
      }

      const deploymentUrl = stdout.match(/\bhttps:\/\/(?:[a-zA-Z0-9.\-])*\.workers\.dev\b/)?.[0];

      if (!deploymentUrl) {
        return reject(new Error(`Could not obtain a deployment url for app at "${dir}"`));
      }

      return resolve(deploymentUrl);
    });
  });
}

/**
 * Makes sure that everything is set up so that wrangler can actually deploy the applications.
 * This means that:
 * - the user has logged in
 * - if they have more than one account, they have set a CLOUDFLARE_ACCOUNT_ID env variable
 */
async function ensureWranglerSetup(): Promise<void> {
  return new Promise((resolve, reject) => {
    nodeChildProcess.exec("pnpm dlx wrangler whoami", (error, stdout) => {
      if (error) {
        return reject(error);
      }

      if (stdout.includes("You are not authenticated")) {
        return reject(new Error("Please log in using wrangler by running `pnpm dlx wrangler login`"));
      }

      if (!(process.env as Record<string, unknown>)["CLOUDFLARE_ACCOUNT_ID"]) {
        return reject(
          new Error(
            "Please set the CLOUDFLARE_ACCOUNT_ID environment variable to the id of the account you want to use to deploy the applications"
          )
        );
      }

      return resolve();
    });
  });
}
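
`deployBuiltApp` relies on `wrangler deploy` printing the deployment's `workers.dev` URL to stdout and extracts it with a regex. A small sketch of that extraction against a hypothetical output snippet (the exact output format belongs to wrangler and is not guaranteed by this script):

```ts
// Sketch only: the stdout below is a made-up stand-in for wrangler's output.
const stdout = `
Uploaded my-app (1.23 sec)
Deployed my-app triggers (0.45 sec)
  https://my-app.my-account.workers.dev
`;

// Same regex as in deployBuiltApp: match an https URL ending in .workers.dev
const deploymentUrl = stdout.match(/\bhttps:\/\/(?:[a-zA-Z0-9.\-])*\.workers\.dev\b/)?.[0];

console.log(deploymentUrl); // "https://my-app.my-account.workers.dev"
```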

benchmarking/src/index.ts (+42)

import nodeTimesPromises from "node:timers/promises";
import * as cloudflare from "./cloudflare";
import { benchmarkApplicationResponseTime, BenchmarkingResults, saveResultsToDisk } from "./benchmarking";
import { parallelRunWithSpinner } from "./utils";

const appPathsToBenchmark = await cloudflare.collectAppPathsToBenchmark();

const benchmarkingResults: BenchmarkingResults = await parallelRunWithSpinner(
  "Benchmarking Apps",
  appPathsToBenchmark.map(({ name, path }, i) => async () => {
    // stagger each app's run by one second so they don't all start at the exact same time
    await nodeTimesPromises.setTimeout(i * 1_000);
    const fetchBenchmark = await benchmarkApplicationResponseTime({
      build: async () => cloudflare.buildApp(path),
      deploy: async () => cloudflare.deployBuiltApp(path),
      fetch,
    });

    return {
      name,
      path,
      fetchBenchmark,
    };
  })
);

console.log();

const outputFile = await saveResultsToDisk(benchmarkingResults);

console.log(`The benchmarking results have been written to ${outputFile}`);

console.log("\n\nSummary: ");
const summary = benchmarkingResults.map(({ name, fetchBenchmark }) => ({
  name,
  "average fetch duration (ms)": Math.round(fetchBenchmark.averageMs),
  "90th percentile (ms)": Math.round(fetchBenchmark.p90Ms),
}));
console.table(summary);

console.log();

process.exit(0);
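
The file written by `saveResultsToDisk` is the `BenchmarkingResults` array serialized as JSON. A sketch of one entry's shape, expressed as a typed literal; the app name, path, and all numbers are purely illustrative, not real measurements:

```ts
import type { BenchmarkingResults } from "./benchmarking";

// Illustrative only: shows the shape of a saved entry, not actual benchmark data.
const exampleResults: BenchmarkingResults = [
  {
    name: "my-app", // hypothetical example app name
    path: "/absolute/path/to/examples/my-app",
    fetchBenchmark: {
      iterationsMs: [120, 95, 110], // one entry per fetch iteration (20 by default)
      averageMs: 108.3,
      p90Ms: 118,
    },
  },
];
```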

benchmarking/src/utils.ts (+61)

import ora from "ora";

/**
 * Runs a list of operations in parallel while presenting a loading spinner with some text
 *
 * @param spinnerText The text to add to the spinner
 * @param operations The operations to run
 * @returns The operations' results
 */
export async function parallelRunWithSpinner<T>(
  spinnerText: string,
  operations: (() => Promise<T>)[]
): Promise<T[]> {
  const spinner = ora({
    discardStdin: false,
    hideCursor: false,
  }).start();

  let doneCount = 0;

  const updateSpinnerText = () => {
    spinner.text = `${spinnerText} (${doneCount}/${operations.length})`;
  };

  updateSpinnerText();

  const results = await Promise.all(
    operations.map(async (operation) => {
      const result = await operation();
      doneCount++;
      updateSpinnerText();
      return result;
    })
  );

  spinner.stop();

  return results;
}

/**
 * Gets a specific percentile for a given set of numbers
 *
 * @param data the data whose percentile value needs to be computed
 * @param percentile the requested percentile (a number between 0 and 100)
 * @returns the computed percentile
 */
export function getPercentile(data: number[], percentile: number): number {
  if (Number.isNaN(percentile) || percentile < 0 || percentile > 100) {
    throw new Error(`A percentile needs to be between 0 and 100, found: ${percentile}`);
  }

  // sort a copy so the caller's array is not mutated
  data = [...data].sort((a, b) => a - b);

  // linear interpolation between the two closest ranks
  const rank = (percentile / 100) * (data.length - 1);

  const rankInt = Math.floor(rank);
  const rankFract = rank - rankInt;

  const lower = data[rankInt]!;
  const upper = data[rankInt + 1] ?? lower; // rank can land exactly on the last index (e.g. the 100th percentile)

  return Math.round(lower + rankFract * (upper - lower));
}
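
A quick worked example of the rank interpolation in `getPercentile`, assuming it is called from a sibling module in `src/`: for five sorted samples, the 90th percentile falls at rank 0.9 × 4 = 3.6, i.e. 60% of the way between the 4th and 5th values.

```ts
import { getPercentile } from "./utils";

// data [100, 200, 300, 400, 500], 90th percentile:
// rank = (90 / 100) * (5 - 1) = 3.6  ->  rankInt = 3, rankFract = 0.6
// result = 400 + 0.6 * (500 - 400) = 460
console.log(getPercentile([100, 200, 300, 400, 500], 90)); // 460
```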
