Skip to content

Commit 198fb72

Browse files
authored Feb 6, 2025
chore(NODE-6720): migrate multibench tests (#4399)
1 parent a1c83de commit 198fb72

10 files changed

+284
-4
lines changed
 

‎.evergreen/config.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -3323,7 +3323,7 @@ tasks:
33233323
tags:
33243324
- run-spec-benchmark-tests
33253325
- performance
3326-
exec_timeout_secs: 3600
3326+
exec_timeout_secs: 18000
33273327
commands:
33283328
- command: expansions.update
33293329
type: setup

‎.evergreen/generate_evergreen_tasks.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -756,7 +756,7 @@ function addPerformanceTasks() {
756756
const makePerfTaskNEW = (name, MONGODB_CLIENT_OPTIONS) => ({
757757
name,
758758
tags: ['run-spec-benchmark-tests', 'performance'],
759-
exec_timeout_secs: 3600,
759+
exec_timeout_secs: 18000,
760760
commands: [
761761
updateExpansions({
762762
NODE_LTS_VERSION: 'v22.11.0',
+65
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
# Node.js Driver Benchmarks
2+
3+
Set up the driver for development (`npm ci` in the top level of this repo).
4+
5+
Then:
6+
7+
```sh
8+
npm start
9+
```
10+
11+
will build the benchmarks and run them.
12+
13+
## Environment Configuration and Setup
14+
15+
The benchmarks respond to a few environment variables:
16+
17+
- `MONGODB_URI`
18+
- The connection string to run operations against.
19+
CI uses a standalone server; you should be able to launch any cluster and point the benchmarks at it via this env var.
20+
- default: `"mongodb://localhost:27017"`
21+
- `MONGODB_DRIVER_PATH`
22+
- The path to the MongoDB Node.js driver.
23+
This MUST be set to the _directory_ the driver is installed in.
24+
**NOT** the file "lib/index.js" that is the driver's export.
25+
- default: 4 directories above driver.mjs (should be the root of this repo)
26+
- `MONGODB_CLIENT_OPTIONS`
27+
- A JSON string that will be passed to the MongoClient constructor
28+
- default: `"{}"`
29+
30+
## Running individual benchmarks
31+
32+
`main.mjs` loops and launches the bench runner for you.
33+
34+
You can launch `runner.mjs` directly and tell it which benchmark to run.
35+
36+
```sh
37+
node lib/runner.mjs suites/multi_bench/grid_fs_upload.mjs
38+
```
39+
40+
## Writing your own benchmark
41+
42+
In the suites directory you can add a new suite folder or add a new `.mts` file to an existing one.
43+
44+
A benchmark must export the following:
45+
46+
```ts
47+
type BenchmarkModule = {
48+
taskSize: number;
49+
before?: () => Promise<void>;
50+
beforeEach?: () => Promise<void>;
51+
run: () => Promise<void>;
52+
afterEach?: () => Promise<void>;
53+
after?: () => Promise<void>;
54+
};
55+
```
56+
57+
Just like mocha, we have once-before (`before`) and once-after (`after`) hooks as well as `beforeEach` and `afterEach` hooks.
58+
59+
The `driver.mts` module is intended to hold various helpers for setup and teardown and help abstract some of the driver API.
60+
61+
## Wishlist
62+
63+
- Make it so runner can handle: `./lib/suites/multi_bench/grid_fs_upload.mjs` as an argument so shell path autocomplete makes it easier to pick a benchmark
64+
- Make `main.mjs` accept a filter of some kind to run some of the benchmarks
65+
- TBD

‎test/benchmarks/driver_bench/src/driver.mts

+5-2
Original file line numberDiff line numberDiff line change
@@ -88,8 +88,11 @@ export const MONGODB_CLIENT_OPTIONS = (() => {
8888
})();
8989

9090
export const MONGODB_URI = (() => {
91-
if (process.env.MONGODB_URI?.length) return process.env.MONGODB_URI;
92-
return 'mongodb://127.0.0.1:27017';
91+
const connectionString = process.env.MONGODB_URI;
92+
if (connectionString?.length) {
93+
return connectionString;
94+
}
95+
return 'mongodb://localhost:27017';
9396
})();
9497

9598
export function snakeToCamel(name: string) {
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
import { driver, type mongodb } from '../../driver.mjs';
2+
3+
export const taskSize = 16;
4+
5+
let db: mongodb.Db;
6+
7+
export async function before() {
8+
await driver.drop();
9+
await driver.create();
10+
11+
db = driver.db;
12+
}
13+
14+
export async function run() {
15+
await db
16+
.aggregate([
17+
{ $documents: [{}] },
18+
{
19+
$set: {
20+
field: {
21+
$reduce: {
22+
input: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
23+
initialValue: [0],
24+
in: { $concatArrays: ['$$value', '$$value'] }
25+
}
26+
}
27+
}
28+
},
29+
{ $unwind: '$field' },
30+
{ $limit: 1000000 }
31+
])
32+
.toArray();
33+
}
34+
35+
export async function after() {
36+
await driver.drop();
37+
await driver.close();
38+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
import { driver, type mongodb } from '../../driver.mjs';
2+
3+
export const taskSize = 1500;
4+
5+
let db: mongodb.Db;
6+
let tweet: Record<string, any>;
7+
8+
export async function before() {
9+
await driver.drop();
10+
await driver.create();
11+
12+
tweet = await driver.load('single_and_multi_document/tweet.json', 'json');
13+
14+
db = driver.db;
15+
}
16+
17+
export async function run() {
18+
await db
19+
.aggregate([
20+
{ $documents: [tweet] },
21+
{
22+
$set: {
23+
field: {
24+
$reduce: {
25+
input: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
26+
initialValue: [0],
27+
in: { $concatArrays: ['$$value', '$$value'] }
28+
}
29+
}
30+
}
31+
},
32+
{ $unwind: '$field' },
33+
{ $limit: 1000000 }
34+
])
35+
.toArray();
36+
}
37+
38+
export async function after() {
39+
await driver.drop();
40+
await driver.close();
41+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
import { driver, type mongodb } from '../../driver.mjs';
2+
3+
export const taskSize = 16.22;
4+
5+
let collection: mongodb.Collection;
6+
7+
export async function before() {
8+
await driver.drop();
9+
await driver.create();
10+
11+
const tweet = await driver.load('single_and_multi_document/tweet.json', 'json');
12+
await driver.insertManyOf(tweet, 10000);
13+
14+
collection = driver.collection;
15+
}
16+
17+
export async function run() {
18+
await collection.find({}).toArray();
19+
}
20+
21+
export async function after() {
22+
await driver.drop();
23+
await driver.close();
24+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
import { Readable, Writable } from 'node:stream';
2+
import { pipeline } from 'node:stream/promises';
3+
4+
import { driver, type mongodb } from '../../driver.mjs';
5+
6+
export const taskSize = 52.43;
7+
8+
let bucket: mongodb.GridFSBucket;
9+
let bin: Uint8Array;
10+
let _id: mongodb.ObjectId;
11+
const devNull = () => new Writable({ write: (_, __, callback) => callback() });
12+
13+
export async function before() {
14+
bin = await driver.load('single_and_multi_document/gridfs_large.bin', 'buffer');
15+
16+
await driver.drop();
17+
await driver.create();
18+
19+
bucket = driver.bucket;
20+
21+
await bucket.drop().catch(() => null);
22+
23+
// Create the bucket.
24+
const stream = bucket.openUploadStream('gridfstest');
25+
const largeBin = Readable.from(bin);
26+
await pipeline(largeBin, stream);
27+
28+
_id = stream.id;
29+
}
30+
31+
export async function run() {
32+
const downloadStream = bucket.openDownloadStream(_id);
33+
await pipeline(downloadStream, devNull());
34+
}
35+
36+
export async function after() {
37+
await driver.drop();
38+
await driver.close();
39+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
import { Readable } from 'node:stream';
2+
import { pipeline } from 'node:stream/promises';
3+
4+
import { driver, type mongodb } from '../../driver.mjs';
5+
6+
export const taskSize = 52.43;
7+
8+
let bucket: mongodb.GridFSBucket;
9+
let uploadStream: mongodb.GridFSBucketWriteStream;
10+
let bin: Uint8Array;
11+
12+
export async function before() {
13+
bin = await driver.load('single_and_multi_document/gridfs_large.bin', 'buffer');
14+
15+
await driver.drop();
16+
await driver.create();
17+
18+
bucket = driver.bucket;
19+
20+
await bucket.drop().catch(() => null);
21+
}
22+
23+
export async function beforeEach() {
24+
uploadStream = bucket.openUploadStream('gridfstest');
25+
26+
// Create the bucket.
27+
const stream = bucket.openUploadStream('setup-file.txt');
28+
const oneByteFile = Readable.from('a');
29+
await pipeline(oneByteFile, stream);
30+
}
31+
32+
export async function run() {
33+
const uploadData = Readable.from(bin);
34+
await pipeline(uploadData, uploadStream);
35+
}
36+
37+
export async function after() {
38+
await driver.drop();
39+
await driver.close();
40+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
import { driver, type mongodb } from '../../driver.mjs';
2+
3+
export const taskSize = 27.31;
4+
5+
let collection: mongodb.Collection;
6+
let documents: any[];
7+
let largeDoc: any;
8+
9+
export async function before() {
10+
largeDoc = await driver.load('single_and_multi_document/large_doc.json', 'json');
11+
}
12+
13+
export async function beforeEach() {
14+
await driver.drop();
15+
await driver.create();
16+
17+
// Make new "documents" so the _id field is not carried over from the last run
18+
documents = Array.from({ length: 10 }, () => ({ ...largeDoc })) as any[];
19+
20+
collection = driver.collection;
21+
}
22+
23+
export async function run() {
24+
await collection.insertMany(documents, { ordered: true });
25+
}
26+
27+
export async function after() {
28+
await driver.drop();
29+
await driver.close();
30+
}

0 commit comments

Comments
 (0)
Please sign in to comment.