Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions benchmark/v8/deserialize.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
'use strict';

const common = require('../common.js');
const v8 = require('v8');

const bench = common.createBenchmark(main, {
n: [1e6]
});

function main({ n }) {
  // Include a BigUint64Array in the payload so the benchmark exercises the
  // host-object deserialization path in lib/v8.js (_readHostObject), not
  // just plain own properties — that is the code path this change optimizes.
  const serialized = v8.serialize({ a: 1, b: new BigUint64Array() });
  bench.start();
  for (let i = 0; i < n; i++)
    v8.deserialize(serialized);
  bench.end(n);
}
15 changes: 15 additions & 0 deletions benchmark/v8/serialize.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
'use strict';

const common = require('../common.js');
const v8 = require('v8');

const bench = common.createBenchmark(main, {
n: [1e6]
});

function main({ n }) {
  // Hoist the typed array out of the loop so per-iteration cost measures
  // serialization, not allocation. Including an ArrayBufferView makes the
  // benchmark exercise the host-object path (_writeHostObject /
  // arrayBufferViewTypeToIndex) in lib/v8.js, which plain `{ a: 1 }` never
  // reaches.
  const typedArray = new BigUint64Array();
  bench.start();
  for (let i = 0; i < n; i++)
    v8.serialize({ a: 1, b: typedArray });
  bench.end(n);
}
64 changes: 33 additions & 31 deletions lib/v8.js
Original file line number Diff line number Diff line change
Expand Up @@ -247,39 +247,41 @@ Deserializer.prototype.readRawBytes = function readRawBytes(length) {

/**
 * Maps an ArrayBufferView to the integer index used to tag its type in the
 * serialized wire format.
 *
 * Classification uses the Object.prototype.toString brand string (via the
 * ObjectPrototypeToString primordial) rather than `instanceof`, so views
 * from other realms are still recognized. A `switch` on the brand string
 * benchmarked at least as fast as object, null-object, and Map lookups in
 * this PR's discussion, so it is kept deliberately.
 *
 * @param {object} abView - Value to classify (expected: an ArrayBufferView).
 * @returns {number} The type index, or -1 if the value is not a recognized
 *   ArrayBufferView type.
 */
function arrayBufferViewTypeToIndex(abView) {
  const type = ObjectPrototypeToString(abView);
  switch (type) {
    case '[object Int8Array]': return 0;
    case '[object Uint8Array]': return 1;
    case '[object Uint8ClampedArray]': return 2;
    case '[object Int16Array]': return 3;
    case '[object Uint16Array]': return 4;
    case '[object Int32Array]': return 5;
    case '[object Uint32Array]': return 6;
    case '[object Float32Array]': return 7;
    case '[object Float64Array]': return 8;
    case '[object DataView]': return 9;
    // Index 10 is FastBuffer, which is tagged by the serializer itself and
    // therefore intentionally absent from this brand-string mapping.
    case '[object BigInt64Array]': return 11;
    case '[object BigUint64Array]': return 12;
    default: return -1;
  }
}

// Inverse lookup table for deserialization: the position of each constructor
// is the wire-format type index produced by arrayBufferViewTypeToIndex, so
// the ORDER of entries is significant — do not reorder or insert.
const arrayBufferTypes = [
Int8Array,
Uint8Array,
Uint8ClampedArray,
Int16Array,
Uint16Array,
Int32Array,
Uint32Array,
Float32Array,
Float64Array,
DataView,
FastBuffer, // index 10: Node.js-internal Buffer implementation
BigInt64Array,
BigUint64Array,
];

class DefaultSerializer extends Serializer {
constructor() {
super();
Expand Down Expand Up @@ -325,7 +327,7 @@ class DefaultDeserializer extends Deserializer {
*/
_readHostObject() {
const typeIndex = this.readUint32();
const ctor = arrayBufferViewIndexToType(typeIndex);
const ctor = arrayBufferTypes[typeIndex];
const byteLength = this.readUint32();
const byteOffset = this._readRawBytes(byteLength);
const BYTES_PER_ELEMENT = ctor.BYTES_PER_ELEMENT || 1;
Expand Down