perf_hooks: simplify perf_hooks

Remove the `performance.getEntries()` and `performance.clear*()`
variants and eliminate the accumulation of entries on the global
timeline. The design of this part of the API is a memory-leak and
performance footgun. The `PerformanceObserver` API is a better,
more transient way to consume the data.

PR-URL: https://github.com/nodejs/node/pull/19563
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
James M Snell 2018-03-23 11:23:22 -07:00
parent d54f651910
commit 2ec6995555
9 changed files with 38 additions and 320 deletions
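In short, the commit moves consumers from polling the global timeline to subscribing with `PerformanceObserver`. A minimal sketch of the two styles, assuming a Node.js build that includes this change (`setImmediate()` merely stands in for asynchronous work):

```js
const { PerformanceObserver, performance } = require('perf_hooks');

// Old style (removed by this commit): poll and prune the global timeline.
// performance.measure('A to B', 'A', 'B');
// const [measure] = performance.getEntriesByName('A to B');
// console.log(measure.duration);
// performance.clearMeasures();

// New style: entries are delivered to the observer and not retained globally.
const obs = new PerformanceObserver((items) => {
  console.log(items.getEntries()[0].duration);
  performance.clearMarks(); // clears mark names so they can be reused
  obs.disconnect();
});
obs.observe({ entryTypes: ['measure'] });

performance.mark('A');
setImmediate(() => {
  performance.mark('B');
  performance.measure('A to B', 'A', 'B');
});
```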

View File

@@ -10,14 +10,18 @@ is to support collection of high resolution performance metrics.
This is the same Performance API as implemented in modern Web browsers.
```js
const { performance } = require('perf_hooks');
const { PerformanceObserver, performance } = require('perf_hooks');
const obs = new PerformanceObserver((items) => {
console.log(items.getEntries()[0].duration);
performance.clearMarks();
});
obs.observe({ entryTypes: ['measure'] });
performance.mark('A');
doSomeLongRunningProcess(() => {
performance.mark('B');
performance.measure('A to B', 'A', 'B');
const measure = performance.getEntriesByName('A to B')[0];
console.log(measure.duration);
// Prints the number of milliseconds between Mark 'A' and Mark 'B'
});
```
@@ -26,35 +30,6 @@ doSomeLongRunningProcess(() => {
added: v8.5.0
-->
The `Performance` provides access to performance metric data. A single
instance of this class is provided via the `performance` property.
### performance.clearEntries(name)
<!-- YAML
added: v9.5.0
-->
Remove all performance entry objects with `entryType` equal to `name` from the
Performance Timeline.
### performance.clearFunctions([name])
<!-- YAML
added: v8.5.0
-->
* `name` {string}
If `name` is not provided, removes all `PerformanceFunction` objects from the
Performance Timeline. If `name` is provided, removes entries with `name`.
### performance.clearGC()
<!-- YAML
added: v8.5.0
-->
Remove all performance entry objects with `entryType` equal to `gc` from the
Performance Timeline.
### performance.clearMarks([name])
<!-- YAML
added: v8.5.0
@@ -65,53 +40,6 @@ added: v8.5.0
If `name` is not provided, removes all `PerformanceMark` objects from the
Performance Timeline. If `name` is provided, removes only the named mark.
### performance.clearMeasures([name])
<!-- YAML
added: v8.5.0
-->
* `name` {string}
If `name` is not provided, removes all `PerformanceMeasure` objects from the
Performance Timeline. If `name` is provided, removes only objects whose
`performanceEntry.name` matches `name`.
### performance.getEntries()
<!-- YAML
added: v8.5.0
-->
* Returns: {Array}
Returns a list of all `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime`.
### performance.getEntriesByName(name[, type])
<!-- YAML
added: v8.5.0
-->
* `name` {string}
* `type` {string}
* Returns: {Array}
Returns a list of all `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime` whose `performanceEntry.name` is
equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to
`type`.
### performance.getEntriesByType(type)
<!-- YAML
added: v8.5.0
-->
* `type` {string}
* Returns: {Array}
Returns a list of all `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime` whose `performanceEntry.entryType`
is equal to `type`.
### performance.mark([name])
<!-- YAML
added: v8.5.0
@@ -125,20 +53,6 @@ Creates a new `PerformanceMark` entry in the Performance Timeline. A
`performanceEntry.duration` is always `0`. Performance marks are used
to mark specific significant moments in the Performance Timeline.
### performance.maxEntries
<!-- YAML
added: v9.6.0
-->
Value: {number}
The maximum number of Performance Entry items that should be added to the
Performance Timeline. This limit is not strictly enforced, but a process
warning will be emitted if the number of entries in the timeline exceeds
this limit.
Defaults to 150.
### performance.measure(name, startMark, endMark)
<!-- YAML
added: v8.5.0
@@ -220,7 +134,6 @@ const wrapped = performance.timerify(someFunction);
const obs = new PerformanceObserver((list) => {
console.log(list.getEntries()[0].duration);
obs.disconnect();
performance.clearFunctions();
});
obs.observe({ entryTypes: ['function'] });
@@ -608,7 +521,6 @@ hook.enable();
const obs = new PerformanceObserver((list, observer) => {
console.log(list.getEntries()[0]);
performance.clearMarks();
performance.clearMeasures();
observer.disconnect();
});
obs.observe({ entryTypes: ['measure'], buffered: true });
@@ -642,8 +554,6 @@ const obs = new PerformanceObserver((list) => {
console.log(`require('${entry[0]}')`, entry.duration);
});
obs.disconnect();
// Free memory
performance.clearFunctions();
});
obs.observe({ entryTypes: ['function'], buffered: true });
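With `clearFunctions()` gone from the `timerify()` examples above, disconnecting the observer is the only cleanup left. A minimal sketch of the resulting pattern (`someFunction` is just a stand-in for work worth timing):

```js
const { PerformanceObserver, performance } = require('perf_hooks');

// A stand-in for any synchronous work worth timing.
function someFunction() {
  for (let i = 0; i < 1e6; i++);
}

const wrapped = performance.timerify(someFunction);

const obs = new PerformanceObserver((list) => {
  console.log(list.getEntries()[0].duration);
  obs.disconnect(); // no clearFunctions(); nothing accumulates on a global timeline
});
obs.observe({ entryTypes: ['function'] });

wrapped(); // emits a 'function' entry to the observer
```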

View File

@@ -3,6 +3,7 @@
const {
PerformanceEntry,
mark: _mark,
clearMark: _clearMark,
measure: _measure,
milestones,
observerCounts,
@@ -50,17 +51,11 @@ const kBuffering = Symbol('buffering');
const kQueued = Symbol('queued');
const kTimerified = Symbol('timerified');
const kInsertEntry = Symbol('insert-entry');
const kIndexEntry = Symbol('index-entry');
const kClearEntry = Symbol('clear-entry');
const kGetEntries = Symbol('get-entries');
const kIndex = Symbol('index');
const kMarks = Symbol('marks');
const kCount = Symbol('count');
const kMaxCount = Symbol('max-count');
const kDefaultMaxCount = 150;
observerCounts[NODE_PERFORMANCE_ENTRY_TYPE_MARK] = 1;
observerCounts[NODE_PERFORMANCE_ENTRY_TYPE_MEASURE] = 1;
const observers = {};
const observerableTypes = [
'node',
@@ -286,17 +281,12 @@ class PerformanceObserverEntryList {
const item = { entry };
L.append(this[kEntries], item);
this[kCount]++;
this[kIndexEntry](item);
}
get length() {
return this[kCount];
}
[kIndexEntry](entry) {
// Default implementation does nothing
}
[kGetEntries](name, type) {
const ret = [];
const list = this[kEntries];
@@ -407,72 +397,11 @@ class PerformanceObserver extends AsyncResource {
}
}
class Performance extends PerformanceObserverEntryList {
class Performance {
constructor() {
super();
this[kIndex] = {
[kMarks]: new Set()
};
this[kMaxCount] = kDefaultMaxCount;
this[kInsertEntry](nodeTiming);
}
set maxEntries(val) {
if (typeof val !== 'number' || val >>> 0 !== val) {
const errors = lazyErrors();
throw new errors.ERR_INVALID_ARG_TYPE('val', 'number', val);
}
this[kMaxCount] = Math.max(1, val >>> 0);
}
get maxEntries() {
return this[kMaxCount];
}
[kIndexEntry](item) {
const index = this[kIndex];
const type = item.entry.entryType;
let items = index[type];
if (!items) {
items = index[type] = {};
L.init(items);
}
const entry = item.entry;
L.append(items, { entry, item });
const count = this[kCount];
if (count > this[kMaxCount]) {
const text = count === 1 ? 'is 1 entry' : `are ${count} entries`;
process.emitWarning('Possible perf_hooks memory leak detected. ' +
`There ${text} in the ` +
'Performance Timeline. Use the clear methods ' +
'to remove entries that are no longer needed or ' +
'set performance.maxEntries equal to a higher ' +
'value (currently the maxEntries is ' +
`${this[kMaxCount]}).`);
}
}
[kClearEntry](type, name) {
const index = this[kIndex];
const items = index[type];
if (!items) return;
let item = L.peek(items);
while (item && item !== items) {
const entry = item.entry;
const next = item._idlePrev;
if (name !== undefined) {
if (entry.name === `${name}`) {
L.remove(item); // remove from the index
L.remove(item.item); // remove from the master
this[kCount]--;
}
} else {
L.remove(item); // remove from the index
L.remove(item.item); // remove from the master
this[kCount]--;
}
item = next;
}
}
get nodeTiming() {
@@ -507,27 +436,13 @@ class Performance extends PerformanceObserverEntryList {
clearMarks(name) {
name = name !== undefined ? `${name}` : name;
this[kClearEntry]('mark', name);
if (name !== undefined)
if (name !== undefined) {
this[kIndex][kMarks].delete(name);
else
_clearMark(name);
} else {
this[kIndex][kMarks].clear();
}
clearMeasures(name) {
this[kClearEntry]('measure', name);
}
clearGC() {
this[kClearEntry]('gc');
}
clearFunctions(name) {
this[kClearEntry]('function', name);
}
clearEntries(name) {
this[kClearEntry](name);
_clearMark();
}
}
timerify(fn) {
@@ -563,7 +478,6 @@ class Performance extends PerformanceObserverEntryList {
[kInspect]() {
return {
maxEntries: this.maxEntries,
nodeTiming: this.nodeTiming,
timeOrigin: this.timeOrigin
};
@@ -595,7 +509,6 @@ function observersCallback(entry) {
if (type === NODE_PERFORMANCE_ENTRY_TYPE_HTTP2)
collectHttp2Stats(entry);
performance[kInsertEntry](entry);
const list = getObserversList(type);
let current = L.peek(list);
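Because `Performance` no longer extends `PerformanceObserverEntryList`, the accessor and bulk-clear methods disappear from the `performance` object, while `mark()`, `measure()`, `clearMarks()` and `timerify()` remain. A rough check of the resulting surface, assuming a build with this commit applied:

```js
const { performance } = require('perf_hooks');

// Removed by this commit: the global-timeline accessors and clearers.
console.log(typeof performance.getEntries);        // 'undefined'
console.log(typeof performance.getEntriesByName);  // 'undefined'
console.log(typeof performance.clearFunctions);    // 'undefined'
console.log(typeof performance.maxEntries);        // 'undefined'

// Still present: marking, measuring, and timerifying.
console.log(typeof performance.mark);       // 'function'
console.log(typeof performance.measure);    // 'function'
console.log(typeof performance.clearMarks); // 'function'
console.log(typeof performance.timerify);   // 'function'
```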

View File

@@ -130,7 +130,7 @@ void PerformanceEntry::Notify(Environment* env,
object.As<Object>(),
env->performance_entry_callback(),
1, &object,
node::async_context{0, 0}).ToLocalChecked();
node::async_context{0, 0});
}
}
@@ -153,6 +153,17 @@ void Mark(const FunctionCallbackInfo<Value>& args) {
args.GetReturnValue().Set(obj);
}
void ClearMark(const FunctionCallbackInfo<Value>& args) {
Environment* env = Environment::GetCurrent(args);
auto marks = env->performance_marks();
if (args.Length() == 0) {
marks->clear();
} else {
Utf8Value name(env->isolate(), args[0]);
marks->erase(*name);
}
}
inline uint64_t GetPerformanceMark(Environment* env, std::string name) {
auto marks = env->performance_marks();
@@ -395,6 +406,7 @@ void Initialize(Local<Object> target,
target->Set(context, performanceEntryString, fn).FromJust();
env->set_performance_entry_template(fn);
env->SetMethod(target, "clearMark", ClearMark);
env->SetMethod(target, "mark", Mark);
env->SetMethod(target, "measure", Measure);
env->SetMethod(target, "markMilestone", MarkMilestone);
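The new `ClearMark` binding mirrors the two call shapes used by `clearMarks()` in the JavaScript layer: with a name it erases a single entry from the native marks map, and with no argument it clears the map entirely. From the JavaScript side this looks like:

```js
const { performance } = require('perf_hooks');

performance.mark('A');
performance.mark('B');

performance.clearMarks('A'); // native clearMark('A'): erases only that mark
performance.clearMarks();    // native clearMark(): clears the whole marks map
```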

View File

@@ -6,7 +6,7 @@ if (!common.hasCrypto)
const assert = require('assert');
const h2 = require('http2');
const { PerformanceObserver, performance } = require('perf_hooks');
const { PerformanceObserver } = require('perf_hooks');
const obs = new PerformanceObserver(common.mustCall((items) => {
const entry = items.getEntries()[0];
@@ -46,7 +46,6 @@ const obs = new PerformanceObserver(common.mustCall((items) => {
default:
assert.fail('invalid entry name');
}
performance.clearEntries('http2');
}, 4));
obs.observe({ entryTypes: ['http2'] });
@@ -101,10 +100,3 @@ server.on('listening', common.mustCall(() => {
}));
}));
process.on('exit', () => {
const entries = performance.getEntries();
// There shouldn't be any http2 entries left over.
assert.strictEqual(entries.length, 1);
assert.strictEqual(entries[0], performance.nodeTiming);
});

View File

@@ -11,9 +11,6 @@ const {
{
// Intentional non-op. Do not wrap in common.mustCall();
const n = performance.timerify(() => {});
n();
const entries = performance.getEntriesByType('function');
assert.strictEqual(entries.length, 0);
const obs = new PerformanceObserver(common.mustCall((list) => {
const entries = list.getEntries();
@@ -24,7 +21,6 @@ const {
assert.strictEqual(typeof entry.duration, 'number');
assert.strictEqual(typeof entry.startTime, 'number');
obs.disconnect();
performance.clearFunctions();
}));
obs.observe({ entryTypes: ['function'] });
n();
@@ -39,17 +35,12 @@ const {
throw new Error('test');
});
assert.throws(() => n(), /^Error: test$/);
const entries = performance.getEntriesByType('function');
assert.strictEqual(entries.length, 0);
obs.disconnect();
}
{
class N {}
const n = performance.timerify(N);
new n();
const entries = performance.getEntriesByType('function');
assert.strictEqual(entries.length, 0);
const obs = new PerformanceObserver(common.mustCall((list) => {
const entries = list.getEntries();
@@ -62,7 +53,6 @@ const {
assert.strictEqual(typeof entry.duration, 'number');
assert.strictEqual(typeof entry.startTime, 'number');
obs.disconnect();
performance.clearFunctions();
}));
obs.observe({ entryTypes: ['function'] });

View File

@@ -4,7 +4,6 @@
const common = require('../common');
const assert = require('assert');
const {
performance,
PerformanceObserver
} = require('perf_hooks');
@@ -22,13 +21,6 @@ const kinds = [
NODE_PERFORMANCE_GC_WEAKCB
];
// No observers for GC events, no entries should appear
{
global.gc();
const entries = performance.getEntriesByType('gc');
assert.strictEqual(entries.length, 0);
}
// Adding an observer should force at least one gc to appear
{
const obs = new PerformanceObserver(common.mustCallAtLeast((list) => {
@@ -39,11 +31,6 @@ const kinds = [
assert(kinds.includes(entry.kind));
assert.strictEqual(typeof entry.startTime, 'number');
assert.strictEqual(typeof entry.duration, 'number');
performance.clearGC();
const entries = performance.getEntriesByType('gc');
assert.strictEqual(entries.length, 0);
obs.disconnect();
}));
obs.observe({ entryTypes: ['gc'] });

View File

@@ -1,29 +0,0 @@
// Flags: --no-warnings
'use strict';
const common = require('../common');
const { performance } = require('perf_hooks');
const assert = require('assert');
assert.strictEqual(performance.length, 1);
assert.strictEqual(performance.maxEntries, 150);
performance.maxEntries = 1;
[-1, 0xffffffff + 1, '', null, undefined, Infinity].forEach((i) => {
common.expectsError(
() => performance.maxEntries = i,
{
code: 'ERR_INVALID_ARG_TYPE',
type: TypeError
}
);
});
common.expectWarning('Warning', 'Possible perf_hooks memory leak detected. ' +
'There are 2 entries in the ' +
'Performance Timeline. Use the clear methods to remove entries that are no ' +
'longer needed or set performance.maxEntries equal to a higher value ' +
'(currently the maxEntries is 1).', common.noWarnCode);
performance.mark('test');

View File

@@ -21,8 +21,8 @@ const {
} = constants;
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_NODE], 0);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MEASURE], 1);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 0);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MEASURE], 0);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_GC], 0);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION], 0);
@@ -67,7 +67,7 @@ assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION], 0);
const countdown =
new Countdown(3, () => {
observer.disconnect();
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 0);
});
function callback(list, obs) {
@@ -76,9 +76,9 @@ assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION], 0);
assert.strictEqual(entries.length, 1);
countdown.dec();
}
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 0);
observer.observe({ entryTypes: ['mark'] });
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 2);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
performance.mark('test1');
performance.mark('test2');
performance.mark('test3');
@@ -89,14 +89,14 @@ assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION], 0);
{
const observer =
new PerformanceObserver(common.mustCall(callback, 1));
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 0);
function callback(list, obs) {
assert.strictEqual(obs, observer);
const entries = list.getEntries();
assert.strictEqual(entries.length, 3);
observer.disconnect();
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 0);
{
const entriesByName = list.getEntriesByName('test1');
@@ -129,7 +129,7 @@ assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION], 0);
// Do this twice to make sure it doesn't throw
observer.observe({ entryTypes: ['mark', 'measure'], buffered: true });
// Even tho we called twice, count should be 1
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 2);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
performance.mark('test1');
performance.mark('test2');
performance.measure('test3', 'test1', 'test2');

View File

@@ -15,56 +15,12 @@ const inited = performance.now();
assert(inited < 20000);
{
const entries = performance.getEntries();
assert(Array.isArray(entries));
assert.strictEqual(entries.length, 1);
assert.strictEqual(entries[0], performance.nodeTiming);
}
{
const entries = performance.getEntriesByName('node');
assert(Array.isArray(entries));
assert.strictEqual(entries.length, 1);
assert.strictEqual(entries[0], performance.nodeTiming);
}
{
let n;
let entries = performance.getEntries();
for (n = 0; n < entries.length; n++) {
const entry = entries[n];
assert.notStrictEqual(entry.name, 'A');
assert.notStrictEqual(entry.entryType, 'mark');
}
// Should work without throwing any errors
performance.mark('A');
entries = performance.getEntries();
const markA = entries[1];
assert.strictEqual(markA.name, 'A');
assert.strictEqual(markA.entryType, 'mark');
performance.clearMarks('A');
entries = performance.getEntries();
for (n = 0; n < entries.length; n++) {
const entry = entries[n];
assert.notStrictEqual(entry.name, 'A');
assert.notStrictEqual(entry.entryType, 'mark');
}
}
{
let entries = performance.getEntries();
for (let n = 0; n < entries.length; n++) {
const entry = entries[n];
assert.notStrictEqual(entry.name, 'B');
assert.notStrictEqual(entry.entryType, 'mark');
}
performance.mark('B');
entries = performance.getEntries();
const markB = entries[1];
assert.strictEqual(markB.name, 'B');
assert.strictEqual(markB.entryType, 'mark');
performance.clearMarks();
entries = performance.getEntries();
assert.strictEqual(entries.length, 1);
}
{
@@ -83,11 +39,7 @@ assert(inited < 20000);
});
});
performance.clearMeasures();
performance.clearMarks();
const entries = performance.getEntries();
assert.strictEqual(entries.length, 1);
}
{
@@ -95,15 +47,6 @@ assert(inited < 20000);
setImmediate(() => {
performance.mark('B');
performance.measure('foo', 'A', 'B');
const entry = performance.getEntriesByName('foo')[0];
const markA = performance.getEntriesByName('A', 'mark')[0];
performance.getEntriesByName('B', 'mark')[0];
assert.strictEqual(entry.name, 'foo');
assert.strictEqual(entry.entryType, 'measure');
assert.strictEqual(entry.startTime, markA.startTime);
// TODO(jasnell): This comparison is too imprecise on some systems
// assert.strictEqual(entry.duration.toPrecision(3),
// (markB.startTime - markA.startTime).toPrecision(3));
});
}