From 3618a2a1d680e2c39c2d8d2cbb5e95dccc7407b5 Mon Sep 17 00:00:00 2001
From: himself65
Date: Sat, 4 Apr 2020 16:41:34 +0800
Subject: [PATCH] perf_hooks: allow omitted parameters in 'performance.measure'

Make `startMark` and `endMark` parameters optional.
---
 doc/api/perf_hooks.md                     | 20 ++++++++++++------
 lib/perf_hooks.js                         | 10 +++++----
 src/node_perf.cc                          | 17 +++++++++------
 test/parallel/test-performance-measure.js | 25 +++++++++++++++++++++++
 4 files changed, 56 insertions(+), 16 deletions(-)
 create mode 100644 test/parallel/test-performance-measure.js

diff --git a/doc/api/perf_hooks.md b/doc/api/perf_hooks.md
index 00b52d96889c61..ac5aa2ccea0a66 100644
--- a/doc/api/perf_hooks.md
+++ b/doc/api/perf_hooks.md
@@ -17,9 +17,12 @@ const obs = new PerformanceObserver((items) => {
   performance.clearMarks();
 });
 obs.observe({ entryTypes: ['measure'] });
+performance.measure('Start to Now');
 
 performance.mark('A');
 doSomeLongRunningProcess(() => {
+  performance.measure('A to Now', 'A');
+
   performance.mark('B');
   performance.measure('A to B', 'A', 'B');
 });
@@ -53,14 +56,18 @@ Creates a new `PerformanceMark` entry in the Performance Timeline. A
 `performanceEntry.duration` is always `0`. Performance marks are used
 to mark specific significant moments in the Performance Timeline.
 
-### `performance.measure(name, startMark, endMark)`
+### `performance.measure(name[, startMark[, endMark]])`
 
 * `name` {string}
-* `startMark` {string}
-* `endMark` {string}
+* `startMark` {string} Optional.
+* `endMark` {string} Optional.
 
 Creates a new `PerformanceMeasure` entry in the Performance Timeline.
 A `PerformanceMeasure` is a subclass of `PerformanceEntry` whose
@@ -73,9 +80,10 @@ Performance Timeline, or *may* identify any of the timestamp properties
 provided by the `PerformanceNodeTiming` class. If the named `startMark`
 does not exist, then `startMark` is set to [`timeOrigin`][] by default.
 
-The `endMark` argument must identify any *existing* `PerformanceMark` in the
-Performance Timeline or any of the timestamp properties provided by the
-`PerformanceNodeTiming` class. If the named `endMark` does not exist, an
+The optional `endMark` argument must identify any *existing* `PerformanceMark`
+in the Performance Timeline or any of the timestamp properties provided by the
+`PerformanceNodeTiming` class. If `endMark` is omitted, `performance.now()` is
+used as the end time. Otherwise, if the named `endMark` does not exist, an
 error will be thrown.
 
 ### `performance.nodeTiming`
diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js
index a141705728227b..11a9b5eba6a343 100644
--- a/lib/perf_hooks.js
+++ b/lib/perf_hooks.js
@@ -395,12 +395,14 @@ class Performance {
 
   measure(name, startMark, endMark) {
     name = `${name}`;
-    endMark = `${endMark}`;
-    startMark = startMark !== undefined ? `${startMark}` : '';
     const marks = this[kIndex][kMarks];
-    if (!marks.has(endMark) && !(endMark in nodeTiming)) {
-      throw new ERR_INVALID_PERFORMANCE_MARK(endMark);
+    if (arguments.length >= 3) {
+      if (!marks.has(endMark) && !(endMark in nodeTiming))
+        throw new ERR_INVALID_PERFORMANCE_MARK(endMark);
+      else
+        endMark = `${endMark}`;
     }
+    startMark = startMark !== undefined ? `${startMark}` : '';
     _measure(name, startMark, endMark);
   }
 
diff --git a/src/node_perf.cc b/src/node_perf.cc
index e54edb5fe6d6e9..4b8bf2a8a7c913 100644
--- a/src/node_perf.cc
+++ b/src/node_perf.cc
@@ -172,7 +172,6 @@ void Measure(const FunctionCallbackInfo<Value>& args) {
   HandleScope scope(env->isolate());
   Utf8Value name(env->isolate(), args[0]);
   Utf8Value startMark(env->isolate(), args[1]);
-  Utf8Value endMark(env->isolate(), args[2]);
 
   AliasedFloat64Array& milestones = env->performance_state()->milestones;
 
@@ -186,11 +185,17 @@
     startTimestamp = milestones[milestone];
   }
 
-  uint64_t endTimestamp = GetPerformanceMark(env, *endMark);
-  if (endTimestamp == 0) {
-    PerformanceMilestone milestone = ToPerformanceMilestoneEnum(*endMark);
-    if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID)
-      endTimestamp = milestones[milestone];
+  uint64_t endTimestamp = 0;
+  if (args[2]->IsUndefined()) {
+    endTimestamp = PERFORMANCE_NOW();
+  } else {
+    Utf8Value endMark(env->isolate(), args[2]);
+    endTimestamp = GetPerformanceMark(env, *endMark);
+    if (endTimestamp == 0) {
+      PerformanceMilestone milestone = ToPerformanceMilestoneEnum(*endMark);
+      if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID)
+        endTimestamp = milestones[milestone];
+    }
   }
 
   if (endTimestamp < startTimestamp)
diff --git a/test/parallel/test-performance-measure.js b/test/parallel/test-performance-measure.js
new file mode 100644
index 00000000000000..06e04cc219c7fd
--- /dev/null
+++ b/test/parallel/test-performance-measure.js
@@ -0,0 +1,25 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+
+const { PerformanceObserver, performance } = require('perf_hooks');
+const DELAY = 1000;
+
+const expected = ['Start to Now', 'A to Now', 'A to B'];
+const obs = new PerformanceObserver(common.mustCall((items) => {
+  const entries = items.getEntries();
+  const { name, duration } = entries[0];
+  assert.ok(duration > DELAY);
+  assert.strictEqual(expected.shift(), name);
+}, 3));
+obs.observe({ entryTypes: ['measure'] });
+
+performance.mark('A');
+setTimeout(common.mustCall(() => {
+  performance.measure('Start to Now');
+  performance.measure('A to Now', 'A');
+
+  performance.mark('B');
+  performance.measure('A to B', 'A', 'B');
+}), DELAY);
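
Reviewer note: the sketch below shows the three call forms this patch permits.
It is a minimal illustration, not part of the change itself, and assumes a
Node.js build with this patch applied; the mark names, measure names, and the
50 ms timeout are invented for the example.

'use strict';
const { PerformanceObserver, performance } = require('perf_hooks');

// Print every 'measure' entry as the observer receives it.
const obs = new PerformanceObserver((items) => {
  for (const { name, duration } of items.getEntries())
    console.log(`${name}: ${duration.toFixed(1)}ms`);
});
obs.observe({ entryTypes: ['measure'] });

performance.mark('A');
setTimeout(() => {
  performance.mark('B');
  performance.measure('origin to now');     // both marks omitted: timeOrigin -> now
  performance.measure('A to now', 'A');     // endMark omitted: mark 'A' -> now
  performance.measure('A to B', 'A', 'B');  // unchanged three-argument form
}, 50);

With no marks given, the measure spans timeOrigin to performance.now(); with
only startMark given, the end defaults to performance.now(), mirroring the new
args[2]->IsUndefined() branch in src/node_perf.cc.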