From f302ac9ae41120ade05fae6d726e1ed37fcd39e1 Mon Sep 17 00:00:00 2001
From: himself65
Date: Sat, 4 Apr 2020 16:41:34 +0800
Subject: [PATCH] perf_hooks: allow omitted parameters in 'performance.measure'

Make `startMark` and `endMark` parameters optional.

PR-URL: https://github.com/nodejs/node/pull/32651
Fixes: https://github.com/nodejs/node/issues/32647
Refs: https://www.w3.org/TR/user-timing-2/#measure-method
Reviewed-By: Anna Henningsen
Reviewed-By: Chengzhong Wu
Reviewed-By: James M Snell
---
 doc/api/perf_hooks.md                     | 20 ++++++++++++++------
 lib/perf_hooks.js                         | 10 ++++++----
 src/node_perf.cc                          | 17 +++++++++++------
 test/parallel/test-performance-measure.js | 25 +++++++++++++++++++++++++
 4 files changed, 56 insertions(+), 16 deletions(-)
 create mode 100644 test/parallel/test-performance-measure.js

diff --git a/doc/api/perf_hooks.md b/doc/api/perf_hooks.md
index 02dee38bae1987..e6bb35dfcac7e1 100644
--- a/doc/api/perf_hooks.md
+++ b/doc/api/perf_hooks.md
@@ -17,9 +17,12 @@ const obs = new PerformanceObserver((items) => {
   performance.clearMarks();
 });
 obs.observe({ entryTypes: ['measure'] });
+performance.measure('Start to Now');
 
 performance.mark('A');
 doSomeLongRunningProcess(() => {
+  performance.measure('A to Now', 'A');
+
   performance.mark('B');
   performance.measure('A to B', 'A', 'B');
 });
@@ -53,14 +56,18 @@ Creates a new `PerformanceMark` entry in the Performance Timeline. A
 `performanceEntry.duration` is always `0`. Performance marks are used
 to mark specific significant moments in the Performance Timeline.
 
-### `performance.measure(name, startMark, endMark)`
+### `performance.measure(name[, startMark[, endMark]])`
 
 * `name` {string}
-* `startMark` {string}
-* `endMark` {string}
+* `startMark` {string} Optional.
+* `endMark` {string} Optional.
 
 Creates a new `PerformanceMeasure` entry in the Performance Timeline. A
 `PerformanceMeasure` is a subclass of `PerformanceEntry` whose
@@ -73,9 +80,10 @@ Performance Timeline, or *may* identify any of the timestamp properties
 provided by the `PerformanceNodeTiming` class. If the named `startMark` does
 not exist, then `startMark` is set to [`timeOrigin`][] by default.
 
-The `endMark` argument must identify any *existing* `PerformanceMark` in the
-Performance Timeline or any of the timestamp properties provided by the
-`PerformanceNodeTiming` class. If the named `endMark` does not exist, an
+The optional `endMark` argument must identify any *existing* `PerformanceMark`
+in the Performance Timeline or any of the timestamp properties provided by the
+`PerformanceNodeTiming` class. If `endMark` is omitted, the end time defaults
+to `performance.now()`; otherwise, if the named `endMark` does not exist, an
 error will be thrown.
 
 ### `performance.nodeTiming`
diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js
index a141705728227b..11a9b5eba6a343 100644
--- a/lib/perf_hooks.js
+++ b/lib/perf_hooks.js
@@ -395,12 +395,14 @@ class Performance {
 
   measure(name, startMark, endMark) {
     name = `${name}`;
-    endMark = `${endMark}`;
-    startMark = startMark !== undefined ? `${startMark}` : '';
     const marks = this[kIndex][kMarks];
-    if (!marks.has(endMark) && !(endMark in nodeTiming)) {
-      throw new ERR_INVALID_PERFORMANCE_MARK(endMark);
+    if (arguments.length >= 3) {
+      if (!marks.has(endMark) && !(endMark in nodeTiming))
+        throw new ERR_INVALID_PERFORMANCE_MARK(endMark);
+      else
+        endMark = `${endMark}`;
     }
+    startMark = startMark !== undefined ? `${startMark}` : '';
     _measure(name, startMark, endMark);
   }
 
diff --git a/src/node_perf.cc b/src/node_perf.cc
index 4a940c476fd2a5..67eedf7b30032f 100644
--- a/src/node_perf.cc
+++ b/src/node_perf.cc
@@ -172,7 +172,6 @@ void Measure(const FunctionCallbackInfo<Value>& args) {
   HandleScope scope(env->isolate());
   Utf8Value name(env->isolate(), args[0]);
   Utf8Value startMark(env->isolate(), args[1]);
-  Utf8Value endMark(env->isolate(), args[2]);
 
   AliasedFloat64Array& milestones = env->performance_state()->milestones;
 
@@ -186,11 +185,17 @@ void Measure(const FunctionCallbackInfo<Value>& args) {
     startTimestamp = milestones[milestone];
   }
 
-  uint64_t endTimestamp = GetPerformanceMark(env, *endMark);
-  if (endTimestamp == 0) {
-    PerformanceMilestone milestone = ToPerformanceMilestoneEnum(*endMark);
-    if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID)
-      endTimestamp = milestones[milestone];
+  uint64_t endTimestamp = 0;
+  if (args[2]->IsUndefined()) {
+    endTimestamp = PERFORMANCE_NOW();
+  } else {
+    Utf8Value endMark(env->isolate(), args[2]);
+    endTimestamp = GetPerformanceMark(env, *endMark);
+    if (endTimestamp == 0) {
+      PerformanceMilestone milestone = ToPerformanceMilestoneEnum(*endMark);
+      if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID)
+        endTimestamp = milestones[milestone];
+    }
   }
 
   if (endTimestamp < startTimestamp)
diff --git a/test/parallel/test-performance-measure.js b/test/parallel/test-performance-measure.js
new file mode 100644
index 00000000000000..06e04cc219c7fd
--- /dev/null
+++ b/test/parallel/test-performance-measure.js
@@ -0,0 +1,25 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+
+const { PerformanceObserver, performance } = require('perf_hooks');
+const DELAY = 1000;
+
+const expected = ['Start to Now', 'A to Now', 'A to B'];
+const obs = new PerformanceObserver(common.mustCall((items) => {
+  const entries = items.getEntries();
+  const { name, duration } = entries[0];
+  assert.ok(duration > DELAY);
+  assert.strictEqual(expected.shift(), name);
+}, 3));
+obs.observe({ entryTypes: ['measure'] });
+
+performance.mark('A');
+setTimeout(common.mustCall(() => {
+  performance.measure('Start to Now');
+  performance.measure('A to Now', 'A');
+
+  performance.mark('B');
+  performance.measure('A to B', 'A', 'B');
+}), DELAY);
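
Not part of the commit: a minimal standalone sketch of the three call shapes
this patch enables, assuming a Node.js build that includes it. The observer
and `setTimeout` scaffolding here is illustrative only, not from the commit.

```js
'use strict';
const { PerformanceObserver, performance } = require('perf_hooks');

const obs = new PerformanceObserver((items) => {
  // Log each measure as it is observed.
  for (const { name, duration } of items.getEntries())
    console.log(`${name}: ${duration.toFixed(1)}ms`);
});
obs.observe({ entryTypes: ['measure'] });

performance.mark('A');
setTimeout(() => {
  performance.measure('Start to Now');      // timeOrigin -> now
  performance.measure('A to Now', 'A');     // mark 'A'   -> now
  performance.mark('B');
  performance.measure('A to B', 'A', 'B');  // mark 'A'   -> mark 'B'
}, 100);
```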
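The lib-side gate is `arguments.length >= 3` rather than an `undefined` check,
which has a user-visible consequence: `endMark` validation is skipped only when
the argument is truly absent, so an explicitly passed `undefined` (or any
unknown mark name) still throws. A short sketch of both paths, again assuming
a patched build:

```js
'use strict';
const { performance } = require('perf_hooks');

performance.mark('begin');

// endMark omitted: no validation; the end time defaults to performance.now().
performance.measure('ok', 'begin');

// endMark passed but naming no existing mark: the lib-side check still throws.
try {
  performance.measure('bad', 'begin', 'no-such-mark');
} catch (err) {
  console.log(err.code);  // 'ERR_INVALID_PERFORMANCE_MARK'
}
```

On the native side, `args[2]->IsUndefined()` can then only be true for a
genuinely omitted argument, in which case `PERFORMANCE_NOW()` supplies the end
timestamp.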