deps: V8: cherry-pick 597f885
Original commit message:

    [coverage] Deterministically sort collected shared function infos

    Prior to this CL, collected shared function infos with identical
    source ranges were sorted non-deterministically during coverage
    collection. This led to non-deterministically incorrectly-reported
    coverage due to an optimization which depended on the sort order later
    on.

    With this CL, we now sort shared function infos by the source range
    *and* call count.

    Bug: v8:6000,v8:9212
    Change-Id: If8bf900727591e71dbd0df621e472a4303f3a353
    Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1771776
    Reviewed-by: Yang Guo <yangguo@chromium.org>
    Commit-Queue: Jakob Gruber <jgruber@chromium.org>
    Cr-Commit-Position: refs/heads/master@{#63411}

Refs: v8/v8@597f885

PR-URL: #29367
Reviewed-By: Rich Trott <rtrott@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Michael Dawson <michael_dawson@ca.ibm.com>
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
bcoe authored and BridgeAR committed Sep 3, 2019
1 parent 8c776cb commit 7a4ae1a
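
The change is easiest to see in isolation. The following is a minimal, self-contained C++ sketch of the ordering this CL introduces; the FunctionAndCount type and the sample values are hypothetical stand-ins for the patch's SharedFunctionInfoAndCount, shown in the diff below.

    // Minimal sketch of the deterministic sort order (hypothetical
    // simplified types; not part of the actual patch).
    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct FunctionAndCount {
      int start;       // source range start: sort ascending
      int end;         // source range end: sort descending (outer first)
      uint32_t count;  // invocation count: ascending tiebreaker (new in this CL)
      bool operator<(const FunctionAndCount& that) const {
        if (start != that.start) return start < that.start;
        if (end != that.end) return end > that.end;
        return count < that.count;
      }
    };

    int main() {
      // Two functions with an identical source range: before this CL their
      // relative order after std::sort was unspecified; the count tiebreaker
      // now yields the same order on every run.
      std::vector<FunctionAndCount> fns = {{0, 100, 5}, {0, 100, 2}};
      std::sort(fns.begin(), fns.end());
      // fns[0].count == 2 and fns[1].count == 5, deterministically.
      return 0;
    }

Because std::sort is not stable, elements that compare equal can end up in any order from run to run; including count in the key means any remaining ties have equal counts, which the downstream parent_is_covered optimization can tolerate.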
Showing 2 changed files with 81 additions and 45 deletions.
2 changes: 1 addition & 1 deletion common.gypi
@@ -38,7 +38,7 @@

# Reset this number to 0 on major V8 upgrades.
# Increment by one for each non-official patch applied to deps/v8.
-    'v8_embedder_string': '-node.15',
+    'v8_embedder_string': '-node.16',

##### V8 defaults for Node.js #####

124 changes: 80 additions & 44 deletions deps/v8/src/debug/debug-coverage.cc
@@ -54,13 +54,6 @@ int StartPosition(SharedFunctionInfo info) {
return start;
}

-bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
-  int a_start = StartPosition(a);
-  int b_start = StartPosition(b);
-  if (a_start == b_start) return a.EndPosition() > b.EndPosition();
-  return a_start < b_start;
-}
-
bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
DCHECK_NE(kNoSourcePosition, a.start);
DCHECK_NE(kNoSourcePosition, b.start);
@@ -481,32 +474,12 @@ void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
// Reset all counters on the DebugInfo to zero.
ResetAllBlockCounts(info);
}
-}  // anonymous namespace
-
-std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
-  DCHECK(!isolate->is_best_effort_code_coverage());
-  std::unique_ptr<Coverage> result =
-      Collect(isolate, isolate->code_coverage_mode());
-  if (!isolate->is_collecting_type_profile() &&
-      (isolate->is_precise_binary_code_coverage() ||
-       isolate->is_block_binary_code_coverage())) {
-    // We do not have to hold onto feedback vectors for invocations we already
-    // reported. So we can reset the list.
-    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
-  }
-  return result;
-}
-
-std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
-  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
-}
-
-std::unique_ptr<Coverage> Coverage::Collect(
-    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
-  SharedToCounterMap counter_map;

+void CollectAndMaybeResetCounts(Isolate* isolate,
+                                SharedToCounterMap* counter_map,
+                                v8::debug::CoverageMode coverage_mode) {
const bool reset_count =
-      collectionMode != v8::debug::CoverageMode::kBestEffort;
+      coverage_mode != v8::debug::CoverageMode::kBestEffort;

switch (isolate->code_coverage_mode()) {
case v8::debug::CoverageMode::kBlockBinary:
@@ -525,15 +498,15 @@ std::unique_ptr<Coverage> Coverage::Collect(
DCHECK(shared.IsSubjectToDebugging());
uint32_t count = static_cast<uint32_t>(vector.invocation_count());
if (reset_count) vector.clear_invocation_count();
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
}
break;
}
case v8::debug::CoverageMode::kBestEffort: {
DCHECK(!isolate->factory()
->feedback_vectors_for_profiling_tools()
->IsArrayList());
-      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
+      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
HeapIterator heap_iterator(isolate->heap());
for (HeapObject current_obj = heap_iterator.next();
!current_obj.is_null(); current_obj = heap_iterator.next()) {
@@ -542,8 +515,9 @@ std::unique_ptr<Coverage> Coverage::Collect(
SharedFunctionInfo shared = func.shared();
if (!shared.IsSubjectToDebugging()) continue;
if (!(func.has_feedback_vector() ||
-              func.has_closure_feedback_cell_array()))
+              func.has_closure_feedback_cell_array())) {
continue;
+        }
uint32_t count = 0;
if (func.has_feedback_vector()) {
count =
@@ -554,7 +528,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
// atleast once. We don't have precise invocation count here.
count = 1;
}
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
}

// Also check functions on the stack to collect the count map. With lazy
@@ -563,12 +537,64 @@ std::unique_ptr<Coverage> Coverage::Collect(
// updated (i.e. it didn't execute return / jump).
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
SharedFunctionInfo shared = it.frame()->function().shared();
-        if (counter_map.Get(shared) != 0) continue;
-        counter_map.Add(shared, 1);
+        if (counter_map->Get(shared) != 0) continue;
+        counter_map->Add(shared, 1);
}
break;
}
}
+}
+
+// A {SFI, count} tuple is used to sort by source range (stored on
+// the SFI) and call count (in the counter map).
+struct SharedFunctionInfoAndCount {
+  SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
+      : info(info),
+        count(count),
+        start(StartPosition(info)),
+        end(info.EndPosition()) {}
+
+  // Sort by:
+  // - start, ascending.
+  // - end, descending.
+  // - count, ascending.
+  bool operator<(const SharedFunctionInfoAndCount& that) const {
+    if (this->start != that.start) return this->start < that.start;
+    if (this->end != that.end) return this->end > that.end;
+    return this->count < that.count;
+  }
+
+  SharedFunctionInfo info;
+  uint32_t count;
+  int start;
+  int end;
+};
+
+}  // anonymous namespace
+
+std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
+  DCHECK(!isolate->is_best_effort_code_coverage());
+  std::unique_ptr<Coverage> result =
+      Collect(isolate, isolate->code_coverage_mode());
+  if (!isolate->is_collecting_type_profile() &&
+      (isolate->is_precise_binary_code_coverage() ||
+       isolate->is_block_binary_code_coverage())) {
+    // We do not have to hold onto feedback vectors for invocations we already
+    // reported. So we can reset the list.
+    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+  }
+  return result;
+}
+
+std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
+  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
+}
+
+std::unique_ptr<Coverage> Coverage::Collect(
+    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
+  // Collect call counts for all functions.
+  SharedToCounterMap counter_map;
+  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);

// Iterate shared function infos of every script and build a mapping
// between source ranges and invocation counts.
@@ -583,30 +609,40 @@ std::unique_ptr<Coverage> Coverage::Collect(
result->emplace_back(script_handle);
std::vector<CoverageFunction>* functions = &result->back().functions;

-    std::vector<SharedFunctionInfo> sorted;
+    std::vector<SharedFunctionInfoAndCount> sorted;

{
// Sort functions by start position, from outer to inner functions.
SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
for (SharedFunctionInfo info = infos.Next(); !info.is_null();
info = infos.Next()) {
-        sorted.push_back(info);
+        sorted.emplace_back(info, counter_map.Get(info));
}
-      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
+      std::sort(sorted.begin(), sorted.end());
}

// Stack to track nested functions, referring function by index.
std::vector<size_t> nesting;

// Use sorted list to reconstruct function nesting.
-    for (SharedFunctionInfo info : sorted) {
-      int start = StartPosition(info);
-      int end = info.EndPosition();
-      uint32_t count = counter_map.Get(info);
+    for (const SharedFunctionInfoAndCount& v : sorted) {
+      SharedFunctionInfo info = v.info;
+      int start = v.start;
+      int end = v.end;
+      uint32_t count = v.count;

// Find the correct outer function based on start position.
+      //
+      // This is not robust when considering two functions with identical source
+      // ranges. In this case, it is unclear which function is the inner / outer
+      // function. Above, we ensure that such functions are sorted in ascending
+      // `count` order, so at least our `parent_is_covered` optimization below
+      // should be fine.
+      // TODO(jgruber): Consider removing the optimization.
while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
nesting.pop_back();
}

if (count != 0) {
switch (collectionMode) {
case v8::debug::CoverageMode::kBlockCount:
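
For context on why the sort order matters downstream, here is a toy model of the stack-based nesting reconstruction from the final hunk. The Fn type and hard-coded ranges are hypothetical; the real loop builds CoverageFunction entries and feeds the parent_is_covered optimization.

    // Toy model of the stack-based nesting reconstruction in
    // Coverage::Collect (simplified; assumes input sorted by
    // start ascending, end descending).
    #include <cstdio>
    #include <vector>

    struct Fn { int start, end; };

    int main() {
      // One outer function [0,100) and two inner functions.
      std::vector<Fn> sorted = {{0, 100}, {10, 40}, {50, 90}};
      std::vector<size_t> nesting;  // stack of enclosing function indices
      for (size_t i = 0; i < sorted.size(); i++) {
        // Pop functions whose source range ended before this one starts.
        while (!nesting.empty() &&
               sorted[nesting.back()].end <= sorted[i].start) {
          nesting.pop_back();
        }
        std::printf("fn %zu at depth %zu\n", i, nesting.size());
        nesting.push_back(i);
      }
      // Prints depths 0, 1, 1: both inner functions nest in the outer one.
      return 0;
    }

With functions sorted by start ascending and end descending, each function's enclosing function is already on the stack when it is visited, which is exactly what the popping condition relies on.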
