Diffstat (limited to 'chromium/v8/src/debug/debug-coverage.cc')
-rw-r--r--  chromium/v8/src/debug/debug-coverage.cc | 159 ++++++++++++-----
1 file changed, 114 insertions(+), 45 deletions(-)
diff --git a/chromium/v8/src/debug/debug-coverage.cc b/chromium/v8/src/debug/debug-coverage.cc
index 5337f98db93..cb466ab6ab7 100644
--- a/chromium/v8/src/debug/debug-coverage.cc
+++ b/chromium/v8/src/debug/debug-coverage.cc
@@ -54,13 +54,6 @@ int StartPosition(SharedFunctionInfo info) {
return start;
}
-bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
- int a_start = StartPosition(a);
- int b_start = StartPosition(b);
- if (a_start == b_start) return a.EndPosition() > b.EndPosition();
- return a_start < b_start;
-}
-
bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
DCHECK_NE(kNoSourcePosition, a.start);
DCHECK_NE(kNoSourcePosition, b.start);
@@ -482,32 +475,31 @@ void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
// Reset all counters on the DebugInfo to zero.
ResetAllBlockCounts(info);
}
-} // anonymous namespace
-std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
- DCHECK(!isolate->is_best_effort_code_coverage());
- std::unique_ptr<Coverage> result =
- Collect(isolate, isolate->code_coverage_mode());
- if (!isolate->is_collecting_type_profile() &&
- (isolate->is_precise_binary_code_coverage() ||
- isolate->is_block_binary_code_coverage())) {
- // We do not have to hold onto feedback vectors for invocations we already
- // reported. So we can reset the list.
- isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+void PrintBlockCoverage(const CoverageFunction* function,
+ SharedFunctionInfo info, bool has_nonempty_source_range,
+ bool function_is_relevant) {
+ DCHECK(FLAG_trace_block_coverage);
+ std::unique_ptr<char[]> function_name =
+ function->name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+ i::PrintF(
+ "Coverage for function='%s', SFI=%p, has_nonempty_source_range=%d, "
+ "function_is_relevant=%d\n",
+ function_name.get(), reinterpret_cast<void*>(info.ptr()),
+ has_nonempty_source_range, function_is_relevant);
+ i::PrintF("{start: %d, end: %d, count: %d}\n", function->start, function->end,
+ function->count);
+ for (const auto& block : function->blocks) {
+ i::PrintF("{start: %d, end: %d, count: %d}\n", block.start, block.end,
+ block.count);
}
- return result;
}
-std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
- return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
-}
-
-std::unique_ptr<Coverage> Coverage::Collect(
- Isolate* isolate, v8::debug::CoverageMode collectionMode) {
- SharedToCounterMap counter_map;
-
+void CollectAndMaybeResetCounts(Isolate* isolate,
+ SharedToCounterMap* counter_map,
+ v8::debug::CoverageMode coverage_mode) {
const bool reset_count =
- collectionMode != v8::debug::CoverageMode::kBestEffort;
+ coverage_mode != v8::debug::CoverageMode::kBestEffort;
switch (isolate->code_coverage_mode()) {
case v8::debug::CoverageMode::kBlockBinary:
@@ -526,7 +518,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
DCHECK(shared.IsSubjectToDebugging());
uint32_t count = static_cast<uint32_t>(vector.invocation_count());
if (reset_count) vector.clear_invocation_count();
- counter_map.Add(shared, count);
+ counter_map->Add(shared, count);
}
break;
}
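
For reference, a minimal standalone sketch of the Add/Get behavior the counter map is assumed to provide here. CounterMap and FunctionId are hypothetical stand-ins for V8's SharedToCounterMap (which is keyed by SharedFunctionInfo); the saturating-add behavior is an assumption about the real map, not something shown in this diff.

#include <cstdint>
#include <cstdio>
#include <limits>
#include <unordered_map>

// Hypothetical stand-in for SharedFunctionInfo identity.
using FunctionId = const void*;

struct CounterMap {
  // Accumulate per-function invocation counts, saturating at UINT32_MAX
  // instead of wrapping around (assumed behavior).
  void Add(FunctionId key, uint32_t count) {
    uint32_t& slot = counts_[key];
    uint32_t headroom = std::numeric_limits<uint32_t>::max() - slot;
    slot += count < headroom ? count : headroom;
  }
  // Functions that were never added report a count of zero.
  uint32_t Get(FunctionId key) const {
    auto it = counts_.find(key);
    return it == counts_.end() ? 0 : it->second;
  }
  std::unordered_map<FunctionId, uint32_t> counts_;
};

int main() {
  int f;  // any address serves as a function identity here
  CounterMap map;
  map.Add(&f, 3);
  map.Add(&f, 4);
  std::printf("count=%u\n", map.Get(&f));  // count=7
}
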
@@ -534,7 +526,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
DCHECK(!isolate->factory()
->feedback_vectors_for_profiling_tools()
->IsArrayList());
- DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
+ DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
HeapObjectIterator heap_iterator(isolate->heap());
for (HeapObject current_obj = heap_iterator.Next();
!current_obj.is_null(); current_obj = heap_iterator.Next()) {
@@ -543,8 +535,9 @@ std::unique_ptr<Coverage> Coverage::Collect(
SharedFunctionInfo shared = func.shared();
if (!shared.IsSubjectToDebugging()) continue;
if (!(func.has_feedback_vector() ||
- func.has_closure_feedback_cell_array()))
+ func.has_closure_feedback_cell_array())) {
continue;
+ }
uint32_t count = 0;
if (func.has_feedback_vector()) {
count =
@@ -555,7 +548,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
// at least once. We don't have a precise invocation count here.
count = 1;
}
- counter_map.Add(shared, count);
+ counter_map->Add(shared, count);
}
// Also check functions on the stack to collect the count map. With lazy
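
As a rough standalone sketch of the best-effort counting branch above (JSFunctionLike is a hypothetical flattening of the state inspected on JSFunction): prefer the precise invocation count when a feedback vector exists; if only the closure feedback cell array exists, assume at least one call.

#include <cstdint>
#include <optional>

// Hypothetical flattening of the JSFunction state inspected above.
struct JSFunctionLike {
  std::optional<uint32_t> feedback_vector_invocation_count;
  bool has_closure_feedback_cell_array = false;
};

uint32_t BestEffortCount(const JSFunctionLike& func) {
  // Precise count when a feedback vector was allocated.
  if (func.feedback_vector_invocation_count.has_value()) {
    return *func.feedback_vector_invocation_count;
  }
  // A closure feedback cell array without a feedback vector implies the
  // function ran at least once, but the exact count is unknown.
  return func.has_closure_feedback_cell_array ? 1u : 0u;
}
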
@@ -564,12 +557,64 @@ std::unique_ptr<Coverage> Coverage::Collect(
// updated (i.e. it didn't execute return / jump).
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
SharedFunctionInfo shared = it.frame()->function().shared();
- if (counter_map.Get(shared) != 0) continue;
- counter_map.Add(shared, 1);
+ if (counter_map->Get(shared) != 0) continue;
+ counter_map->Add(shared, 1);
}
break;
}
}
+}
+
+// A {SFI, count} tuple is used to sort by source range (stored on
+// the SFI) and call count (in the counter map).
+struct SharedFunctionInfoAndCount {
+ SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
+ : info(info),
+ count(count),
+ start(StartPosition(info)),
+ end(info.EndPosition()) {}
+
+ // Sort by:
+ // - start, ascending.
+ // - end, descending.
+ // - count, ascending.
+ bool operator<(const SharedFunctionInfoAndCount& that) const {
+ if (this->start != that.start) return this->start < that.start;
+ if (this->end != that.end) return this->end > that.end;
+ return this->count < that.count;
+ }
+
+ SharedFunctionInfo info;
+ uint32_t count;
+ int start;
+ int end;
+};
+
+} // anonymous namespace
+
+std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
+ DCHECK(!isolate->is_best_effort_code_coverage());
+ std::unique_ptr<Coverage> result =
+ Collect(isolate, isolate->code_coverage_mode());
+ if (!isolate->is_collecting_type_profile() &&
+ (isolate->is_precise_binary_code_coverage() ||
+ isolate->is_block_binary_code_coverage())) {
+ // We do not have to hold onto feedback vectors for invocations we already
+ // reported. So we can reset the list.
+ isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+ }
+ return result;
+}
+
+std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
+ return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
+}
+
+std::unique_ptr<Coverage> Coverage::Collect(
+ Isolate* isolate, v8::debug::CoverageMode collectionMode) {
+ // Collect call counts for all functions.
+ SharedToCounterMap counter_map;
+ CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);
// Iterate shared function infos of every script and build a mapping
// between source ranges and invocation counts.
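
A compilable sketch of the ordering defined by SharedFunctionInfoAndCount, on plain ints (FunctionRange is a hypothetical stand-in): start ascending puts outer functions before the inner functions they enclose, end descending breaks start ties the same way, and count ascending makes functions with identical source ranges deterministic.

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

struct FunctionRange {  // hypothetical stand-in for SharedFunctionInfoAndCount
  int start;
  int end;
  uint32_t count;
  bool operator<(const FunctionRange& that) const {
    if (start != that.start) return start < that.start;  // start, ascending
    if (end != that.end) return end > that.end;          // end, descending
    return count < that.count;                           // count, ascending
  }
};

int main() {
  std::vector<FunctionRange> v = {{10, 50, 7}, {0, 100, 1}, {10, 50, 3}};
  std::sort(v.begin(), v.end());
  for (const auto& f : v) {
    std::printf("{start: %d, end: %d, count: %u}\n", f.start, f.end, f.count);
  }
  // {start: 0, end: 100, count: 1}   <- outer function first
  // {start: 10, end: 50, count: 3}   <- identical ranges: ascending count
  // {start: 10, end: 50, count: 7}
}
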
@@ -584,30 +629,40 @@ std::unique_ptr<Coverage> Coverage::Collect(
result->emplace_back(script_handle);
std::vector<CoverageFunction>* functions = &result->back().functions;
- std::vector<SharedFunctionInfo> sorted;
+ std::vector<SharedFunctionInfoAndCount> sorted;
{
// Sort functions by start position, from outer to inner functions.
SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
for (SharedFunctionInfo info = infos.Next(); !info.is_null();
info = infos.Next()) {
- sorted.push_back(info);
+ sorted.emplace_back(info, counter_map.Get(info));
}
- std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
+ std::sort(sorted.begin(), sorted.end());
}
// Stack to track nested functions, referring function by index.
std::vector<size_t> nesting;
// Use sorted list to reconstruct function nesting.
- for (SharedFunctionInfo info : sorted) {
- int start = StartPosition(info);
- int end = info.EndPosition();
- uint32_t count = counter_map.Get(info);
+ for (const SharedFunctionInfoAndCount& v : sorted) {
+ SharedFunctionInfo info = v.info;
+ int start = v.start;
+ int end = v.end;
+ uint32_t count = v.count;
+
// Find the correct outer function based on start position.
+ //
+ // This is not robust when considering two functions with identical source
+ // ranges. In this case, it is unclear which function is the inner / outer
+ // function. Above, we ensure that such functions are sorted in ascending
+ // `count` order, so at least our `parent_is_covered` optimization below
+ // should be fine.
+ // TODO(jgruber): Consider removing the optimization.
while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
nesting.pop_back();
}
+
if (count != 0) {
switch (collectionMode) {
case v8::debug::CoverageMode::kBlockCount:
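
To see why that sort order makes the nesting stack work, here is a self-contained sketch (Func is hypothetical, holding only the source range): because outer functions arrive first, popping every entry whose end is at or before the current start leaves the innermost enclosing function on top of the stack.

#include <cstdio>
#include <vector>

struct Func {  // hypothetical: just the source range
  int start;
  int end;
};

int main() {
  // Already sorted by {start ascending, end descending}.
  std::vector<Func> sorted = {{0, 100}, {10, 50}, {20, 30}, {60, 90}};
  std::vector<size_t> nesting;  // indices of still-open enclosing functions
  for (size_t i = 0; i < sorted.size(); i++) {
    // Pop functions that end before this one starts; they cannot enclose it.
    while (!nesting.empty() && sorted[nesting.back()].end <= sorted[i].start) {
      nesting.pop_back();
    }
    std::printf("[%d, %d) nesting depth %zu\n", sorted[i].start, sorted[i].end,
                nesting.size());
    nesting.push_back(i);
  }
  // [0, 100) nesting depth 0
  // [10, 50) nesting depth 1
  // [20, 30) nesting depth 2
  // [60, 90) nesting depth 1
}
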
@@ -632,9 +687,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
}
// Only include a function range if itself or its parent function is
- // covered, or if it contains non-trivial block coverage. It must also
- // have a non-empty source range (otherwise it is not interesting to
- // report).
+ // covered, or if it contains non-trivial block coverage.
bool is_covered = (count != 0);
bool parent_is_covered =
(!nesting.empty() && functions->at(nesting.back()).count != 0);
@@ -642,10 +695,19 @@ std::unique_ptr<Coverage> Coverage::Collect(
bool function_is_relevant =
(is_covered || parent_is_covered || has_block_coverage);
- if (function.HasNonEmptySourceRange() && function_is_relevant) {
+ // It must also have a non-empty source range (otherwise it is not
+ // interesting to report).
+ bool has_nonempty_source_range = function.HasNonEmptySourceRange();
+
+ if (has_nonempty_source_range && function_is_relevant) {
nesting.push_back(functions->size());
functions->emplace_back(function);
}
+
+ if (FLAG_trace_block_coverage) {
+ PrintBlockCoverage(&function, info, has_nonempty_source_range,
+ function_is_relevant);
+ }
}
// Remove entries for scripts that have no coverage.
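
Pulling the reporting conditions above into one place, a minimal sketch (FunctionCoverageLike is a hypothetical flattening of CoverageFunction, and the start < end check stands in for HasNonEmptySourceRange): a range is emitted only when it has a non-empty source range and is covered itself, has a covered enclosing function, or carries non-trivial block coverage.

#include <cstdint>
#include <cstdio>

struct FunctionCoverageLike {  // hypothetical flattening of CoverageFunction
  int start;
  int end;
  uint32_t count;
  bool has_block_coverage;
};

bool ShouldReport(const FunctionCoverageLike& function,
                  bool parent_is_covered) {
  bool is_covered = function.count != 0;
  bool function_is_relevant =
      is_covered || parent_is_covered || function.has_block_coverage;
  // An empty source range is never interesting to report.
  bool has_nonempty_source_range = function.start < function.end;
  return has_nonempty_source_range && function_is_relevant;
}

int main() {
  FunctionCoverageLike f{0, 10, 0, false};
  // Uncovered, no covered parent, no block data: filtered out.
  std::printf("%d\n", ShouldReport(f, /*parent_is_covered=*/false));  // 0
  // Same range, but the enclosing function is covered: reported.
  std::printf("%d\n", ShouldReport(f, /*parent_is_covered=*/true));   // 1
}
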
@@ -655,6 +717,13 @@ std::unique_ptr<Coverage> Coverage::Collect(
}
void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
+ if (mode != isolate->code_coverage_mode()) {
+ // Changing the coverage mode can change the bytecode that would be
+ // generated for a function, which can interfere with lazy source positions,
+ // so just force source position collection whenever there's such a change.
+ isolate->CollectSourcePositionsForAllBytecodeArrays();
+ }
+
switch (mode) {
case debug::CoverageMode::kBestEffort:
// Note that DevTools switches back to best-effort coverage once the