author    Allan Sandfeld Jensen <allan.jensen@qt.io>  2020-01-20 13:40:20 +0100
committer Allan Sandfeld Jensen <allan.jensen@qt.io>  2020-01-22 12:41:23 +0000
commit    7961cea6d1041e3e454dae6a1da660b453efd238 (patch)
tree      c0eeb4a9ff9ba32986289c1653d9608e53ccb444 /chromium/v8/src/debug
parent    b7034d0803538058e5c9d904ef03cf5eab34f6ef (diff)
download  qtwebengine-chromium-7961cea6d1041e3e454dae6a1da660b453efd238.tar.gz
BASELINE: Update Chromium to 78.0.3904.130
Change-Id: If185e0c0061b3437531c97c9c8c78f239352a68b
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/v8/src/debug')
-rw-r--r-- chromium/v8/src/debug/debug-coverage.cc             | 159
-rw-r--r-- chromium/v8/src/debug/debug-evaluate.cc             |  14
-rw-r--r-- chromium/v8/src/debug/debug-frames.cc               |   4
-rw-r--r-- chromium/v8/src/debug/debug-scopes.cc               |   9
-rw-r--r-- chromium/v8/src/debug/debug-stack-trace-iterator.cc |   3
-rw-r--r-- chromium/v8/src/debug/debug-type-profile.cc         |   7
-rw-r--r-- chromium/v8/src/debug/debug.cc                      |  45
-rw-r--r-- chromium/v8/src/debug/debug.h                       |   3
8 files changed, 173 insertions, 71 deletions
diff --git a/chromium/v8/src/debug/debug-coverage.cc b/chromium/v8/src/debug/debug-coverage.cc
index 5337f98db93..cb466ab6ab7 100644
--- a/chromium/v8/src/debug/debug-coverage.cc
+++ b/chromium/v8/src/debug/debug-coverage.cc
@@ -54,13 +54,6 @@ int StartPosition(SharedFunctionInfo info) {
return start;
}
-bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
- int a_start = StartPosition(a);
- int b_start = StartPosition(b);
- if (a_start == b_start) return a.EndPosition() > b.EndPosition();
- return a_start < b_start;
-}
-
bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
DCHECK_NE(kNoSourcePosition, a.start);
DCHECK_NE(kNoSourcePosition, b.start);
@@ -482,32 +475,31 @@ void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
// Reset all counters on the DebugInfo to zero.
ResetAllBlockCounts(info);
}
-} // anonymous namespace
-std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
- DCHECK(!isolate->is_best_effort_code_coverage());
- std::unique_ptr<Coverage> result =
- Collect(isolate, isolate->code_coverage_mode());
- if (!isolate->is_collecting_type_profile() &&
- (isolate->is_precise_binary_code_coverage() ||
- isolate->is_block_binary_code_coverage())) {
- // We do not have to hold onto feedback vectors for invocations we already
- // reported. So we can reset the list.
- isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+void PrintBlockCoverage(const CoverageFunction* function,
+ SharedFunctionInfo info, bool has_nonempty_source_range,
+ bool function_is_relevant) {
+ DCHECK(FLAG_trace_block_coverage);
+ std::unique_ptr<char[]> function_name =
+ function->name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+ i::PrintF(
+ "Coverage for function='%s', SFI=%p, has_nonempty_source_range=%d, "
+ "function_is_relevant=%d\n",
+ function_name.get(), reinterpret_cast<void*>(info.ptr()),
+ has_nonempty_source_range, function_is_relevant);
+ i::PrintF("{start: %d, end: %d, count: %d}\n", function->start, function->end,
+ function->count);
+ for (const auto& block : function->blocks) {
+ i::PrintF("{start: %d, end: %d, count: %d}\n", block.start, block.end,
+ block.count);
}
- return result;
}
-std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
- return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
-}
-
-std::unique_ptr<Coverage> Coverage::Collect(
- Isolate* isolate, v8::debug::CoverageMode collectionMode) {
- SharedToCounterMap counter_map;
-
+void CollectAndMaybeResetCounts(Isolate* isolate,
+ SharedToCounterMap* counter_map,
+ v8::debug::CoverageMode coverage_mode) {
const bool reset_count =
- collectionMode != v8::debug::CoverageMode::kBestEffort;
+ coverage_mode != v8::debug::CoverageMode::kBestEffort;
switch (isolate->code_coverage_mode()) {
case v8::debug::CoverageMode::kBlockBinary:
@@ -526,7 +518,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
DCHECK(shared.IsSubjectToDebugging());
uint32_t count = static_cast<uint32_t>(vector.invocation_count());
if (reset_count) vector.clear_invocation_count();
- counter_map.Add(shared, count);
+ counter_map->Add(shared, count);
}
break;
}
@@ -534,7 +526,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
DCHECK(!isolate->factory()
->feedback_vectors_for_profiling_tools()
->IsArrayList());
- DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
+ DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
HeapObjectIterator heap_iterator(isolate->heap());
for (HeapObject current_obj = heap_iterator.Next();
!current_obj.is_null(); current_obj = heap_iterator.Next()) {
@@ -543,8 +535,9 @@ std::unique_ptr<Coverage> Coverage::Collect(
SharedFunctionInfo shared = func.shared();
if (!shared.IsSubjectToDebugging()) continue;
if (!(func.has_feedback_vector() ||
- func.has_closure_feedback_cell_array()))
+ func.has_closure_feedback_cell_array())) {
continue;
+ }
uint32_t count = 0;
if (func.has_feedback_vector()) {
count =
@@ -555,7 +548,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
      // at least once. We don't have a precise invocation count here.
count = 1;
}
- counter_map.Add(shared, count);
+ counter_map->Add(shared, count);
}
// Also check functions on the stack to collect the count map. With lazy
@@ -564,12 +557,64 @@ std::unique_ptr<Coverage> Coverage::Collect(
// updated (i.e. it didn't execute return / jump).
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
SharedFunctionInfo shared = it.frame()->function().shared();
- if (counter_map.Get(shared) != 0) continue;
- counter_map.Add(shared, 1);
+ if (counter_map->Get(shared) != 0) continue;
+ counter_map->Add(shared, 1);
}
break;
}
}
+}
+
+// A {SFI, count} tuple is used to sort by source range (stored on
+// the SFI) and call count (in the counter map).
+struct SharedFunctionInfoAndCount {
+ SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
+ : info(info),
+ count(count),
+ start(StartPosition(info)),
+ end(info.EndPosition()) {}
+
+ // Sort by:
+ // - start, ascending.
+ // - end, descending.
+ // - count, ascending.
+ bool operator<(const SharedFunctionInfoAndCount& that) const {
+ if (this->start != that.start) return this->start < that.start;
+ if (this->end != that.end) return this->end > that.end;
+ return this->count < that.count;
+ }
+
+ SharedFunctionInfo info;
+ uint32_t count;
+ int start;
+ int end;
+};
+
+} // anonymous namespace
+
+std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
+ DCHECK(!isolate->is_best_effort_code_coverage());
+ std::unique_ptr<Coverage> result =
+ Collect(isolate, isolate->code_coverage_mode());
+ if (!isolate->is_collecting_type_profile() &&
+ (isolate->is_precise_binary_code_coverage() ||
+ isolate->is_block_binary_code_coverage())) {
+ // We do not have to hold onto feedback vectors for invocations we already
+ // reported. So we can reset the list.
+ isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+ }
+ return result;
+}
+
+std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
+ return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
+}
+
+std::unique_ptr<Coverage> Coverage::Collect(
+ Isolate* isolate, v8::debug::CoverageMode collectionMode) {
+ // Collect call counts for all functions.
+ SharedToCounterMap counter_map;
+ CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);
// Iterate shared function infos of every script and build a mapping
// between source ranges and invocation counts.
@@ -584,30 +629,40 @@ std::unique_ptr<Coverage> Coverage::Collect(
result->emplace_back(script_handle);
std::vector<CoverageFunction>* functions = &result->back().functions;
- std::vector<SharedFunctionInfo> sorted;
+ std::vector<SharedFunctionInfoAndCount> sorted;
{
// Sort functions by start position, from outer to inner functions.
SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
for (SharedFunctionInfo info = infos.Next(); !info.is_null();
info = infos.Next()) {
- sorted.push_back(info);
+ sorted.emplace_back(info, counter_map.Get(info));
}
- std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
+ std::sort(sorted.begin(), sorted.end());
}
// Stack to track nested functions, referring function by index.
std::vector<size_t> nesting;
// Use sorted list to reconstruct function nesting.
- for (SharedFunctionInfo info : sorted) {
- int start = StartPosition(info);
- int end = info.EndPosition();
- uint32_t count = counter_map.Get(info);
+ for (const SharedFunctionInfoAndCount& v : sorted) {
+ SharedFunctionInfo info = v.info;
+ int start = v.start;
+ int end = v.end;
+ uint32_t count = v.count;
+
// Find the correct outer function based on start position.
+ //
+ // This is not robust when considering two functions with identical source
+ // ranges. In this case, it is unclear which function is the inner / outer
+ // function. Above, we ensure that such functions are sorted in ascending
+ // `count` order, so at least our `parent_is_covered` optimization below
+ // should be fine.
+ // TODO(jgruber): Consider removing the optimization.
while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
nesting.pop_back();
}
+
if (count != 0) {
switch (collectionMode) {
case v8::debug::CoverageMode::kBlockCount:
@@ -632,9 +687,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
}
// Only include a function range if itself or its parent function is
- // covered, or if it contains non-trivial block coverage. It must also
- // have a non-empty source range (otherwise it is not interesting to
- // report).
+ // covered, or if it contains non-trivial block coverage.
bool is_covered = (count != 0);
bool parent_is_covered =
(!nesting.empty() && functions->at(nesting.back()).count != 0);
@@ -642,10 +695,19 @@ std::unique_ptr<Coverage> Coverage::Collect(
bool function_is_relevant =
(is_covered || parent_is_covered || has_block_coverage);
- if (function.HasNonEmptySourceRange() && function_is_relevant) {
+ // It must also have a non-empty source range (otherwise it is not
+ // interesting to report).
+ bool has_nonempty_source_range = function.HasNonEmptySourceRange();
+
+ if (has_nonempty_source_range && function_is_relevant) {
nesting.push_back(functions->size());
functions->emplace_back(function);
}
+
+ if (FLAG_trace_block_coverage) {
+ PrintBlockCoverage(&function, info, has_nonempty_source_range,
+ function_is_relevant);
+ }
}
// Remove entries for scripts that have no coverage.
@@ -655,6 +717,13 @@ std::unique_ptr<Coverage> Coverage::Collect(
}
void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
+ if (mode != isolate->code_coverage_mode()) {
+ // Changing the coverage mode can change the bytecode that would be
+ // generated for a function, which can interfere with lazy source positions,
+ // so just force source position collection whenever there's such a change.
+ isolate->CollectSourcePositionsForAllBytecodeArrays();
+ }
+
switch (mode) {
case debug::CoverageMode::kBestEffort:
// Note that DevTools switches back to best-effort coverage once the
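The debug-coverage.cc change above replaces the two-key comparator CompareSharedFunctionInfo with a three-key operator< on the new SharedFunctionInfoAndCount tuple, so that two functions with identical source ranges at least sort by ascending invocation count. A minimal standalone sketch of the same ordering, with a hypothetical FunctionAndCount using plain ints in place of SharedFunctionInfo:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for the {SFI, count} tuple: sort by start position
// ascending, end position descending (outer before inner), then count
// ascending to break ties between identical source ranges.
struct FunctionAndCount {
  int start;
  int end;
  uint32_t count;

  bool operator<(const FunctionAndCount& that) const {
    if (start != that.start) return start < that.start;
    if (end != that.end) return end > that.end;
    return count < that.count;
  }
};

int main() {
  std::vector<FunctionAndCount> fns = {{10, 40, 3}, {0, 100, 1}, {10, 40, 0}};
  std::sort(fns.begin(), fns.end());
  for (const auto& f : fns) {
    std::printf("{start: %d, end: %d, count: %u}\n", f.start, f.end, f.count);
  }
  // Prints {0,100,1} first (outermost), then the two identical ranges in
  // ascending count order: {10,40,0} before {10,40,3}.
  return 0;
}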
diff --git a/chromium/v8/src/debug/debug-evaluate.cc b/chromium/v8/src/debug/debug-evaluate.cc
index 0d8a7b2c7e6..203885143fa 100644
--- a/chromium/v8/src/debug/debug-evaluate.cc
+++ b/chromium/v8/src/debug/debug-evaluate.cc
@@ -101,11 +101,14 @@ MaybeHandle<Object> DebugEvaluate::WithTopmostArguments(Isolate* isolate,
.Check();
// Materialize receiver.
- Handle<String> this_str = factory->this_string();
- JSObject::SetOwnPropertyIgnoreAttributes(
- materialized, this_str, Handle<Object>(it.frame()->receiver(), isolate),
- NONE)
- .Check();
+ Handle<Object> this_value(it.frame()->receiver(), isolate);
+ DCHECK_EQ(it.frame()->IsConstructor(), this_value->IsTheHole(isolate));
+ if (!this_value->IsTheHole(isolate)) {
+ Handle<String> this_str = factory->this_string();
+ JSObject::SetOwnPropertyIgnoreAttributes(materialized, this_str, this_value,
+ NONE)
+ .Check();
+ }
// Use extension object in a debug-evaluate scope.
Handle<ScopeInfo> scope_info =
@@ -383,6 +386,7 @@ bool BytecodeHasNoSideEffect(interpreter::Bytecode bytecode) {
case Bytecode::kLdaKeyedProperty:
case Bytecode::kLdaGlobalInsideTypeof:
case Bytecode::kLdaLookupSlotInsideTypeof:
+ case Bytecode::kGetIterator:
// Arithmetics.
case Bytecode::kAdd:
case Bytecode::kAddSmi:
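The debug-evaluate.cc hunk above stops materializing 'this' when the frame's receiver is still the hole; the new DCHECK asserts this happens exactly for constructor frames (before super() has run). A hedged sketch of the guard, modeling the hole as an empty std::optional rather than V8's real Oddball:

#include <cstdio>
#include <map>
#include <optional>
#include <string>

// Hypothetical model: in a derived-class constructor the receiver is "the
// hole" until super() runs, so a debug-evaluate scope must not bind 'this'.
void MaterializeReceiver(std::map<std::string, std::string>* materialized,
                         const std::optional<std::string>& receiver) {
  if (!receiver.has_value()) return;  // the hole: leave 'this' unbound
  (*materialized)["this"] = *receiver;
}

int main() {
  std::map<std::string, std::string> scope;
  MaterializeReceiver(&scope, std::nullopt);        // constructor frame: skipped
  MaterializeReceiver(&scope, std::string("obj"));  // ordinary frame: bound
  std::printf("bindings: %zu\n", scope.size());     // prints 1
  return 0;
}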
diff --git a/chromium/v8/src/debug/debug-frames.cc b/chromium/v8/src/debug/debug-frames.cc
index 4fe062b277a..78c4c323fcd 100644
--- a/chromium/v8/src/debug/debug-frames.cc
+++ b/chromium/v8/src/debug/debug-frames.cc
@@ -93,10 +93,8 @@ bool FrameInspector::ParameterIsShadowedByContextLocal(
VariableMode mode;
InitializationFlag init_flag;
MaybeAssignedFlag maybe_assigned_flag;
- RequiresBrandCheckFlag requires_brand_check;
return ScopeInfo::ContextSlotIndex(*info, *parameter_name, &mode, &init_flag,
- &maybe_assigned_flag,
- &requires_brand_check) != -1;
+ &maybe_assigned_flag) != -1;
}
RedirectActiveFunctions::RedirectActiveFunctions(SharedFunctionInfo shared,
diff --git a/chromium/v8/src/debug/debug-scopes.cc b/chromium/v8/src/debug/debug-scopes.cc
index 1091e3a8196..4569780d001 100644
--- a/chromium/v8/src/debug/debug-scopes.cc
+++ b/chromium/v8/src/debug/debug-scopes.cc
@@ -774,7 +774,7 @@ void ScopeIterator::VisitLocalScope(const Visitor& visitor, Mode mode) const {
DCHECK(!context_->IsScriptContext());
DCHECK(!context_->IsNativeContext());
DCHECK(!context_->IsWithContext());
- if (!context_->scope_info().CallsSloppyEval()) return;
+ if (!context_->scope_info().SloppyEvalCanExtendVars()) return;
if (context_->extension_object().is_null()) return;
Handle<JSObject> extension(context_->extension_object(), isolate_);
Handle<FixedArray> keys =
@@ -884,10 +884,9 @@ bool ScopeIterator::SetContextVariableValue(Handle<String> variable_name,
VariableMode mode;
InitializationFlag flag;
MaybeAssignedFlag maybe_assigned_flag;
- RequiresBrandCheckFlag requires_brand_check;
- int slot_index = ScopeInfo::ContextSlotIndex(
- context_->scope_info(), *variable_name, &mode, &flag,
- &maybe_assigned_flag, &requires_brand_check);
+ int slot_index =
+ ScopeInfo::ContextSlotIndex(context_->scope_info(), *variable_name, &mode,
+ &flag, &maybe_assigned_flag);
if (slot_index < 0) return false;
context_->set(slot_index, *new_value);
diff --git a/chromium/v8/src/debug/debug-stack-trace-iterator.cc b/chromium/v8/src/debug/debug-stack-trace-iterator.cc
index a0c6fa967c8..4f691e63a22 100644
--- a/chromium/v8/src/debug/debug-stack-trace-iterator.cc
+++ b/chromium/v8/src/debug/debug-stack-trace-iterator.cc
@@ -98,10 +98,9 @@ v8::MaybeLocal<v8::Value> DebugStackTraceIterator::GetReceiver() const {
VariableMode mode;
InitializationFlag flag;
MaybeAssignedFlag maybe_assigned_flag;
- RequiresBrandCheckFlag requires_brand_check;
int slot_index = ScopeInfo::ContextSlotIndex(
context->scope_info(), ReadOnlyRoots(isolate_->heap()).this_string(),
- &mode, &flag, &maybe_assigned_flag, &requires_brand_check);
+ &mode, &flag, &maybe_assigned_flag);
if (slot_index < 0) return v8::MaybeLocal<v8::Value>();
Handle<Object> value = handle(context->get(slot_index), isolate_);
if (value->IsTheHole(isolate_)) return v8::MaybeLocal<v8::Value>();
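The three call sites above (debug-frames.cc, debug-scopes.cc, debug-stack-trace-iterator.cc) all adapt to ScopeInfo::ContextSlotIndex dropping its RequiresBrandCheckFlag out-parameter. A simplified sketch of the call-site pattern with hypothetical stand-in types; the real signature lives on V8's ScopeInfo:

#include <cstdio>
#include <string>
#include <vector>

// Hypothetical stand-ins for V8's out-parameter enums.
enum class VariableMode { kVar, kLet, kConst };
enum class InitializationFlag { kCreatedInitialized, kNeedsInitialization };
enum class MaybeAssignedFlag { kNotAssigned, kMaybeAssigned };

struct Slot {
  std::string name;
  VariableMode mode;
  InitializationFlag init;
  MaybeAssignedFlag assigned;
};

// Mirrors the updated shape: fill the remaining out-parameters on a hit and
// return the slot index, or -1 when the name is not a context slot.
int ContextSlotIndex(const std::vector<Slot>& scope_info,
                     const std::string& name, VariableMode* mode,
                     InitializationFlag* init, MaybeAssignedFlag* assigned) {
  for (size_t i = 0; i < scope_info.size(); ++i) {
    if (scope_info[i].name != name) continue;
    *mode = scope_info[i].mode;
    *init = scope_info[i].init;
    *assigned = scope_info[i].assigned;
    return static_cast<int>(i);
  }
  return -1;
}

int main() {
  std::vector<Slot> scope_info = {{"x", VariableMode::kLet,
                                   InitializationFlag::kNeedsInitialization,
                                   MaybeAssignedFlag::kNotAssigned}};
  VariableMode mode;
  InitializationFlag init;
  MaybeAssignedFlag assigned;
  int slot = ContextSlotIndex(scope_info, "x", &mode, &init, &assigned);
  std::printf("slot index: %d\n", slot);  // prints 0; a miss would print -1
  return 0;
}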
diff --git a/chromium/v8/src/debug/debug-type-profile.cc b/chromium/v8/src/debug/debug-type-profile.cc
index 5ed2dfb116f..c0ba96c2484 100644
--- a/chromium/v8/src/debug/debug-type-profile.cc
+++ b/chromium/v8/src/debug/debug-type-profile.cc
@@ -71,6 +71,13 @@ std::unique_ptr<TypeProfile> TypeProfile::Collect(Isolate* isolate) {
}
void TypeProfile::SelectMode(Isolate* isolate, debug::TypeProfileMode mode) {
+ if (mode != isolate->type_profile_mode()) {
+ // Changing the type profile mode can change the bytecode that would be
+ // generated for a function, which can interfere with lazy source positions,
+ // so just force source position collection whenever there's such a change.
+ isolate->CollectSourcePositionsForAllBytecodeArrays();
+ }
+
HandleScope handle_scope(isolate);
if (mode == debug::TypeProfileMode::kNone) {
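Both SelectMode changes (Coverage::SelectMode earlier and TypeProfile::SelectMode here) add the same guard: switching the mode can change the bytecode generated for a function, so source positions are collected eagerly before the change takes effect. A minimal sketch of that guard pattern, with a hypothetical IsolateModel standing in for V8's Isolate:

#include <cstdio>

enum class TypeProfileMode { kNone, kCollect };

// Hypothetical model of the guard: flush lazily-computed state (source
// positions) whenever a setting that affects bytecode generation changes.
class IsolateModel {
 public:
  void SelectMode(TypeProfileMode mode) {
    if (mode != type_profile_mode_) {
      CollectSourcePositionsForAllBytecodeArrays();
      type_profile_mode_ = mode;
    }
  }

 private:
  void CollectSourcePositionsForAllBytecodeArrays() {
    std::printf("collecting source positions eagerly\n");
  }
  TypeProfileMode type_profile_mode_ = TypeProfileMode::kNone;
};

int main() {
  IsolateModel isolate;
  isolate.SelectMode(TypeProfileMode::kCollect);  // mode changed: collects
  isolate.SelectMode(TypeProfileMode::kCollect);  // unchanged: no-op
  return 0;
}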
diff --git a/chromium/v8/src/debug/debug.cc b/chromium/v8/src/debug/debug.cc
index 9b5200e3430..aa308150acb 100644
--- a/chromium/v8/src/debug/debug.cc
+++ b/chromium/v8/src/debug/debug.cc
@@ -1224,8 +1224,12 @@ void Debug::InstallDebugBreakTrampoline() {
Handle<Code> trampoline = BUILTIN_CODE(isolate_, DebugBreakTrampoline);
std::vector<Handle<JSFunction>> needs_compile;
- std::vector<Handle<AccessorPair>> needs_instantiate;
+ using AccessorPairWithContext =
+ std::pair<Handle<AccessorPair>, Handle<NativeContext>>;
+ std::vector<AccessorPairWithContext> needs_instantiate;
{
+ // Deduplicate {needs_instantiate} by recording all collected AccessorPairs.
+ std::set<AccessorPair> recorded;
HeapObjectIterator iterator(isolate_->heap());
for (HeapObject obj = iterator.Next(); !obj.is_null();
obj = iterator.Next()) {
@@ -1242,11 +1246,26 @@ void Debug::InstallDebugBreakTrampoline() {
} else {
fun.set_code(*trampoline);
}
- } else if (obj.IsAccessorPair()) {
- AccessorPair accessor_pair = AccessorPair::cast(obj);
- if (accessor_pair.getter().IsFunctionTemplateInfo() ||
- accessor_pair.setter().IsFunctionTemplateInfo()) {
- needs_instantiate.push_back(handle(accessor_pair, isolate_));
+ } else if (obj.IsJSObject()) {
+ JSObject object = JSObject::cast(obj);
+ DescriptorArray descriptors = object.map().instance_descriptors();
+
+ for (int i = 0; i < object.map().NumberOfOwnDescriptors(); ++i) {
+ if (descriptors.GetDetails(i).kind() == PropertyKind::kAccessor) {
+ Object value = descriptors.GetStrongValue(i);
+ if (!value.IsAccessorPair()) continue;
+
+ AccessorPair accessor_pair = AccessorPair::cast(value);
+ if (!accessor_pair.getter().IsFunctionTemplateInfo() &&
+ !accessor_pair.setter().IsFunctionTemplateInfo()) {
+ continue;
+ }
+ if (recorded.find(accessor_pair) != recorded.end()) continue;
+
+ needs_instantiate.emplace_back(handle(accessor_pair, isolate_),
+ object.GetCreationContext());
+ recorded.insert(accessor_pair);
+ }
}
}
}
@@ -1254,10 +1273,13 @@ void Debug::InstallDebugBreakTrampoline() {
// Forcibly instantiate all lazy accessor pairs to make sure that they
// properly hit the debug break trampoline.
- for (Handle<AccessorPair> accessor_pair : needs_instantiate) {
+ for (AccessorPairWithContext tuple : needs_instantiate) {
+ Handle<AccessorPair> accessor_pair = tuple.first;
+ Handle<NativeContext> native_context = tuple.second;
if (accessor_pair->getter().IsFunctionTemplateInfo()) {
Handle<JSFunction> fun =
ApiNatives::InstantiateFunction(
+ isolate_, native_context,
handle(FunctionTemplateInfo::cast(accessor_pair->getter()),
isolate_))
.ToHandleChecked();
@@ -1266,6 +1288,7 @@ void Debug::InstallDebugBreakTrampoline() {
if (accessor_pair->setter().IsFunctionTemplateInfo()) {
Handle<JSFunction> fun =
ApiNatives::InstantiateFunction(
+ isolate_, native_context,
handle(FunctionTemplateInfo::cast(accessor_pair->setter()),
isolate_))
.ToHandleChecked();
@@ -1734,9 +1757,6 @@ bool Debug::IsFrameBlackboxed(JavaScriptFrame* frame) {
void Debug::OnException(Handle<Object> exception, Handle<Object> promise,
v8::debug::ExceptionType exception_type) {
- // TODO(kozyatinskiy): regress-662674.js test fails on arm without this.
- if (!AllowJavascriptExecution::IsAllowed(isolate_)) return;
-
Isolate::CatchType catch_type = isolate_->PredictExceptionCatcher();
// Don't notify listener of exceptions that are internal to a desugaring.
@@ -1775,6 +1795,11 @@ void Debug::OnException(Handle<Object> exception, Handle<Object> promise,
if (it.done()) return; // Do not trigger an event with an empty stack.
}
+ // Do not trigger exception event on stack overflow. We cannot perform
+ // anything useful for debugging in that situation.
+ StackLimitCheck stack_limit_check(isolate_);
+ if (stack_limit_check.JsHasOverflowed()) return;
+
DebugScope debug_scope(this);
HandleScope scope(isolate_);
DisableBreak no_recursive_break(this);
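The InstallDebugBreakTrampoline change above switches from scanning AccessorPair heap objects directly to walking JSObject descriptors, pairing each AccessorPair with its creation context and deduplicating via a std::set so a pair reachable from several objects is instantiated only once. A hedged sketch of that collect-once pattern, with hypothetical integer ids in place of AccessorPair and NativeContext handles:

#include <cstdio>
#include <set>
#include <utility>
#include <vector>

using AccessorPairWithContext = std::pair<int /*pair id*/, int /*context id*/>;

// Collect each accessor pair only once, keeping the context of the first
// object it was seen on -- mirroring the {recorded} set in the patch.
std::vector<AccessorPairWithContext> CollectUnique(
    const std::vector<AccessorPairWithContext>& found) {
  std::set<int> recorded;
  std::vector<AccessorPairWithContext> needs_instantiate;
  for (const AccessorPairWithContext& tuple : found) {
    if (recorded.find(tuple.first) != recorded.end()) continue;
    needs_instantiate.push_back(tuple);
    recorded.insert(tuple.first);
  }
  return needs_instantiate;
}

int main() {
  // Pair 7 appears on two objects (contexts 1 and 2); only the first is kept.
  std::vector<AccessorPairWithContext> found = {{7, 1}, {9, 1}, {7, 2}};
  std::vector<AccessorPairWithContext> unique = CollectUnique(found);
  std::printf("unique pairs: %zu\n", unique.size());  // prints 2
  return 0;
}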
diff --git a/chromium/v8/src/debug/debug.h b/chromium/v8/src/debug/debug.h
index 684397400ac..eef89f93725 100644
--- a/chromium/v8/src/debug/debug.h
+++ b/chromium/v8/src/debug/debug.h
@@ -375,6 +375,8 @@ class V8_EXPORT_PRIVATE Debug {
return thread_local_.break_on_next_function_call_;
}
+ inline bool break_disabled() const { return break_disabled_; }
+
DebugFeatureTracker* feature_tracker() { return &feature_tracker_; }
// For functions in which we cannot set a break point, use a canonical
@@ -399,7 +401,6 @@ class V8_EXPORT_PRIVATE Debug {
return is_suppressed_ || !is_active_ ||
isolate_->debug_execution_mode() == DebugInfo::kSideEffects;
}
- inline bool break_disabled() const { return break_disabled_; }
void clear_suspended_generator() {
thread_local_.suspended_generator_ = Smi::kZero;