summaryrefslogtreecommitdiff
path: root/chromium/v8/src/profiler
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/v8/src/profiler')
-rw-r--r--chromium/v8/src/profiler/cpu-profiler.cc21
-rw-r--r--chromium/v8/src/profiler/cpu-profiler.h20
-rw-r--r--chromium/v8/src/profiler/heap-profiler.cc5
-rw-r--r--chromium/v8/src/profiler/heap-profiler.h3
-rw-r--r--chromium/v8/src/profiler/heap-snapshot-generator.cc148
-rw-r--r--chromium/v8/src/profiler/heap-snapshot-generator.h31
-rw-r--r--chromium/v8/src/profiler/profile-generator.cc33
-rw-r--r--chromium/v8/src/profiler/profile-generator.h33
-rw-r--r--chromium/v8/src/profiler/profiler-listener.cc7
-rw-r--r--chromium/v8/src/profiler/profiler-listener.h1
-rw-r--r--chromium/v8/src/profiler/sampling-heap-profiler.cc2
-rw-r--r--chromium/v8/src/profiler/tick-sample.cc20
-rw-r--r--chromium/v8/src/profiler/tick-sample.h1
13 files changed, 270 insertions, 55 deletions
diff --git a/chromium/v8/src/profiler/cpu-profiler.cc b/chromium/v8/src/profiler/cpu-profiler.cc
index eba513b39d5..ba9022be1b0 100644
--- a/chromium/v8/src/profiler/cpu-profiler.cc
+++ b/chromium/v8/src/profiler/cpu-profiler.cc
@@ -104,10 +104,11 @@ ProfilingScope::~ProfilingScope() {
ProfilerEventsProcessor::ProfilerEventsProcessor(
Isolate* isolate, Symbolizer* symbolizer,
- ProfilerCodeObserver* code_observer)
+ ProfilerCodeObserver* code_observer, CpuProfilesCollection* profiles)
: Thread(Thread::Options("v8:ProfEvntProc", kProfilerStackSize)),
symbolizer_(symbolizer),
code_observer_(code_observer),
+ profiles_(profiles),
last_code_event_id_(0),
last_processed_code_event_id_(0),
isolate_(isolate) {
@@ -119,9 +120,8 @@ SamplingEventsProcessor::SamplingEventsProcessor(
Isolate* isolate, Symbolizer* symbolizer,
ProfilerCodeObserver* code_observer, CpuProfilesCollection* profiles,
base::TimeDelta period, bool use_precise_sampling)
- : ProfilerEventsProcessor(isolate, symbolizer, code_observer),
+ : ProfilerEventsProcessor(isolate, symbolizer, code_observer, profiles),
sampler_(new CpuSampler(isolate, this)),
- profiles_(profiles),
period_(period),
use_precise_sampling_(use_precise_sampling) {
sampler_->Start();
@@ -188,7 +188,14 @@ void ProfilerEventsProcessor::StopSynchronously() {
bool ProfilerEventsProcessor::ProcessCodeEvent() {
CodeEventsContainer record;
if (events_buffer_.Dequeue(&record)) {
- code_observer_->CodeEventHandlerInternal(record);
+ if (record.generic.type == CodeEventRecord::NATIVE_CONTEXT_MOVE) {
+ NativeContextMoveEventRecord& nc_record =
+ record.NativeContextMoveEventRecord_;
+ profiles_->UpdateNativeContextAddressForCurrentProfiles(
+ nc_record.from_address, nc_record.to_address);
+ } else {
+ code_observer_->CodeEventHandlerInternal(record);
+ }
last_processed_code_event_id_ = record.generic.order;
return true;
}
@@ -202,6 +209,7 @@ void ProfilerEventsProcessor::CodeEventHandler(
case CodeEventRecord::CODE_MOVE:
case CodeEventRecord::CODE_DISABLE_OPT:
case CodeEventRecord::CODE_DELETE:
+ case CodeEventRecord::NATIVE_CONTEXT_MOVE:
Enqueue(evt_rec);
break;
case CodeEventRecord::CODE_DEOPT: {
@@ -224,7 +232,8 @@ void SamplingEventsProcessor::SymbolizeAndAddToProfiles(
symbolizer_->SymbolizeTickSample(record->sample);
profiles_->AddPathToCurrentProfiles(
record->sample.timestamp, symbolized.stack_trace, symbolized.src_line,
- record->sample.update_stats, record->sample.sampling_interval);
+ record->sample.update_stats, record->sample.sampling_interval,
+ reinterpret_cast<Address>(record->sample.context));
}
ProfilerEventsProcessor::SampleProcessingResult
@@ -371,6 +380,7 @@ void ProfilerCodeObserver::CodeEventHandlerInternal(
}
void ProfilerCodeObserver::CreateEntriesForRuntimeCallStats() {
+#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallStats* rcs = isolate_->counters()->runtime_call_stats();
for (int i = 0; i < RuntimeCallStats::kNumberOfCounters; ++i) {
RuntimeCallCounter* counter = rcs->GetCounter(i);
@@ -379,6 +389,7 @@ void ProfilerCodeObserver::CreateEntriesForRuntimeCallStats() {
"native V8Runtime");
code_map_.AddCode(reinterpret_cast<Address>(counter), entry, 1);
}
+#endif // V8_RUNTIME_CALL_STATS
}
void ProfilerCodeObserver::LogBuiltins() {
diff --git a/chromium/v8/src/profiler/cpu-profiler.h b/chromium/v8/src/profiler/cpu-profiler.h
index d605a8c3d3d..ced37e4ade5 100644
--- a/chromium/v8/src/profiler/cpu-profiler.h
+++ b/chromium/v8/src/profiler/cpu-profiler.h
@@ -37,10 +37,14 @@ class Symbolizer;
V(REPORT_BUILTIN, ReportBuiltinEventRecord) \
V(CODE_DELETE, CodeDeleteEventRecord)
+#define VM_EVENTS_TYPE_LIST(V) \
+ CODE_EVENTS_TYPE_LIST(V) \
+ V(NATIVE_CONTEXT_MOVE, NativeContextMoveEventRecord)
+
class CodeEventRecord {
public:
#define DECLARE_TYPE(type, ignore) type,
- enum Type { NONE = 0, CODE_EVENTS_TYPE_LIST(DECLARE_TYPE) };
+ enum Type { NONE = 0, VM_EVENTS_TYPE_LIST(DECLARE_TYPE) };
#undef DECLARE_TYPE
Type type;
@@ -99,6 +103,13 @@ class ReportBuiltinEventRecord : public CodeEventRecord {
V8_INLINE void UpdateCodeMap(CodeMap* code_map);
};
+// Signals that a native context's address has changed.
+class NativeContextMoveEventRecord : public CodeEventRecord {
+ public:
+ Address from_address;
+ Address to_address;
+};
+
// A record type for sending samples from the main thread/signal handler to the
// profiling thread.
class TickSampleEventRecord {
@@ -130,7 +141,7 @@ class CodeEventsContainer {
union {
CodeEventRecord generic;
#define DECLARE_CLASS(ignore, type) type type##_;
- CODE_EVENTS_TYPE_LIST(DECLARE_CLASS)
+ VM_EVENTS_TYPE_LIST(DECLARE_CLASS)
#undef DECLARE_CLASS
};
};
@@ -174,7 +185,8 @@ class V8_EXPORT_PRIVATE ProfilerEventsProcessor : public base::Thread,
protected:
ProfilerEventsProcessor(Isolate* isolate, Symbolizer* symbolizer,
- ProfilerCodeObserver* code_observer);
+ ProfilerCodeObserver* code_observer,
+ CpuProfilesCollection* profiles);
// Called from events processing thread (Run() method.)
bool ProcessCodeEvent();
@@ -188,6 +200,7 @@ class V8_EXPORT_PRIVATE ProfilerEventsProcessor : public base::Thread,
Symbolizer* symbolizer_;
ProfilerCodeObserver* code_observer_;
+ CpuProfilesCollection* profiles_;
std::atomic_bool running_{true};
base::ConditionVariable running_cond_;
base::Mutex running_mutex_;
@@ -238,7 +251,6 @@ class V8_EXPORT_PRIVATE SamplingEventsProcessor
SamplingCircularQueue<TickSampleEventRecord,
kTickSampleQueueLength> ticks_buffer_;
std::unique_ptr<sampler::Sampler> sampler_;
- CpuProfilesCollection* profiles_;
base::TimeDelta period_; // Samples & code events processing period.
const bool use_precise_sampling_; // Whether or not busy-waiting is used for
// low sampling intervals on Windows.
diff --git a/chromium/v8/src/profiler/heap-profiler.cc b/chromium/v8/src/profiler/heap-profiler.cc
index 8a7ed34d46e..dbe48876d2e 100644
--- a/chromium/v8/src/profiler/heap-profiler.cc
+++ b/chromium/v8/src/profiler/heap-profiler.cc
@@ -81,9 +81,10 @@ v8::EmbedderGraph::Node::Detachedness HeapProfiler::GetDetachedness(
HeapSnapshot* HeapProfiler::TakeSnapshot(
v8::ActivityControl* control,
v8::HeapProfiler::ObjectNameResolver* resolver,
- bool treat_global_objects_as_roots) {
+ bool treat_global_objects_as_roots, bool capture_numeric_value) {
is_taking_snapshot_ = true;
- HeapSnapshot* result = new HeapSnapshot(this, treat_global_objects_as_roots);
+ HeapSnapshot* result = new HeapSnapshot(this, treat_global_objects_as_roots,
+ capture_numeric_value);
{
HeapSnapshotGenerator generator(result, control, resolver, heap());
if (!generator.GenerateSnapshot()) {
diff --git a/chromium/v8/src/profiler/heap-profiler.h b/chromium/v8/src/profiler/heap-profiler.h
index ebf737523cc..fc867e66324 100644
--- a/chromium/v8/src/profiler/heap-profiler.h
+++ b/chromium/v8/src/profiler/heap-profiler.h
@@ -33,7 +33,8 @@ class HeapProfiler : public HeapObjectAllocationTracker {
HeapSnapshot* TakeSnapshot(v8::ActivityControl* control,
v8::HeapProfiler::ObjectNameResolver* resolver,
- bool treat_global_objects_as_roots);
+ bool treat_global_objects_as_roots,
+ bool capture_numeric_value);
bool StartSamplingHeapProfiler(uint64_t sample_interval, int stack_depth,
v8::HeapProfiler::SamplingFlags);
diff --git a/chromium/v8/src/profiler/heap-snapshot-generator.cc b/chromium/v8/src/profiler/heap-snapshot-generator.cc
index 9cc26fa3e20..da4e57fad9a 100644
--- a/chromium/v8/src/profiler/heap-snapshot-generator.cc
+++ b/chromium/v8/src/profiler/heap-snapshot-generator.cc
@@ -183,9 +183,11 @@ const char* HeapEntry::TypeAsString() const {
}
}
-HeapSnapshot::HeapSnapshot(HeapProfiler* profiler, bool global_objects_as_roots)
+HeapSnapshot::HeapSnapshot(HeapProfiler* profiler, bool global_objects_as_roots,
+ bool capture_numeric_value)
: profiler_(profiler),
- treat_global_objects_as_roots_(global_objects_as_roots) {
+ treat_global_objects_as_roots_(global_objects_as_roots),
+ capture_numeric_value_(capture_numeric_value) {
// It is very important to keep objects that form a heap snapshot
// as small as possible. Check assumptions about data structure sizes.
STATIC_ASSERT(kSystemPointerSize != 4 || sizeof(HeapGraphEdge) == 12);
@@ -387,8 +389,7 @@ SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
return entry_info.id;
}
entry->value = reinterpret_cast<void*>(entries_.size());
- SnapshotObjectId id = next_id_;
- next_id_ += kObjectIdStep;
+ SnapshotObjectId id = get_next_id();
entries_.push_back(EntryInfo(id, addr, size, accessed));
DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
return id;
@@ -553,6 +554,16 @@ HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
return AddEntry(HeapObject::cast(Object(reinterpret_cast<Address>(ptr))));
}
+HeapEntry* V8HeapExplorer::AllocateEntry(Smi smi) {
+ SnapshotObjectId id = heap_object_map_->get_next_id();
+ HeapEntry* entry =
+ snapshot_->AddEntry(HeapEntry::kHeapNumber, "smi number", id, 0, 0);
+ // XXX: Smis do not appear in CombinedHeapObjectIterator, so we need to
+ // extract the references here
+ ExtractNumberReference(entry, smi);
+ return entry;
+}
+
void V8HeapExplorer::ExtractLocation(HeapEntry* entry, HeapObject object) {
if (object.IsJSFunction()) {
JSFunction func = JSFunction::cast(object);
@@ -638,7 +649,7 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) {
object.IsByteArray()) {
return AddEntry(object, HeapEntry::kArray, "");
} else if (object.IsHeapNumber()) {
- return AddEntry(object, HeapEntry::kHeapNumber, "number");
+ return AddEntry(object, HeapEntry::kHeapNumber, "heap number");
}
return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
@@ -721,6 +732,13 @@ class IndexedReferencesExtractor : public ObjectVisitor {
ObjectSlot end) override {
VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
}
+ void VisitMapPointer(HeapObject object) override {
+ if (generator_->visited_fields_[0]) {
+ generator_->visited_fields_[0] = false;
+ } else {
+ VisitHeapObjectImpl(object.map(), 0);
+ }
+ }
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) override {
// [start,end) must be a sub-region of [parent_start_, parent_end), i.e.
@@ -830,6 +848,10 @@ void V8HeapExplorer::ExtractReferences(HeapEntry* entry, HeapObject obj) {
ExtractEphemeronHashTableReferences(entry, EphemeronHashTable::cast(obj));
} else if (obj.IsFixedArray()) {
ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
+ } else if (obj.IsHeapNumber()) {
+ if (snapshot_->capture_numeric_value()) {
+ ExtractNumberReference(entry, obj);
+ }
}
}
@@ -867,7 +889,7 @@ void V8HeapExplorer::ExtractJSObjectReferences(HeapEntry* entry,
} else if (obj.IsJSFunction()) {
JSFunction js_fun = JSFunction::cast(js_obj);
if (js_fun.has_prototype_slot()) {
- Object proto_or_map = js_fun.prototype_or_initial_map();
+ Object proto_or_map = js_fun.prototype_or_initial_map(kAcquireLoad);
if (!proto_or_map.IsTheHole(isolate)) {
if (!proto_or_map.IsMap()) {
SetPropertyReference(entry, roots.prototype_string(), proto_or_map,
@@ -1246,6 +1268,11 @@ class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
HeapEntry::kNative, "system / JSArrayBufferData",
size_);
}
+ HeapEntry* AllocateEntry(Smi smi) override {
+ DCHECK(false);
+ return nullptr;
+ }
+
private:
size_t size_;
V8HeapExplorer* explorer_;
@@ -1291,6 +1318,30 @@ void V8HeapExplorer::ExtractFixedArrayReferences(HeapEntry* entry,
}
}
+void V8HeapExplorer::ExtractNumberReference(HeapEntry* entry, Object number) {
+ DCHECK(number.IsNumber());
+
+ // Must be large enough to fit any double, int, or size_t.
+ char arr[32];
+ Vector<char> buffer(arr, arraysize(arr));
+
+ const char* string;
+ if (number.IsSmi()) {
+ int int_value = Smi::ToInt(number);
+ string = IntToCString(int_value, buffer);
+ } else {
+ double double_value = HeapNumber::cast(number).value();
+ string = DoubleToCString(double_value, buffer);
+ }
+
+ const char* name = names_->GetCopy(string);
+
+ SnapshotObjectId id = heap_object_map_->get_next_id();
+ HeapEntry* child_entry =
+ snapshot_->AddEntry(HeapEntry::kString, name, id, 0, 0);
+ entry->SetNamedReference(HeapGraphEdge::kInternal, "value", child_entry);
+}
+
void V8HeapExplorer::ExtractFeedbackVectorReferences(
HeapEntry* entry, FeedbackVector feedback_vector) {
MaybeObject code = feedback_vector.maybe_optimized_code();
@@ -1345,8 +1396,10 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject js_obj,
PropertyDetails details = descs.GetDetails(i);
switch (details.location()) {
case kField: {
- Representation r = details.representation();
- if (r.IsSmi() || r.IsDouble()) break;
+ if (!snapshot_->capture_numeric_value()) {
+ Representation r = details.representation();
+ if (r.IsSmi() || r.IsDouble()) break;
+ }
Name k = descs.GetKey(i);
FieldIndex field_index = FieldIndex::ForDescriptor(js_obj.map(), i);
@@ -1476,9 +1529,15 @@ String V8HeapExplorer::GetConstructorName(JSObject object) {
}
HeapEntry* V8HeapExplorer::GetEntry(Object obj) {
- return obj.IsHeapObject() ? generator_->FindOrAddEntry(
- reinterpret_cast<void*>(obj.ptr()), this)
- : nullptr;
+ if (obj.IsHeapObject()) {
+ return generator_->FindOrAddEntry(reinterpret_cast<void*>(obj.ptr()), this);
+ }
+
+ DCHECK(obj.IsSmi());
+ if (!snapshot_->capture_numeric_value()) {
+ return nullptr;
+ }
+ return generator_->FindOrAddEntry(Smi::cast(obj), this);
}
class RootsReferencesExtractor : public RootVisitor {
@@ -1500,6 +1559,7 @@ class RootsReferencesExtractor : public RootVisitor {
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
+ DCHECK(!MapWord::IsPacked(p.Relaxed_Load().ptr()));
VisitRootPointer(root, description, p);
}
}
@@ -1649,23 +1709,25 @@ void V8HeapExplorer::SetElementReference(HeapEntry* parent_entry, int index,
void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry,
const char* reference_name,
Object child_obj, int field_offset) {
- HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry == nullptr) return;
- if (IsEssentialObject(child_obj)) {
- parent_entry->SetNamedReference(HeapGraphEdge::kInternal, reference_name,
- child_entry);
+ if (!IsEssentialObject(child_obj)) {
+ return;
}
+ HeapEntry* child_entry = GetEntry(child_obj);
+ DCHECK_NOT_NULL(child_entry);
+ parent_entry->SetNamedReference(HeapGraphEdge::kInternal, reference_name,
+ child_entry);
MarkVisitedField(field_offset);
}
void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry, int index,
Object child_obj, int field_offset) {
- HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry == nullptr) return;
- if (IsEssentialObject(child_obj)) {
- parent_entry->SetNamedReference(HeapGraphEdge::kInternal,
- names_->GetName(index), child_entry);
+ if (!IsEssentialObject(child_obj)) {
+ return;
}
+ HeapEntry* child_entry = GetEntry(child_obj);
+ DCHECK_NOT_NULL(child_entry);
+ parent_entry->SetNamedReference(HeapGraphEdge::kInternal,
+ names_->GetName(index), child_entry);
MarkVisitedField(field_offset);
}
@@ -1673,9 +1735,13 @@ void V8HeapExplorer::SetHiddenReference(HeapObject parent_obj,
HeapEntry* parent_entry, int index,
Object child_obj, int field_offset) {
DCHECK_EQ(parent_entry, GetEntry(parent_obj));
+ DCHECK(!MapWord::IsPacked(child_obj.ptr()));
+ if (!IsEssentialObject(child_obj)) {
+ return;
+ }
HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry != nullptr && IsEssentialObject(child_obj) &&
- IsEssentialHiddenReference(parent_obj, field_offset)) {
+ DCHECK_NOT_NULL(child_entry);
+ if (IsEssentialHiddenReference(parent_obj, field_offset)) {
parent_entry->SetIndexedReference(HeapGraphEdge::kHidden, index,
child_entry);
}
@@ -1684,23 +1750,25 @@ void V8HeapExplorer::SetHiddenReference(HeapObject parent_obj,
void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry,
const char* reference_name,
Object child_obj, int field_offset) {
- HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry == nullptr) return;
- if (IsEssentialObject(child_obj)) {
- parent_entry->SetNamedReference(HeapGraphEdge::kWeak, reference_name,
- child_entry);
+ if (!IsEssentialObject(child_obj)) {
+ return;
}
+ HeapEntry* child_entry = GetEntry(child_obj);
+ DCHECK_NOT_NULL(child_entry);
+ parent_entry->SetNamedReference(HeapGraphEdge::kWeak, reference_name,
+ child_entry);
MarkVisitedField(field_offset);
}
void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry, int index,
Object child_obj, int field_offset) {
- HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry == nullptr) return;
- if (IsEssentialObject(child_obj)) {
- parent_entry->SetNamedReference(
- HeapGraphEdge::kWeak, names_->GetFormatted("%d", index), child_entry);
+ if (!IsEssentialObject(child_obj)) {
+ return;
}
+ HeapEntry* child_entry = GetEntry(child_obj);
+ DCHECK_NOT_NULL(child_entry);
+ parent_entry->SetNamedReference(
+ HeapGraphEdge::kWeak, names_->GetFormatted("%d", index), child_entry);
MarkVisitedField(field_offset);
}
@@ -1758,6 +1826,13 @@ void V8HeapExplorer::SetGcRootsReference(Root root) {
void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
bool is_weak, Object child_obj) {
+ if (child_obj.IsSmi()) {
+ // TODO(arenevier): if we handle smis here, the snapshot gets 2 to 3 times
+ // slower on large heaps. According to perf, the bulk of the extra work
+ // happens in the TemplateHashMapImpl::Probe method, when trying to get
+ // names->GetFormatted("%d / %s", index, description)
+ return;
+ }
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry == nullptr) return;
const char* name = GetStrongGcSubrootName(child_obj);
@@ -1834,6 +1909,7 @@ class GlobalObjectsEnumerator : public RootVisitor {
void VisitRootPointersImpl(Root root, const char* description, TSlot start,
TSlot end) {
for (TSlot p = start; p < end; ++p) {
+ DCHECK(!MapWord::IsPacked(p.Relaxed_Load(isolate_).ptr()));
Object o = p.load(isolate_);
if (!o.IsNativeContext(isolate_)) continue;
JSObject proxy = Context::cast(o).global_proxy();
@@ -1934,6 +2010,7 @@ class EmbedderGraphEntriesAllocator : public HeapEntriesAllocator {
names_(snapshot_->profiler()->names()),
heap_object_map_(snapshot_->profiler()->heap_object_map()) {}
HeapEntry* AllocateEntry(HeapThing ptr) override;
+ HeapEntry* AllocateEntry(Smi smi) override;
private:
HeapSnapshot* snapshot_;
@@ -1984,6 +2061,11 @@ HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(HeapThing ptr) {
return heap_entry;
}
+HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(Smi smi) {
+ DCHECK(false);
+ return nullptr;
+}
+
NativeObjectsExplorer::NativeObjectsExplorer(
HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
: isolate_(
diff --git a/chromium/v8/src/profiler/heap-snapshot-generator.h b/chromium/v8/src/profiler/heap-snapshot-generator.h
index 12fd9450a9b..2ab13a99bf3 100644
--- a/chromium/v8/src/profiler/heap-snapshot-generator.h
+++ b/chromium/v8/src/profiler/heap-snapshot-generator.h
@@ -188,7 +188,8 @@ class HeapEntry {
// HeapSnapshotGenerator fills in a HeapSnapshot.
class HeapSnapshot {
public:
- explicit HeapSnapshot(HeapProfiler* profiler, bool global_objects_as_roots);
+ explicit HeapSnapshot(HeapProfiler* profiler, bool global_objects_as_roots,
+ bool capture_numeric_value);
HeapSnapshot(const HeapSnapshot&) = delete;
HeapSnapshot& operator=(const HeapSnapshot&) = delete;
void Delete();
@@ -213,6 +214,7 @@ class HeapSnapshot {
bool treat_global_objects_as_roots() const {
return treat_global_objects_as_roots_;
}
+ bool capture_numeric_value() const { return capture_numeric_value_; }
void AddLocation(HeapEntry* entry, int scriptId, int line, int col);
HeapEntry* AddEntry(HeapEntry::Type type,
@@ -245,6 +247,7 @@ class HeapSnapshot {
std::vector<SourceLocation> locations_;
SnapshotObjectId max_snapshot_js_object_id_ = -1;
bool treat_global_objects_as_roots_;
+ bool capture_numeric_value_;
};
@@ -277,6 +280,10 @@ class HeapObjectsMap {
SnapshotObjectId last_assigned_id() const {
return next_id_ - kObjectIdStep;
}
+ SnapshotObjectId get_next_id() {
+ next_id_ += kObjectIdStep;
+ return next_id_ - kObjectIdStep;
+ }
void StopHeapObjectsTracking();
SnapshotObjectId PushHeapObjectsStats(OutputStream* stream,
@@ -322,6 +329,7 @@ class HeapEntriesAllocator {
public:
virtual ~HeapEntriesAllocator() = default;
virtual HeapEntry* AllocateEntry(HeapThing ptr) = 0;
+ virtual HeapEntry* AllocateEntry(Smi smi) = 0;
};
class SnapshottingProgressReportingInterface {
@@ -342,6 +350,7 @@ class V8_EXPORT_PRIVATE V8HeapExplorer : public HeapEntriesAllocator {
V8HeapExplorer& operator=(const V8HeapExplorer&) = delete;
HeapEntry* AllocateEntry(HeapThing ptr) override;
+ HeapEntry* AllocateEntry(Smi smi) override;
int EstimateObjectsCount();
bool IterateAndExtractReferences(HeapSnapshotGenerator* generator);
void CollectGlobalObjectsTags();
@@ -397,6 +406,7 @@ class V8_EXPORT_PRIVATE V8HeapExplorer : public HeapEntriesAllocator {
void ExtractJSGeneratorObjectReferences(HeapEntry* entry,
JSGeneratorObject generator);
void ExtractFixedArrayReferences(HeapEntry* entry, FixedArray array);
+ void ExtractNumberReference(HeapEntry* entry, Object number);
void ExtractFeedbackVectorReferences(HeapEntry* entry,
FeedbackVector feedback_vector);
void ExtractDescriptorArrayReferences(HeapEntry* entry,
@@ -501,6 +511,9 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
// The HeapEntriesMap instance is used to track a mapping between
// real heap objects and their representations in heap snapshots.
using HeapEntriesMap = std::unordered_map<HeapThing, HeapEntry*>;
+ // The SmiEntriesMap instance is used to track a mapping between smi and
+ // their representations in heap snapshots.
+ using SmiEntriesMap = std::unordered_map<int, HeapEntry*>;
HeapSnapshotGenerator(HeapSnapshot* snapshot,
v8::ActivityControl* control,
@@ -515,16 +528,31 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
return it != entries_map_.end() ? it->second : nullptr;
}
+ HeapEntry* FindEntry(Smi smi) {
+ auto it = smis_map_.find(smi.value());
+ return it != smis_map_.end() ? it->second : nullptr;
+ }
+
HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
return entries_map_.emplace(ptr, allocator->AllocateEntry(ptr))
.first->second;
}
+ HeapEntry* AddEntry(Smi smi, HeapEntriesAllocator* allocator) {
+ return smis_map_.emplace(smi.value(), allocator->AllocateEntry(smi))
+ .first->second;
+ }
+
HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
HeapEntry* entry = FindEntry(ptr);
return entry != nullptr ? entry : AddEntry(ptr, allocator);
}
+ HeapEntry* FindOrAddEntry(Smi smi, HeapEntriesAllocator* allocator) {
+ HeapEntry* entry = FindEntry(smi);
+ return entry != nullptr ? entry : AddEntry(smi, allocator);
+ }
+
private:
bool FillReferences();
void ProgressStep() override;
@@ -537,6 +565,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
NativeObjectsExplorer dom_explorer_;
// Mapping from HeapThing pointers to HeapEntry indices.
HeapEntriesMap entries_map_;
+ SmiEntriesMap smis_map_;
// Used during snapshot generation.
int progress_counter_;
int progress_total_;
diff --git a/chromium/v8/src/profiler/profile-generator.cc b/chromium/v8/src/profiler/profile-generator.cc
index 375079de3e8..93075d4f7c2 100644
--- a/chromium/v8/src/profiler/profile-generator.cc
+++ b/chromium/v8/src/profiler/profile-generator.cc
@@ -533,6 +533,12 @@ void ProfileTree::TraverseDepthFirst(Callback* callback) {
}
}
+void ContextFilter::OnMoveEvent(Address from_address, Address to_address) {
+ if (native_context_address() != from_address) return;
+
+ set_native_context_address(to_address);
+}
+
using v8::tracing::TracedValue;
std::atomic<uint32_t> CpuProfile::last_id_;
@@ -557,6 +563,13 @@ CpuProfile::CpuProfile(CpuProfiler* profiler, const char* title,
value->SetDouble("startTime", start_time_.since_origin().InMicroseconds());
TRACE_EVENT_SAMPLE_WITH_ID1(TRACE_DISABLED_BY_DEFAULT("v8.cpu_profiler"),
"Profile", id_, "data", std::move(value));
+
+ DisallowHeapAllocation no_gc;
+ if (options_.has_filter_context()) {
+ i::Address raw_filter_context =
+ reinterpret_cast<i::Address>(options_.raw_filter_context());
+ context_filter_.set_native_context_address(raw_filter_context);
+ }
}
bool CpuProfile::CheckSubsample(base::TimeDelta source_sampling_interval) {
@@ -706,6 +719,8 @@ void CpuProfile::StreamPendingTraceEvents() {
void CpuProfile::FinishProfile() {
end_time_ = base::TimeTicks::HighResolutionNow();
+ // Stop tracking context movements after profiling stops.
+ context_filter_.set_native_context_address(kNullAddress);
StreamPendingTraceEvents();
auto value = TracedValue::Create();
// The endTime timestamp is not converted to Perfetto's clock domain and will
@@ -942,14 +957,26 @@ base::TimeDelta CpuProfilesCollection::GetCommonSamplingInterval() const {
void CpuProfilesCollection::AddPathToCurrentProfiles(
base::TimeTicks timestamp, const ProfileStackTrace& path, int src_line,
- bool update_stats, base::TimeDelta sampling_interval) {
+ bool update_stats, base::TimeDelta sampling_interval,
+ Address native_context_address) {
// As starting / stopping profiles is rare relatively to this
// method, we don't bother minimizing the duration of lock holding,
// e.g. copying contents of the list to a local vector.
current_profiles_semaphore_.Wait();
for (const std::unique_ptr<CpuProfile>& profile : current_profiles_) {
- profile->AddPath(timestamp, path, src_line, update_stats,
- sampling_interval);
+ if (profile->context_filter().Accept(native_context_address)) {
+ profile->AddPath(timestamp, path, src_line, update_stats,
+ sampling_interval);
+ }
+ }
+ current_profiles_semaphore_.Signal();
+}
+
+void CpuProfilesCollection::UpdateNativeContextAddressForCurrentProfiles(
+ Address from, Address to) {
+ current_profiles_semaphore_.Wait();
+ for (const std::unique_ptr<CpuProfile>& profile : current_profiles_) {
+ profile->context_filter().OnMoveEvent(from, to);
}
current_profiles_semaphore_.Signal();
}
diff --git a/chromium/v8/src/profiler/profile-generator.h b/chromium/v8/src/profiler/profile-generator.h
index 551dfdf5917..c4bffa945a0 100644
--- a/chromium/v8/src/profiler/profile-generator.h
+++ b/chromium/v8/src/profiler/profile-generator.h
@@ -237,6 +237,31 @@ struct CodeEntryAndLineNumber {
using ProfileStackTrace = std::vector<CodeEntryAndLineNumber>;
+// Filters stack frames from sources other than a target native context.
+class ContextFilter {
+ public:
+ explicit ContextFilter(Address native_context_address = kNullAddress)
+ : native_context_address_(native_context_address) {}
+
+ // Invoked when a native context has changed address.
+ void OnMoveEvent(Address from_address, Address to_address);
+
+ bool Accept(Address native_context_address) const {
+ if (native_context_address_ == kNullAddress) return true;
+ return (native_context_address & ~kHeapObjectTag) ==
+ native_context_address_;
+ }
+
+ // Update the context's tracked address based on VM-thread events.
+ void set_native_context_address(Address address) {
+ native_context_address_ = address;
+ }
+ Address native_context_address() const { return native_context_address_; }
+
+ private:
+ Address native_context_address_;
+};
+
class ProfileTree;
class V8_EXPORT_PRIVATE ProfileNode {
@@ -386,6 +411,7 @@ class CpuProfile {
base::TimeTicks start_time() const { return start_time_; }
base::TimeTicks end_time() const { return end_time_; }
CpuProfiler* cpu_profiler() const { return profiler_; }
+ ContextFilter& context_filter() { return context_filter_; }
void UpdateTicksScale();
@@ -397,6 +423,7 @@ class CpuProfile {
const char* title_;
const CpuProfilingOptions options_;
std::unique_ptr<DiscardedSamplesDelegate> delegate_;
+ ContextFilter context_filter_;
base::TimeTicks start_time_;
base::TimeTicks end_time_;
std::deque<SampleInfo> samples_;
@@ -486,7 +513,11 @@ class V8_EXPORT_PRIVATE CpuProfilesCollection {
void AddPathToCurrentProfiles(base::TimeTicks timestamp,
const ProfileStackTrace& path, int src_line,
bool update_stats,
- base::TimeDelta sampling_interval);
+ base::TimeDelta sampling_interval,
+ Address native_context_address = kNullAddress);
+
+ // Called from profile generator thread.
+ void UpdateNativeContextAddressForCurrentProfiles(Address from, Address to);
// Limits the number of profiles that can be simultaneously collected.
static const int kMaxSimultaneousProfiles = 100;
diff --git a/chromium/v8/src/profiler/profiler-listener.cc b/chromium/v8/src/profiler/profiler-listener.cc
index 8b253fb4729..a2cfb8b07b2 100644
--- a/chromium/v8/src/profiler/profiler-listener.cc
+++ b/chromium/v8/src/profiler/profiler-listener.cc
@@ -302,6 +302,13 @@ void ProfilerListener::CodeMoveEvent(AbstractCode from, AbstractCode to) {
DispatchCodeEvent(evt_rec);
}
+void ProfilerListener::NativeContextMoveEvent(Address from, Address to) {
+ CodeEventsContainer evt_rec(CodeEventRecord::NATIVE_CONTEXT_MOVE);
+ evt_rec.NativeContextMoveEventRecord_.from_address = from;
+ evt_rec.NativeContextMoveEventRecord_.to_address = to;
+ DispatchCodeEvent(evt_rec);
+}
+
void ProfilerListener::CodeDisableOptEvent(Handle<AbstractCode> code,
Handle<SharedFunctionInfo> shared) {
CodeEventsContainer evt_rec(CodeEventRecord::CODE_DISABLE_OPT);
diff --git a/chromium/v8/src/profiler/profiler-listener.h b/chromium/v8/src/profiler/profiler-listener.h
index 49e7db32baa..50a9b818936 100644
--- a/chromium/v8/src/profiler/profiler-listener.h
+++ b/chromium/v8/src/profiler/profiler-listener.h
@@ -59,6 +59,7 @@ class V8_EXPORT_PRIVATE ProfilerListener : public CodeEventListener,
Handle<String> source) override;
void CodeMoveEvent(AbstractCode from, AbstractCode to) override;
void SharedFunctionInfoMoveEvent(Address from, Address to) override {}
+ void NativeContextMoveEvent(Address from, Address to) override;
void CodeMovingGCEvent() override {}
void CodeDisableOptEvent(Handle<AbstractCode> code,
Handle<SharedFunctionInfo> shared) override;
diff --git a/chromium/v8/src/profiler/sampling-heap-profiler.cc b/chromium/v8/src/profiler/sampling-heap-profiler.cc
index aef0170bb48..4bef9793abf 100644
--- a/chromium/v8/src/profiler/sampling-heap-profiler.cc
+++ b/chromium/v8/src/profiler/sampling-heap-profiler.cc
@@ -75,7 +75,7 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
DisallowGarbageCollection no_gc;
// Check if the area is iterable by confirming that it starts with a map.
- DCHECK((*ObjectSlot(soon_object)).IsMap());
+ DCHECK(HeapObject::FromAddress(soon_object).map().IsMap());
HandleScope scope(isolate_);
HeapObject heap_object = HeapObject::FromAddress(soon_object);
diff --git a/chromium/v8/src/profiler/tick-sample.cc b/chromium/v8/src/profiler/tick-sample.cc
index 638aa5545a1..253b80d19e2 100644
--- a/chromium/v8/src/profiler/tick-sample.cc
+++ b/chromium/v8/src/profiler/tick-sample.cc
@@ -7,14 +7,14 @@
#include <cinttypes>
#include "include/v8-profiler.h"
+#include "src/base/sanitizer/asan.h"
+#include "src/base/sanitizer/msan.h"
#include "src/execution/frames-inl.h"
#include "src/execution/simulator.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/heap-inl.h" // For Heap::code_range.
#include "src/logging/counters.h"
#include "src/profiler/profiler-stats.h"
-#include "src/sanitizer/asan.h"
-#include "src/sanitizer/msan.h"
namespace v8 {
namespace internal {
@@ -177,6 +177,7 @@ DISABLE_ASAN void TickSample::Init(Isolate* v8_isolate,
pc = regs.pc;
frames_count = static_cast<unsigned>(info.frames_count);
has_external_callback = info.external_callback_entry != nullptr;
+ context = info.context;
if (has_external_callback) {
external_callback_entry = info.external_callback_entry;
} else if (frames_count) {
@@ -209,6 +210,7 @@ bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs,
sample_info->frames_count = 0;
sample_info->vm_state = isolate->current_vm_state();
sample_info->external_callback_entry = nullptr;
+ sample_info->context = nullptr;
if (sample_info->vm_state == GC) return true;
i::Address js_entry_sp = isolate->js_entry_sp();
@@ -232,7 +234,7 @@ bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs,
// TODO(petermarshall): Code range is always null on ia32 so this check for
// IsNoFrameRegion will never actually run there.
if (regs->pc &&
- isolate->heap()->memory_allocator()->code_range().contains(
+ isolate->heap()->code_region().contains(
reinterpret_cast<i::Address>(regs->pc)) &&
IsNoFrameRegion(reinterpret_cast<i::Address>(regs->pc))) {
// The frame is not setup, so it'd be hard to iterate the stack. Bailout.
@@ -278,6 +280,13 @@ bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs,
reinterpret_cast<i::Address>(regs->lr),
js_entry_sp);
+ Context top_context = isolate->context();
+ if (top_context.ptr() != i::Context::kNoContext &&
+ top_context.ptr() != i::Context::kInvalidContext) {
+ NativeContext top_native_context = top_context.native_context();
+ sample_info->context = reinterpret_cast<void*>(top_native_context.ptr());
+ }
+
if (it.done()) return true;
size_t i = 0;
@@ -287,15 +296,18 @@ bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs,
frames[i] = reinterpret_cast<void*>(isolate->c_function());
i++;
}
-
+#ifdef V8_RUNTIME_CALL_STATS
i::RuntimeCallTimer* timer =
isolate->counters()->runtime_call_stats()->current_timer();
+#endif // V8_RUNTIME_CALL_STATS
for (; !it.done() && i < frames_limit; it.Advance()) {
+#ifdef V8_RUNTIME_CALL_STATS
while (timer && reinterpret_cast<i::Address>(timer) < it.frame()->fp() &&
i < frames_limit) {
frames[i++] = reinterpret_cast<void*>(timer->counter());
timer = timer->parent();
}
+#endif // V8_RUNTIME_CALL_STATS
if (i == frames_limit) break;
if (it.frame()->is_interpreted()) {
diff --git a/chromium/v8/src/profiler/tick-sample.h b/chromium/v8/src/profiler/tick-sample.h
index 777c3d192dc..1bfcb7d0971 100644
--- a/chromium/v8/src/profiler/tick-sample.h
+++ b/chromium/v8/src/profiler/tick-sample.h
@@ -90,6 +90,7 @@ struct V8_EXPORT TickSample {
static const unsigned kMaxFramesCountLog2 = 8;
static const unsigned kMaxFramesCount = (1 << kMaxFramesCountLog2) - 1;
void* stack[kMaxFramesCount]; // Call stack.
+ void* context = nullptr; // Address of the incumbent native context.
unsigned frames_count : kMaxFramesCountLog2; // Number of captured frames.
bool has_external_callback : 1;
bool update_stats : 1; // Whether the sample should update aggregated stats.