summaryrefslogtreecommitdiff
path: root/deps/v8/src/heap/mark-compact-inl.h
diff options
context:
space:
mode:
authorMichaël Zasso <targos@protonmail.com>2019-03-12 09:01:49 +0100
committerMichaël Zasso <targos@protonmail.com>2019-03-14 18:49:21 +0100
commit7b48713334469818661fe276cf571de9c7899f2d (patch)
tree4dbda49ac88db76ce09dc330a0cb587e68e139ba /deps/v8/src/heap/mark-compact-inl.h
parent8549ac09b256666cf5275224ec58fab9939ff32e (diff)
downloadnode-new-7b48713334469818661fe276cf571de9c7899f2d.tar.gz
deps: update V8 to 7.3.492.25
PR-URL: https://github.com/nodejs/node/pull/25852 Reviewed-By: Ujjwal Sharma <usharma1998@gmail.com> Reviewed-By: Matteo Collina <matteo.collina@gmail.com> Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
Diffstat (limited to 'deps/v8/src/heap/mark-compact-inl.h')
-rw-r--r--deps/v8/src/heap/mark-compact-inl.h433
1 file changed, 257 insertions, 176 deletions
diff --git a/deps/v8/src/heap/mark-compact-inl.h b/deps/v8/src/heap/mark-compact-inl.h
index 449ca43e50..cb73fedca7 100644
--- a/deps/v8/src/heap/mark-compact-inl.h
+++ b/deps/v8/src/heap/mark-compact-inl.h
@@ -5,19 +5,22 @@
#ifndef V8_HEAP_MARK_COMPACT_INL_H_
#define V8_HEAP_MARK_COMPACT_INL_H_
+#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/heap/mark-compact.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/remembered-set.h"
#include "src/objects/js-collection-inl.h"
+#include "src/objects/js-weak-refs-inl.h"
+#include "src/objects/slots-inl.h"
+#include "src/transitions.h"
namespace v8 {
namespace internal {
template <typename ConcreteState, AccessMode access_mode>
-bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(
- HeapObject* obj) {
- MemoryChunk* p = MemoryChunk::FromAddress(obj->address());
+bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(HeapObject obj) {
+ MemoryChunk* p = MemoryChunk::FromHeapObject(obj);
MarkBit markbit = MarkBitFrom(p, obj->address());
if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
static_cast<ConcreteState*>(this)->IncrementLiveBytes(p, obj->Size());
@@ -25,14 +28,13 @@ bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(
}
template <typename ConcreteState, AccessMode access_mode>
-bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(
- HeapObject* obj) {
+bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(HeapObject obj) {
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
}
template <typename ConcreteState, AccessMode access_mode>
bool MarkingStateBase<ConcreteState, access_mode>::WhiteToBlack(
- HeapObject* obj) {
+ HeapObject obj) {
return WhiteToGrey(obj) && GreyToBlack(obj);
}
@@ -43,24 +45,71 @@ MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkingState* marking_state)
: heap_(collector->heap()),
collector_(collector),
- marking_state_(marking_state) {}
+ marking_state_(marking_state),
+ mark_compact_epoch_(collector->epoch()) {}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitBytecodeArray(Map* map,
- BytecodeArray* array) {
+ MarkingState>::VisitBytecodeArray(Map map,
+ BytecodeArray array) {
int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
- array->MakeOlder();
+
+ if (!heap_->is_current_gc_forced()) {
+ array->MakeOlder();
+ }
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitFixedArray(Map* map,
- FixedArray* object) {
+ MarkingState>::VisitDescriptorArray(Map map,
+ DescriptorArray array) {
+ int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
+ VisitPointers(array, array->GetFirstPointerSlot(),
+ array->GetDescriptorSlot(0));
+ VisitDescriptors(array, array->number_of_descriptors());
+ return size;
+}
+
+template <FixedArrayVisitationMode fixed_array_mode,
+ TraceRetainingPathMode retaining_path_mode, typename MarkingState>
+int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
+ VisitSharedFunctionInfo(Map map, SharedFunctionInfo shared_info) {
+ int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
+ SharedFunctionInfo::BodyDescriptor::IterateBody(map, shared_info, size, this);
+
+ // If the SharedFunctionInfo has old bytecode, mark it as flushable,
+ // otherwise visit the function data field strongly.
+ if (shared_info->ShouldFlushBytecode()) {
+ collector_->AddBytecodeFlushingCandidate(shared_info);
+ } else {
+ VisitPointer(shared_info,
+ HeapObject::RawField(shared_info,
+ SharedFunctionInfo::kFunctionDataOffset));
+ }
+ return size;
+}
+
+template <FixedArrayVisitationMode fixed_array_mode,
+ TraceRetainingPathMode retaining_path_mode, typename MarkingState>
+int MarkingVisitor<fixed_array_mode, retaining_path_mode,
+ MarkingState>::VisitJSFunction(Map map, JSFunction object) {
+ int size = Parent::VisitJSFunction(map, object);
+
+ // Check if the JSFunction needs reset due to bytecode being flushed.
+ if (FLAG_flush_bytecode && object->NeedsResetDueToFlushedBytecode()) {
+ collector_->AddFlushedJSFunction(object);
+ }
+
+ return size;
+}
+template <FixedArrayVisitationMode fixed_array_mode,
+ TraceRetainingPathMode retaining_path_mode, typename MarkingState>
+int MarkingVisitor<fixed_array_mode, retaining_path_mode,
+ MarkingState>::VisitFixedArray(Map map, FixedArray object) {
return (fixed_array_mode == FixedArrayVisitationMode::kRegular)
? Parent::VisitFixedArray(map, object)
: VisitFixedArrayIncremental(map, object);
@@ -71,10 +120,10 @@ template <FixedArrayVisitationMode fixed_array_mode,
template <typename T>
V8_INLINE int
MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitEmbedderTracingSubclass(Map* map,
- T* object) {
+ MarkingState>::VisitEmbedderTracingSubclass(Map map, T object) {
if (heap_->local_embedder_heap_tracer()->InUse()) {
- heap_->TracePossibleWrapper(object);
+ marking_worklist()->embedder()->Push(MarkCompactCollectorBase::kMainThread,
+ object);
}
int size = T::BodyDescriptor::SizeOf(map, object);
T::BodyDescriptor::IterateBody(map, object, size, this);
@@ -84,57 +133,56 @@ MarkingVisitor<fixed_array_mode, retaining_path_mode,
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitJSApiObject(Map* map, JSObject* object) {
+ MarkingState>::VisitJSApiObject(Map map, JSObject object) {
return VisitEmbedderTracingSubclass(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitJSArrayBuffer(Map* map,
- JSArrayBuffer* object) {
+ MarkingState>::VisitJSArrayBuffer(Map map,
+ JSArrayBuffer object) {
return VisitEmbedderTracingSubclass(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitJSDataView(Map* map,
- JSDataView* object) {
+ MarkingState>::VisitJSDataView(Map map, JSDataView object) {
return VisitEmbedderTracingSubclass(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitJSTypedArray(Map* map,
- JSTypedArray* object) {
+ MarkingState>::VisitJSTypedArray(Map map,
+ JSTypedArray object) {
return VisitEmbedderTracingSubclass(map, object);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
- VisitEphemeronHashTable(Map* map, EphemeronHashTable* table) {
+ VisitEphemeronHashTable(Map map, EphemeronHashTable table) {
collector_->AddEphemeronHashTable(table);
for (int i = 0; i < table->Capacity(); i++) {
- Object** key_slot =
+ ObjectSlot key_slot =
table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
- HeapObject* key = HeapObject::cast(table->KeyAt(i));
+ HeapObject key = HeapObject::cast(table->KeyAt(i));
collector_->RecordSlot(table, key_slot, key);
- Object** value_slot =
+ ObjectSlot value_slot =
table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
if (marking_state()->IsBlackOrGrey(key)) {
VisitPointer(table, value_slot);
} else {
- Object* value_obj = *value_slot;
+ Object value_obj = *value_slot;
if (value_obj->IsHeapObject()) {
- HeapObject* value = HeapObject::cast(value_obj);
+ HeapObject value = HeapObject::cast(value_obj);
collector_->RecordSlot(table, value_slot, value);
// Revisit ephemerons with both key and value unreachable at end
@@ -152,23 +200,37 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitMap(Map* map, Map* object) {
- // When map collection is enabled we have to mark through map's transitions
- // and back pointers in a special way to make these links weak.
- int size = Map::BodyDescriptor::SizeOf(map, object);
- if (object->CanTransition()) {
- MarkMapContents(object);
- } else {
- Map::BodyDescriptor::IterateBody(map, object, size, this);
+ MarkingState>::VisitMap(Map meta_map, Map map) {
+ int size = Map::BodyDescriptor::SizeOf(meta_map, map);
+ if (map->CanTransition()) {
+ // Maps that can transition share their descriptor arrays and require
+ // special visiting logic to avoid memory leaks.
+ // Since descriptor arrays are potentially shared, ensure that only the
+ // descriptors that belong to this map are marked. The first time a
+ // non-empty descriptor array is marked, its header is also visited. The
+ // slot holding the descriptor array will be implicitly recorded when the
+ // pointer fields of this map are visited.
+ DescriptorArray descriptors = map->instance_descriptors();
+ MarkDescriptorArrayBlack(map, descriptors);
+ int number_of_own_descriptors = map->NumberOfOwnDescriptors();
+ if (number_of_own_descriptors) {
+ DCHECK_LE(number_of_own_descriptors,
+ descriptors->number_of_descriptors());
+ VisitDescriptors(descriptors, number_of_own_descriptors);
+ }
+ // Mark the pointer fields of the Map. Since the transitions array has
+ // been marked already, it is fine that one of these fields contains a
+ // pointer to it.
}
+ Map::BodyDescriptor::IterateBody(meta_map, map, size, this);
return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitTransitionArray(Map* map,
- TransitionArray* array) {
+ MarkingState>::VisitTransitionArray(Map map,
+ TransitionArray array) {
int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
collector_->AddTransitionArray(array);
@@ -177,46 +239,90 @@ int MarkingVisitor<fixed_array_mode, retaining_path_mode,
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
-void MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitPointer(HeapObject* host, Object** p) {
- if (!(*p)->IsHeapObject()) return;
- HeapObject* target_object = HeapObject::cast(*p);
- collector_->RecordSlot(host, p, target_object);
- MarkObject(host, target_object);
+int MarkingVisitor<fixed_array_mode, retaining_path_mode,
+ MarkingState>::VisitJSWeakRef(Map map, JSWeakRef weak_ref) {
+ if (weak_ref->target()->IsHeapObject()) {
+ HeapObject target = HeapObject::cast(weak_ref->target());
+ if (marking_state()->IsBlackOrGrey(target)) {
+ // Record the slot inside the JSWeakRef, since the IterateBody below
+ // won't visit it.
+ ObjectSlot slot =
+ HeapObject::RawField(weak_ref, JSWeakCell::kTargetOffset);
+ collector_->RecordSlot(weak_ref, slot, target);
+ } else {
+ // JSWeakRef points to a potentially dead object. We have to process
+ // them when we know the liveness of the whole transitive closure.
+ collector_->AddWeakRef(weak_ref);
+ }
+ }
+ int size = JSWeakRef::BodyDescriptor::SizeOf(map, weak_ref);
+ JSWeakRef::BodyDescriptor::IterateBody(map, weak_ref, size, this);
+ return size;
+}
+
+template <FixedArrayVisitationMode fixed_array_mode,
+ TraceRetainingPathMode retaining_path_mode, typename MarkingState>
+int MarkingVisitor<fixed_array_mode, retaining_path_mode,
+ MarkingState>::VisitJSWeakCell(Map map,
+ JSWeakCell weak_cell) {
+ if (weak_cell->target()->IsHeapObject()) {
+ HeapObject target = HeapObject::cast(weak_cell->target());
+ if (marking_state()->IsBlackOrGrey(target)) {
+ // Record the slot inside the JSWeakCell, since the IterateBody below
+ // won't visit it.
+ ObjectSlot slot =
+ HeapObject::RawField(weak_cell, JSWeakCell::kTargetOffset);
+ collector_->RecordSlot(weak_cell, slot, target);
+ } else {
+ // JSWeakCell points to a potentially dead object. We have to process
+ // them when we know the liveness of the whole transitive closure.
+ collector_->AddWeakCell(weak_cell);
+ }
+ }
+ int size = JSWeakCell::BodyDescriptor::SizeOf(map, weak_cell);
+ JSWeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
+ return size;
}
+// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
+// method template arguments
+template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitPointer(HeapObject* host,
- MaybeObject** p) {
- HeapObject* target_object;
- if ((*p)->GetHeapObjectIfStrong(&target_object)) {
- collector_->RecordSlot(host, reinterpret_cast<HeapObjectReference**>(p),
- target_object);
+ MarkingState>::VisitPointerImpl(HeapObject host,
+ TSlot slot) {
+ static_assert(std::is_same<TSlot, ObjectSlot>::value ||
+ std::is_same<TSlot, MaybeObjectSlot>::value,
+ "Only ObjectSlot and MaybeObjectSlot are expected here");
+ typename TSlot::TObject object = *slot;
+ HeapObject target_object;
+ if (object.GetHeapObjectIfStrong(&target_object)) {
+ collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
MarkObject(host, target_object);
- } else if ((*p)->GetHeapObjectIfWeak(&target_object)) {
+ } else if (TSlot::kCanBeWeak && object.GetHeapObjectIfWeak(&target_object)) {
if (marking_state()->IsBlackOrGrey(target_object)) {
// Weak references with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause.
- collector_->RecordSlot(host, reinterpret_cast<HeapObjectReference**>(p),
- target_object);
+ collector_->RecordSlot(host, HeapObjectSlot(slot), target_object);
} else {
// If we do not know about liveness of values of weak cells, we have to
// process them when we know the liveness of the whole transitive
// closure.
- collector_->AddWeakReference(host,
- reinterpret_cast<HeapObjectReference**>(p));
+ collector_->AddWeakReference(host, HeapObjectSlot(slot));
}
}
}
+// class template arguments
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
+// method template arguments
+template <typename TSlot>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitPointers(HeapObject* host,
- Object** start, Object** end) {
- for (Object** p = start; p < end; p++) {
+ MarkingState>::VisitPointersImpl(HeapObject host,
+ TSlot start, TSlot end) {
+ for (TSlot p = start; p < end; ++p) {
VisitPointer(host, p);
}
}
@@ -224,60 +330,56 @@ void MarkingVisitor<fixed_array_mode, retaining_path_mode,
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitPointers(HeapObject* host,
- MaybeObject** start,
- MaybeObject** end) {
- for (MaybeObject** p = start; p < end; p++) {
- VisitPointer(host, p);
- }
-}
-
-template <FixedArrayVisitationMode fixed_array_mode,
- TraceRetainingPathMode retaining_path_mode, typename MarkingState>
-void MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitEmbeddedPointer(Code* host,
+ MarkingState>::VisitEmbeddedPointer(Code host,
RelocInfo* rinfo) {
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
- HeapObject* object = HeapObject::cast(rinfo->target_object());
+ HeapObject object = HeapObject::cast(rinfo->target_object());
collector_->RecordRelocSlot(host, rinfo, object);
- if (!host->IsWeakObject(object)) {
- MarkObject(host, object);
- } else if (!marking_state()->IsBlackOrGrey(object)) {
- collector_->AddWeakObjectInCode(object, host);
+ if (!marking_state()->IsBlackOrGrey(object)) {
+ if (host->IsWeakObject(object)) {
+ collector_->AddWeakObjectInCode(object, host);
+ } else {
+ MarkObject(host, object);
+ }
}
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::VisitCodeTarget(Code* host,
+ MarkingState>::VisitCodeTarget(Code host,
RelocInfo* rinfo) {
DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
- Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
collector_->RecordRelocSlot(host, rinfo, target);
MarkObject(host, target);
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
-bool MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::MarkObjectWithoutPush(HeapObject* host,
- HeapObject* object) {
- if (marking_state()->WhiteToBlack(object)) {
+void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
+ MarkDescriptorArrayBlack(HeapObject host, DescriptorArray descriptors) {
+ // Note that WhiteToBlack is not sufficient here because it fails if the
+ // descriptor array is grey. So we need to do two steps: WhiteToGrey and
+ // GreyToBlack. Alternatively, we could check WhiteToGrey || WhiteToBlack.
+ if (marking_state()->WhiteToGrey(descriptors)) {
if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
V8_UNLIKELY(FLAG_track_retaining_path)) {
- heap_->AddRetainer(host, object);
+ heap_->AddRetainer(host, descriptors);
}
- return true;
}
- return false;
+ if (marking_state()->GreyToBlack(descriptors)) {
+ VisitPointers(descriptors, descriptors->GetFirstPointerSlot(),
+ descriptors->GetDescriptorSlot(0));
+ }
+ DCHECK(marking_state()->IsBlack(descriptors));
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::MarkObject(HeapObject* host,
- HeapObject* object) {
+ MarkingState>::MarkObject(HeapObject host,
+ HeapObject object) {
if (marking_state()->WhiteToGrey(object)) {
marking_worklist()->Push(object);
if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
@@ -290,81 +392,53 @@ void MarkingVisitor<fixed_array_mode, retaining_path_mode,
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
- VisitFixedArrayIncremental(Map* map, FixedArray* object) {
- MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
- int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
+ VisitFixedArrayIncremental(Map map, FixedArray object) {
+ MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
+ int size = FixedArray::BodyDescriptor::SizeOf(map, object);
if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
- DCHECK(!FLAG_use_marking_progress_bar ||
- chunk->owner()->identity() == LO_SPACE);
- // When using a progress bar for large fixed arrays, scan only a chunk of
- // the array and try to push it onto the marking deque again until it is
- // fully scanned. Fall back to scanning it through to the end in case this
- // fails because of a full deque.
- int start_offset =
+ DCHECK(FLAG_use_marking_progress_bar);
+ DCHECK(heap_->IsLargeObject(object));
+ int start =
Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
- if (start_offset < object_size) {
- // Ensure that the object is either grey or black before pushing it
- // into marking worklist.
- marking_state()->WhiteToGrey(object);
- if (FLAG_concurrent_marking) {
- marking_worklist()->PushBailout(object);
- } else {
+ int end = Min(size, start + kProgressBarScanningChunk);
+ if (start < end) {
+ VisitPointers(object, HeapObject::RawField(object, start),
+ HeapObject::RawField(object, end));
+ chunk->set_progress_bar(end);
+ if (end < size) {
+ DCHECK(marking_state()->IsBlack(object));
+ // The object can be pushed back onto the marking worklist only after
+ // progress bar was updated.
marking_worklist()->Push(object);
- }
- DCHECK(marking_state()->IsGrey(object) ||
- marking_state()->IsBlack(object));
-
- int end_offset =
- Min(object_size, start_offset + kProgressBarScanningChunk);
- int already_scanned_offset = start_offset;
- VisitPointers(object, HeapObject::RawField(object, start_offset),
- HeapObject::RawField(object, end_offset));
- start_offset = end_offset;
- end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
- chunk->set_progress_bar(start_offset);
- if (start_offset < object_size) {
heap_->incremental_marking()->NotifyIncompleteScanOfObject(
- object_size - (start_offset - already_scanned_offset));
+ size - (end - start));
}
}
} else {
- FixedArray::BodyDescriptor::IterateBody(map, object, object_size, this);
+ FixedArray::BodyDescriptor::IterateBody(map, object, size, this);
}
- return object_size;
+ return size;
}
template <FixedArrayVisitationMode fixed_array_mode,
TraceRetainingPathMode retaining_path_mode, typename MarkingState>
-void MarkingVisitor<fixed_array_mode, retaining_path_mode,
- MarkingState>::MarkMapContents(Map* map) {
- // Since descriptor arrays are potentially shared, ensure that only the
- // descriptors that belong to this map are marked. The first time a non-empty
- // descriptor array is marked, its header is also visited. The slot holding
- // the descriptor array will be implicitly recorded when the pointer fields of
- // this map are visited. Prototype maps don't keep track of transitions, so
- // just mark the entire descriptor array.
- if (!map->is_prototype_map()) {
- DescriptorArray* descriptors = map->instance_descriptors();
- if (MarkObjectWithoutPush(map, descriptors) && descriptors->length() > 0) {
- VisitPointers(descriptors, descriptors->GetFirstElementAddress(),
- descriptors->GetDescriptorEndSlot(0));
- }
- int start = 0;
- int end = map->NumberOfOwnDescriptors();
- if (start < end) {
- VisitPointers(descriptors, descriptors->GetDescriptorStartSlot(start),
- descriptors->GetDescriptorEndSlot(end));
- }
+void MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
+ VisitDescriptors(DescriptorArray descriptors,
+ int number_of_own_descriptors) {
+ // Updating the number of marked descriptor is supported only for black
+ // descriptor arrays.
+ DCHECK(marking_state()->IsBlack(descriptors));
+ int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
+ int16_t old_marked = descriptors->UpdateNumberOfMarkedDescriptors(
+ mark_compact_epoch_, new_marked);
+ if (old_marked < new_marked) {
+ VisitPointers(descriptors,
+ MaybeObjectSlot(descriptors->GetDescriptorSlot(old_marked)),
+ MaybeObjectSlot(descriptors->GetDescriptorSlot(new_marked)));
}
-
- // Mark the pointer fields of the Map. Since the transitions array has
- // been marked already, it is fine that one of these fields contains a
- // pointer to it.
- Map::BodyDescriptor::IterateBody(
- map->map(), map, Map::BodyDescriptor::SizeOf(map->map(), map), this);
}
-void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
+void MarkCompactCollector::MarkObject(HeapObject host, HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
@@ -373,7 +447,7 @@ void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
}
}
-void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
+void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
@@ -384,7 +458,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
#ifdef ENABLE_MINOR_MC
-void MinorMarkCompactCollector::MarkRootObject(HeapObject* obj) {
+void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
if (Heap::InNewSpace(obj) && non_atomic_marking_state_.WhiteToGrey(obj)) {
worklist_->Push(kMainThread, obj);
}
@@ -392,7 +466,7 @@ void MinorMarkCompactCollector::MarkRootObject(HeapObject* obj) {
#endif
-void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
+void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklist()->Push(obj);
if (V8_UNLIKELY(FLAG_track_retaining_path)) {
@@ -401,23 +475,34 @@ void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
}
}
-void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
- HeapObject* target) {
- RecordSlot(object, reinterpret_cast<HeapObjectReference**>(slot), target);
+void MarkCompactCollector::RecordSlot(HeapObject object, ObjectSlot slot,
+ HeapObject target) {
+ RecordSlot(object, HeapObjectSlot(slot), target);
}
-void MarkCompactCollector::RecordSlot(HeapObject* object,
- HeapObjectReference** slot,
- HeapObject* target) {
- Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
- Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
+void MarkCompactCollector::RecordSlot(HeapObject object, HeapObjectSlot slot,
+ HeapObject target) {
+ Page* target_page = Page::FromHeapObject(target);
+ Page* source_page = Page::FromHeapObject(object);
if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
!source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
- RememberedSet<OLD_TO_OLD>::Insert(source_page,
- reinterpret_cast<Address>(slot));
+ RememberedSet<OLD_TO_OLD>::Insert(source_page, slot.address());
}
}
+void MarkCompactCollector::AddTransitionArray(TransitionArray array) {
+ weak_objects_.transition_arrays.Push(kMainThread, array);
+}
+
+void MarkCompactCollector::AddBytecodeFlushingCandidate(
+ SharedFunctionInfo flush_candidate) {
+ weak_objects_.bytecode_flushing_candidates.Push(kMainThread, flush_candidate);
+}
+
+void MarkCompactCollector::AddFlushedJSFunction(JSFunction flushed_function) {
+ weak_objects_.flushed_js_functions.Push(kMainThread, flushed_function);
+}
+
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
Address start)
@@ -434,8 +519,6 @@ LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
cell_base_ = it_.CurrentCellBase();
current_cell_ = *it_.CurrentCell();
AdvanceToNextValidObject();
- } else {
- current_object_ = nullptr;
}
}
@@ -457,11 +540,11 @@ operator++(int) {
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
while (!it_.Done()) {
- HeapObject* object = nullptr;
+ HeapObject object;
int size = 0;
while (current_cell_ != 0) {
uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
- Address addr = cell_base_ + trailing_zeros * kPointerSize;
+ Address addr = cell_base_ + trailing_zeros * kTaggedSize;
// Clear the first bit of the found object..
current_cell_ &= ~(1u << trailing_zeros);
@@ -476,7 +559,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
// that case we can return immediately.
if (!it_.Advance()) {
DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
- current_object_ = nullptr;
+ current_object_ = HeapObject();
return;
}
cell_base_ = it_.CurrentCellBase();
@@ -485,20 +568,19 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
second_bit_index = 1u << (trailing_zeros + 1);
}
- Map* map = nullptr;
+ Map map;
if (current_cell_ & second_bit_index) {
// We found a black object. If the black object is within a black area,
// make sure that we skip all set bits in the black area until the
// object ends.
- HeapObject* black_object = HeapObject::FromAddress(addr);
- map =
- base::AsAtomicPointer::Relaxed_Load(reinterpret_cast<Map**>(addr));
+ HeapObject black_object = HeapObject::FromAddress(addr);
+ map = Map::cast(ObjectSlot(addr).Acquire_Load());
size = black_object->SizeFromMap(map);
- Address end = addr + size - kPointerSize;
+ Address end = addr + size - kTaggedSize;
// One word filler objects do not borrow the second mark bit. We have
// to jump over the advancing and clearing part.
// Note that we know that we are at a one word filler when
- // object_start + object_size - kPointerSize == object_start.
+ // object_start + object_size - kTaggedSize == object_start.
if (addr != end) {
DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
@@ -519,14 +601,13 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
object = black_object;
}
} else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
- map =
- base::AsAtomicPointer::Relaxed_Load(reinterpret_cast<Map**>(addr));
+ map = Map::cast(ObjectSlot(addr).Acquire_Load());
object = HeapObject::FromAddress(addr);
size = object->SizeFromMap(map);
}
// We found a live object.
- if (object != nullptr) {
+ if (!object.is_null()) {
// Do not use IsFiller() here. This may cause a data race for reading
// out the instance type when a new map concurrently is written into
// this object while iterating over the object.
@@ -538,7 +619,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
// 2) Left trimming may leave black or grey fillers behind because we
// do not clear the old location of the object start.
// We filter these objects out in the iterator.
- object = nullptr;
+ object = HeapObject();
} else {
break;
}
@@ -551,13 +632,13 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
current_cell_ = *it_.CurrentCell();
}
}
- if (object != nullptr) {
+ if (!object.is_null()) {
current_object_ = object;
current_size_ = size;
return;
}
}
- current_object_ = nullptr;
+ current_object_ = HeapObject();
}
template <LiveObjectIterationMode mode>