Diffstat (limited to 'deps/v8/src/heap/cppgc/compactor.cc')
-rw-r--r-- | deps/v8/src/heap/cppgc/compactor.cc | 21
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/deps/v8/src/heap/cppgc/compactor.cc b/deps/v8/src/heap/cppgc/compactor.cc
index f4498e7fbc..5f68705014 100644
--- a/deps/v8/src/heap/cppgc/compactor.cc
+++ b/deps/v8/src/heap/cppgc/compactor.cc
@@ -15,6 +15,7 @@
 #include "src/heap/cppgc/heap-base.h"
 #include "src/heap/cppgc/heap-page.h"
 #include "src/heap/cppgc/heap-space.h"
+#include "src/heap/cppgc/memory.h"
 #include "src/heap/cppgc/object-poisoner.h"
 #include "src/heap/cppgc/raw-heap.h"
 #include "src/heap/cppgc/stats-collector.h"
@@ -129,7 +130,7 @@ void MovableReferences::AddOrFilter(MovableReference* slot) {
             interior_movable_references_.find(slot));
   interior_movable_references_.emplace(slot, nullptr);
 #if DEBUG
-  interior_slot_to_object_.emplace(slot, slot_header.Payload());
+  interior_slot_to_object_.emplace(slot, slot_header.ObjectStart());
 #endif  // DEBUG
 }
 
@@ -144,8 +145,8 @@ void MovableReferences::Relocate(Address from, Address to) {
   // find the corresponding slot A.x. Object A may be moved already and the
   // memory may have been freed, which would result in a crash.
   if (!interior_movable_references_.empty()) {
-    const HeapObjectHeader& header = HeapObjectHeader::FromPayload(to);
-    const size_t size = header.GetSize() - sizeof(HeapObjectHeader);
+    const HeapObjectHeader& header = HeapObjectHeader::FromObject(to);
+    const size_t size = header.ObjectSize();
     RelocateInteriorReferences(from, to, size);
   }
 
@@ -275,14 +276,14 @@ class CompactionState final {
     // Return remaining available pages to the free page pool, decommitting
     // them from the pagefile.
     for (NormalPage* page : available_pages_) {
-      SET_MEMORY_INACCESSIBLE(page->PayloadStart(), page->PayloadSize());
+      SetMemoryInaccessible(page->PayloadStart(), page->PayloadSize());
       NormalPage::Destroy(page);
     }
   }
 
   void FinishCompactingPage(NormalPage* page) {
-#if DEBUG || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER) || \
-    defined(MEMORY_SANITIZER)
+#if DEBUG || defined(V8_USE_MEMORY_SANITIZER) || \
+    defined(V8_USE_ADDRESS_SANITIZER)
     // Zap the unused portion, until it is either compacted into or freed.
     if (current_page_ != page) {
       ZapMemory(page->PayloadStart(), page->PayloadSize());
@@ -303,7 +304,7 @@ class CompactionState final {
         current_page_->PayloadSize() - used_bytes_in_current_page_;
     Address payload = current_page_->PayloadStart();
     Address free_start = payload + used_bytes_in_current_page_;
-    SET_MEMORY_INACCESSIBLE(free_start, freed_size);
+    SetMemoryInaccessible(free_start, freed_size);
     space_->free_list().Add({free_start, freed_size});
     current_page_->object_start_bitmap().SetBit(free_start);
   }
@@ -329,7 +330,7 @@ void CompactPage(NormalPage* page, CompactionState& compaction_state) {
        header_address < page->PayloadEnd();) {
     HeapObjectHeader* header =
         reinterpret_cast<HeapObjectHeader*>(header_address);
-    size_t size = header->GetSize();
+    size_t size = header->AllocatedSize();
     DCHECK_GT(size, 0u);
     DCHECK_LT(size, kPageSize);
 
@@ -349,8 +350,8 @@ void CompactPage(NormalPage* page, CompactionState& compaction_state) {
       // As compaction is under way, leave the freed memory accessible
       // while compacting the rest of the page. We just zap the payload
      // to catch out other finalizers trying to access it.
-#if DEBUG || defined(LEAK_SANITIZER) || defined(ADDRESS_SANITIZER) || \
-    defined(MEMORY_SANITIZER)
+#if DEBUG || defined(V8_USE_MEMORY_SANITIZER) || \
+    defined(V8_USE_ADDRESS_SANITIZER)
       ZapMemory(header, size);
 #endif
       header_address += size;
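
For context, the mechanical part of this change is a rename of the HeapObjectHeader accessors used by the compactor: Payload()/FromPayload()/GetSize() become ObjectStart()/FromObject()/AllocatedSize(), with ObjectSize() replacing the manual `GetSize() - sizeof(HeapObjectHeader)` computation, and the SET_MEMORY_INACCESSIBLE macro is replaced by the SetMemoryInaccessible() helper pulled in via src/heap/cppgc/memory.h. The sketch below is not the real cppgc header, just a minimal stand-alone mock (hypothetical `sketch` namespace, simplified layout) showing the size relationships between the new names as implied by this diff.

// Minimal illustrative mock -- NOT the real cppgc::internal::HeapObjectHeader.
// It only encodes the size relationships this diff relies on.
#include <cstddef>
#include <cstdint>
#include <new>

namespace sketch {

class HeapObjectHeader {
 public:
  explicit HeapObjectHeader(size_t allocated_size)
      : allocated_size_(allocated_size) {}

  // New name for the old GetSize(): header plus object payload.
  size_t AllocatedSize() const { return allocated_size_; }

  // New helper replacing `GetSize() - sizeof(HeapObjectHeader)`.
  size_t ObjectSize() const {
    return allocated_size_ - sizeof(HeapObjectHeader);
  }

  // New name for the old Payload(): first byte after the header.
  uint8_t* ObjectStart() {
    return reinterpret_cast<uint8_t*>(this) + sizeof(HeapObjectHeader);
  }

  // New name for the old FromPayload(): map an object pointer back to its header.
  static HeapObjectHeader& FromObject(void* object) {
    return *reinterpret_cast<HeapObjectHeader*>(
        static_cast<uint8_t*>(object) - sizeof(HeapObjectHeader));
  }

 private:
  size_t allocated_size_;
};

}  // namespace sketch

int main() {
  alignas(sketch::HeapObjectHeader) unsigned char storage[64];
  auto* header = new (storage) sketch::HeapObjectHeader(sizeof(storage));
  // ObjectSize() excludes the header, matching the replaced expression
  // `header.GetSize() - sizeof(HeapObjectHeader)` in MovableReferences::Relocate.
  bool ok =
      header->ObjectSize() == sizeof(storage) - sizeof(sketch::HeapObjectHeader) &&
      &sketch::HeapObjectHeader::FromObject(header->ObjectStart()) == header;
  return ok ? 0 : 1;
}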