Diffstat (limited to 'deps/v8/src/heap/paged-spaces.cc')
 deps/v8/src/heap/paged-spaces.cc | 48 ++++++++++++++++++++++++++++++------------------
 1 file changed, 30 insertions(+), 18 deletions(-)
diff --git a/deps/v8/src/heap/paged-spaces.cc b/deps/v8/src/heap/paged-spaces.cc
index 8543f109ed..021cf940d7 100644
--- a/deps/v8/src/heap/paged-spaces.cc
+++ b/deps/v8/src/heap/paged-spaces.cc
@@ -15,7 +15,7 @@
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/paged-spaces-inl.h"
#include "src/heap/read-only-heap.h"
-#include "src/logging/counters.h"
+#include "src/logging/runtime-call-stats-scope.h"
#include "src/objects/string.h"
#include "src/utils/utils.h"
@@ -81,10 +81,10 @@ Page* PagedSpace::InitializePage(MemoryChunk* chunk) {
PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
Executability executable, FreeList* free_list,
- LocalSpaceKind local_space_kind)
+ CompactionSpaceKind compaction_space_kind)
: SpaceWithLinearArea(heap, space, free_list),
executable_(executable),
- local_space_kind_(local_space_kind) {
+ compaction_space_kind_(compaction_space_kind) {
area_size_ = MemoryChunkLayout::AllocatableMemoryInMemoryChunk(space);
accounting_stats_.Clear();
}
@@ -105,7 +105,6 @@ void PagedSpace::RefillFreeList() {
identity() != MAP_SPACE) {
return;
}
- DCHECK_IMPLIES(is_local_space(), is_compaction_space());
MarkCompactCollector* collector = heap()->mark_compact_collector();
size_t added = 0;
@@ -123,7 +122,8 @@ void PagedSpace::RefillFreeList() {
// Also merge old-to-new remembered sets if not scavenging because of
// data races: One thread might iterate remembered set, while another
// thread merges them.
- if (local_space_kind() != LocalSpaceKind::kCompactionSpaceForScavenge) {
+ if (compaction_space_kind() !=
+ CompactionSpaceKind::kCompactionSpaceForScavenge) {
p->MergeOldToNewRememberedSets();
}
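
The comment above states the constraint this hunk preserves under the rename: merging mutates a page's old-to-new remembered set, so it must not run while the scavenger may still be iterating that set. A minimal sketch of the hazard, with a std::set standing in for V8's RememberedSet (names below are illustrative, not V8 API):

#include <set>

std::set<int> slots;  // stand-in for a page's old-to-new remembered set

void Iterate() {  // e.g. the scavenger visiting recorded slots
  for (int slot : slots) { (void)slot; }
}

void Merge(const std::set<int>& other) {  // e.g. RefillFreeList merging sets
  slots.insert(other.begin(), other.end());  // racing with Iterate() is UB
}

Running Iterate() and Merge() concurrently on the same container is a data race; skipping the merge for kCompactionSpaceForScavenge avoids the analogous race on the real remembered sets.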
@@ -150,7 +150,7 @@ void PagedSpace::RefillFreeList() {
}
}
-void PagedSpace::MergeLocalSpace(LocalSpace* other) {
+void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
base::MutexGuard guard(mutex());
DCHECK(identity() == other->identity());
@@ -275,9 +275,12 @@ void PagedSpace::SetTopAndLimit(Address top, Address limit) {
Page::FromAddress(top) == Page::FromAddress(limit - 1));
BasicMemoryChunk::UpdateHighWaterMark(allocation_info_.top());
allocation_info_.Reset(top, limit);
- // The order of the following two stores is important.
- original_limit_.store(limit, std::memory_order_relaxed);
- original_top_.store(top, std::memory_order_release);
+
+ base::Optional<base::SharedMutexGuard<base::kExclusive>> optional_guard;
+ if (!is_compaction_space())
+ optional_guard.emplace(&pending_allocation_mutex_);
+ original_limit_ = limit;
+ original_top_ = top;
}
size_t PagedSpace::ShrinkPageToHighWaterMark(Page* page) {
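
The hunk above replaces the relaxed/release store pair, whose ordering was what let a concurrent reader observe a consistent (top, limit) snapshot, with an exclusive lock on pending_allocation_mutex_, skipped for compaction spaces since those are not observed concurrently. A reader would then take the same mutex in shared mode. A minimal sketch of the intended protocol, with std::shared_mutex standing in for V8's base::SharedMutex and Contains() as a hypothetical reader:

#include <cstdint>
#include <mutex>
#include <shared_mutex>

struct PendingAllocationArea {
  mutable std::shared_mutex mutex;  // stands in for pending_allocation_mutex_
  uintptr_t original_top = 0;
  uintptr_t original_limit = 0;

  // Writer (as in SetTopAndLimit above): publish the pair under the
  // exclusive lock so both words change atomically w.r.t. readers.
  void Set(uintptr_t top, uintptr_t limit) {
    std::unique_lock<std::shared_mutex> guard(mutex);
    original_limit = limit;
    original_top = top;
  }

  // Hypothetical reader: the shared lock rules out a torn pair
  // (new top paired with a stale limit, or vice versa).
  bool Contains(uintptr_t addr) const {
    std::shared_lock<std::shared_mutex> guard(mutex);
    return original_top <= addr && addr < original_limit;
  }
};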
@@ -413,6 +416,15 @@ size_t PagedSpace::Available() {
return free_list_->Available();
}
+namespace {
+
+UnprotectMemoryOrigin GetUnprotectMemoryOrigin(bool is_compaction_space) {
+ return is_compaction_space ? UnprotectMemoryOrigin::kMaybeOffMainThread
+ : UnprotectMemoryOrigin::kMainThread;
+}
+
+} // namespace
+
void PagedSpace::FreeLinearAllocationArea() {
// Mark the old linear allocation area with a free space map so it can be
// skipped when scanning the heap.
@@ -438,7 +450,8 @@ void PagedSpace::FreeLinearAllocationArea() {
// because we are going to write a filler into that memory area below.
if (identity() == CODE_SPACE) {
heap()->UnprotectAndRegisterMemoryChunk(
- MemoryChunk::FromAddress(current_top));
+ MemoryChunk::FromAddress(current_top),
+ GetUnprotectMemoryOrigin(is_compaction_space()));
}
DCHECK_IMPLIES(current_limit - current_top >= 2 * kTaggedSize,
@@ -540,7 +553,8 @@ bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes,
DCHECK_LE(size_in_bytes, limit - start);
if (limit != end) {
if (identity() == CODE_SPACE) {
- heap()->UnprotectAndRegisterMemoryChunk(page);
+ heap()->UnprotectAndRegisterMemoryChunk(
+ page, GetUnprotectMemoryOrigin(is_compaction_space()));
}
Free(limit, end - limit, SpaceAccountingMode::kSpaceAccounted);
}
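
The hunk above sits in the free-list allocation fast path: a free-list node larger than the request is carved into a linear allocation area [start, limit), and the unused tail [limit, end) is returned to the free list rather than wasted. A simplified sketch of that carving step, not V8 code, with free_tail standing in for Free() with kSpaceAccounted:

#include <cstddef>
#include <cstdint>

// Carve an allocation area of lab_size bytes (>= the request) out of a
// free-list node [start, start + node_size).
uintptr_t CarveLab(uintptr_t start, size_t node_size, size_t lab_size,
                   void (*free_tail)(uintptr_t start, size_t size)) {
  uintptr_t end = start + node_size;
  uintptr_t limit = start + (lab_size < node_size ? lab_size : node_size);
  if (limit != end) {
    free_tail(limit, end - limit);  // return the unused tail to the free list
  }
  return limit;  // becomes the new allocation limit
}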
@@ -552,7 +566,8 @@ bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes,
base::Optional<std::pair<Address, size_t>> PagedSpace::RawRefillLabBackground(
LocalHeap* local_heap, size_t min_size_in_bytes, size_t max_size_in_bytes,
AllocationAlignment alignment, AllocationOrigin origin) {
- DCHECK(!is_local_space() && identity() == OLD_SPACE);
+ DCHECK(!is_compaction_space());
+ DCHECK(identity() == OLD_SPACE || identity() == MAP_SPACE);
DCHECK_EQ(origin, AllocationOrigin::kRuntime);
auto result = TryAllocationFromFreeListBackground(
@@ -622,7 +637,7 @@ PagedSpace::TryAllocationFromFreeListBackground(LocalHeap* local_heap,
AllocationOrigin origin) {
base::MutexGuard lock(&space_mutex_);
DCHECK_LE(min_size_in_bytes, max_size_in_bytes);
- DCHECK_EQ(identity(), OLD_SPACE);
+ DCHECK(identity() == OLD_SPACE || identity() == MAP_SPACE);
size_t new_node_size = 0;
FreeSpace new_node =
@@ -837,7 +852,7 @@ bool PagedSpace::RefillLabMain(int size_in_bytes, AllocationOrigin origin) {
return RawRefillLabMain(size_in_bytes, origin);
}
-Page* LocalSpace::Expand() {
+Page* CompactionSpace::Expand() {
Page* page = PagedSpace::Expand();
new_pages_.push_back(page);
return page;
@@ -861,9 +876,6 @@ bool PagedSpace::TryExpand(int size_in_bytes, AllocationOrigin origin) {
}
bool PagedSpace::RawRefillLabMain(int size_in_bytes, AllocationOrigin origin) {
- // Non-compaction local spaces are not supported.
- DCHECK_IMPLIES(is_local_space(), is_compaction_space());
-
// Allocation in this space has failed.
DCHECK_GE(size_in_bytes, 0);
const int kMaxPagesToSweep = 1;
@@ -943,7 +955,7 @@ bool PagedSpace::ContributeToSweepingMain(int required_freed_bytes,
AllocationResult PagedSpace::AllocateRawSlow(int size_in_bytes,
AllocationAlignment alignment,
AllocationOrigin origin) {
- if (!is_local_space()) {
+ if (!is_compaction_space()) {
// Start incremental marking before the actual allocation, this allows the
// allocation function to mark the object black when incremental marking is
// running.