// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_LOCAL_ALLOCATOR_INL_H_
#define V8_HEAP_LOCAL_ALLOCATOR_INL_H_

#include "src/heap/local-allocator.h"

#include "src/heap/spaces-inl.h"

namespace v8 {
namespace internal {
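
// Routes an allocation of |object_size| bytes to the requested space:
// new-space requests go through the local allocation buffer (LAB), old- and
// code-space requests go through the corresponding compaction space.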
AllocationResult LocalAllocator::Allocate(AllocationSpace space,
                                          int object_size,
                                          AllocationAlignment alignment) {
  switch (space) {
    case NEW_SPACE:
      return AllocateInNewSpace(object_size, alignment);
    case OLD_SPACE:
      return compaction_spaces_.Get(OLD_SPACE)->AllocateRaw(object_size,
                                                            alignment);
    case CODE_SPACE:
      return compaction_spaces_.Get(CODE_SPACE)
          ->AllocateRaw(object_size, alignment);
    default:
      UNREACHABLE();
  }
}
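
// Gives back the memory of the most recently allocated object in |space|.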
void LocalAllocator::FreeLast(AllocationSpace space, HeapObject object,
                              int object_size) {
  switch (space) {
    case NEW_SPACE:
      FreeLastInNewSpace(object, object_size);
      return;
    case OLD_SPACE:
      FreeLastInOldSpace(object, object_size);
      return;
    default:
      // Only new and old space supported.
      UNREACHABLE();
  }
}
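
// Tries to take back the last new-space allocation from the LAB; if that is
// not possible, the memory is turned into a filler object instead.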
void LocalAllocator::FreeLastInNewSpace(HeapObject object, int object_size) {
  if (!new_space_lab_.TryFreeLast(object, object_size)) {
    // We couldn't free the last object so we have to write a proper filler.
    heap_->CreateFillerObjectAt(object.address(), object_size,
                                ClearRecordedSlots::kNo);
  }
}
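
// Tries to take back the last old-space allocation from the compaction space;
// if that is not possible, the memory is turned into a filler object instead.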
void LocalAllocator::FreeLastInOldSpace(HeapObject object, int object_size) {
  if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object, object_size)) {
    // We couldn't free the last object so we have to write a proper filler.
    heap_->CreateFillerObjectAt(object.address(), object_size,
                                ClearRecordedSlots::kNo);
  }
}
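
// Allocates from the current new-space LAB. If the LAB is exhausted, a fresh
// LAB is requested and the allocation retried once; if no LAB can be
// obtained, the caller is told to retry in old space.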
AllocationResult LocalAllocator::AllocateInLAB(int object_size,
                                               AllocationAlignment alignment) {
  AllocationResult allocation;
  if (!new_space_lab_.IsValid() && !NewLocalAllocationBuffer()) {
    return AllocationResult::Retry(OLD_SPACE);
  }
  allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
  if (allocation.IsRetry()) {
    if (!NewLocalAllocationBuffer()) {
      return AllocationResult::Retry(OLD_SPACE);
    } else {
      allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
      CHECK(!allocation.IsRetry());
    }
  }
  return allocation;
}
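
// Requests a fresh LAB chunk from new space. On success the unused tail of
// the previous LAB is merged into the new one when possible; on failure the
// previous LAB is restored and further LAB requests are disabled.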
bool LocalAllocator::NewLocalAllocationBuffer() {
  if (lab_allocation_will_fail_) return false;
  LocalAllocationBuffer saved_lab_ = new_space_lab_;
  AllocationResult result =
      new_space_->AllocateRawSynchronized(kLabSize, kWordAligned);
  new_space_lab_ = LocalAllocationBuffer::FromResult(heap_, result, kLabSize);
  if (new_space_lab_.IsValid()) {
    new_space_lab_.TryMerge(&saved_lab_);
    return true;
  }
  new_space_lab_ = saved_lab_;
  lab_allocation_will_fail_ = true;
  return false;
}
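
// New-space allocations larger than kMaxLabObjectSize bypass the LAB and go
// directly to new space; smaller ones are served from the LAB.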
AllocationResult LocalAllocator::AllocateInNewSpace(
    int object_size, AllocationAlignment alignment) {
  if (object_size > kMaxLabObjectSize) {
    return new_space_->AllocateRawSynchronized(object_size, alignment);
  }
  return AllocateInLAB(object_size, alignment);
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_LOCAL_ALLOCATOR_INL_H_