author     Michaël Zasso <targos@protonmail.com>    2017-12-05 16:41:55 +0100
committer  Michaël Zasso <targos@protonmail.com>    2017-12-06 12:52:07 +0100
commit     1854ba04e9a68f062beb299dd6e1479279b26363
tree       d5b2df9b8c1deb6388f7a728fca8e1c98c779abe  /deps/v8/src/heap/heap.cc
parent     b52c23b75f96e1c9d2c7b3a7e5619170d0a0d8e1
download   node-new-1854ba04e9a68f062beb299dd6e1479279b26363.tar.gz
deps: update V8 to 6.3.292.46
PR-URL: https://github.com/nodejs/node/pull/16271
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Myles Borins <myles.borins@gmail.com>
Diffstat (limited to 'deps/v8/src/heap/heap.cc')
-rw-r--r--  deps/v8/src/heap/heap.cc  903
1 file changed, 208 insertions(+), 695 deletions(-)
diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index b13ec784f5..458c6c7e09 100644
--- a/deps/v8/src/heap/heap.cc
+++ b/deps/v8/src/heap/heap.cc
@@ -24,6 +24,7 @@
 #include "src/feedback-vector.h"
 #include "src/global-handles.h"
 #include "src/heap/array-buffer-tracker-inl.h"
+#include "src/heap/barrier.h"
 #include "src/heap/code-stats.h"
 #include "src/heap/concurrent-marking.h"
 #include "src/heap/embedder-tracing.h"
@@ -50,6 +51,8 @@
 #include "src/snapshot/serializer-common.h"
 #include "src/snapshot/snapshot.h"
 #include "src/tracing/trace-event.h"
+#include "src/trap-handler/trap-handler.h"
+#include "src/unicode-inl.h"
 #include "src/utils-inl.h"
 #include "src/utils.h"
 #include "src/v8.h"
@@ -58,6 +61,54 @@
 namespace v8 {
 namespace internal {
 
+void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
+  DCHECK_EQ(Smi::kZero, arguments_adaptor_deopt_pc_offset());
+  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+void Heap::SetConstructStubCreateDeoptPCOffset(int pc_offset) {
+  // TODO(tebbi): Remove second half of DCHECK once
+  // FLAG_harmony_restrict_constructor_return is gone.
+  DCHECK(construct_stub_create_deopt_pc_offset() == Smi::kZero ||
+         construct_stub_create_deopt_pc_offset() == Smi::FromInt(pc_offset));
+  set_construct_stub_create_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+void Heap::SetConstructStubInvokeDeoptPCOffset(int pc_offset) {
+  // TODO(tebbi): Remove second half of DCHECK once
+  // FLAG_harmony_restrict_constructor_return is gone.
+  DCHECK(construct_stub_invoke_deopt_pc_offset() == Smi::kZero ||
+         construct_stub_invoke_deopt_pc_offset() == Smi::FromInt(pc_offset));
+  set_construct_stub_invoke_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+void Heap::SetGetterStubDeoptPCOffset(int pc_offset) {
+  DCHECK_EQ(Smi::kZero, getter_stub_deopt_pc_offset());
+  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
+  DCHECK_EQ(Smi::kZero, setter_stub_deopt_pc_offset());
+  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+}
+
+void Heap::SetInterpreterEntryReturnPCOffset(int pc_offset) {
+  DCHECK_EQ(Smi::kZero, interpreter_entry_return_pc_offset());
+  set_interpreter_entry_return_pc_offset(Smi::FromInt(pc_offset));
+}
+
+void Heap::SetSerializedTemplates(FixedArray* templates) {
+  DCHECK_EQ(empty_fixed_array(), serialized_templates());
+  DCHECK(isolate()->serializer_enabled());
+  set_serialized_templates(templates);
+}
+
+void Heap::SetSerializedGlobalProxySizes(FixedArray* sizes) {
+  DCHECK_EQ(empty_fixed_array(), serialized_global_proxy_sizes());
+  DCHECK(isolate()->serializer_enabled());
+  set_serialized_global_proxy_sizes(sizes);
+}
+
 bool Heap::GCCallbackTuple::operator==(
     const Heap::GCCallbackTuple& other) const {
   return other.callback == callback && other.data == data;
@@ -183,7 +234,7 @@
       delay_sweeper_tasks_for_testing_(false),
       pending_layout_change_object_(nullptr) {
   // Ensure old_generation_size_ is a multiple of kPageSize.
-  DCHECK((max_old_generation_size_ & (Page::kPageSize - 1)) == 0);
+  DCHECK_EQ(0, max_old_generation_size_ & (Page::kPageSize - 1));
 
   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
   set_native_contexts_list(NULL);
@@ -703,6 +754,14 @@ inline bool DigestPretenuringFeedback(Isolate* isolate, AllocationSite* site,
   }
 }  // namespace
 
+void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
+  global_pretenuring_feedback_.erase(site);
+}
+
+bool Heap::DeoptMaybeTenuredAllocationSites() {
+  return new_space_->IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
+}
+
 void Heap::ProcessPretenuringFeedback() {
   bool trigger_deoptimization = false;
   if (FLAG_allocation_site_pretenuring) {
@@ -832,18 +891,6 @@ void Heap::GarbageCollectionEpilogue() {
   isolate_->counters()->external_fragmentation_total()->AddSample(
       static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
 
-  isolate_->counters()->heap_fraction_new_space()->AddSample(static_cast<int>(
-      (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
-  isolate_->counters()->heap_fraction_old_space()->AddSample(static_cast<int>(
-      (old_space()->CommittedMemory() * 100.0) / CommittedMemory()));
-  isolate_->counters()->heap_fraction_code_space()->AddSample(
-      static_cast<int>((code_space()->CommittedMemory() * 100.0) /
-                       CommittedMemory()));
-  isolate_->counters()->heap_fraction_map_space()->AddSample(static_cast<int>(
-      (map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
-  isolate_->counters()->heap_fraction_lo_space()->AddSample(static_cast<int>(
-      (lo_space()->CommittedMemory() * 100.0) / CommittedMemory()));
-
   isolate_->counters()->heap_sample_total_committed()->AddSample(
       static_cast<int>(CommittedMemory() / KB));
   isolate_->counters()->heap_sample_total_used()->AddSample(
@@ -888,9 +935,6 @@ void Heap::GarbageCollectionEpilogue() {
   ReportStatisticsAfterGC();
 #endif  // DEBUG
 
-  // Remember the last top pointer so that we can later find out
-  // whether we allocated in new space since the last GC.
-  new_space_top_after_last_gc_ = new_space()->top();
   last_gc_time_ = MonotonicallyIncreasingTimeInMs();
 
   {
@@ -1335,8 +1379,11 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
       if (space == MAP_SPACE) {
         // We allocate each map individually to avoid fragmentation.
         maps->clear();
-        DCHECK_EQ(1, reservation->size());
-        int num_maps = reservation->at(0).size / Map::kSize;
+        DCHECK_LE(reservation->size(), 2);
+        int reserved_size = 0;
+        for (const Chunk& c : *reservation) reserved_size += c.size;
+        DCHECK_EQ(0, reserved_size % Map::kSize);
+        int num_maps = reserved_size / Map::kSize;
         for (int i = 0; i < num_maps; i++) {
           // The deserializer will update the skip list.
           AllocationResult allocation = map_space()->AllocateRawUnaligned(
@@ -1356,8 +1403,10 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
         }
       } else if (space == LO_SPACE) {
         // Just check that we can allocate during deserialization.
-        DCHECK_EQ(1, reservation->size());
-        perform_gc = !CanExpandOldGeneration(reservation->at(0).size);
+        DCHECK_LE(reservation->size(), 2);
+        int reserved_size = 0;
+        for (const Chunk& c : *reservation) reserved_size += c.size;
+        perform_gc = !CanExpandOldGeneration(reserved_size);
       } else {
         for (auto& chunk : *reservation) {
           AllocationResult allocation;
@@ -1379,7 +1428,8 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
             Address free_space_address = free_space->address();
             CreateFillerObjectAt(free_space_address, size,
                                  ClearRecordedSlots::kNo);
-            DCHECK(space < SerializerDeserializer::kNumberOfPreallocatedSpaces);
+            DCHECK_GT(SerializerDeserializer::kNumberOfPreallocatedSpaces,
+                      space);
             chunk.start = free_space_address;
             chunk.end = free_space_address + size;
           } else {
@@ -1667,8 +1717,6 @@ void Heap::MarkCompactEpilogue() {
   PreprocessStackTraces();
   DCHECK(incremental_marking()->IsStopped());
-
-  mark_compact_collector()->marking_worklist()->StopUsing();
 }
 
@@ -1787,7 +1835,7 @@ class ScavengingItem : public ItemParallelJob::Item {
 
 class ScavengingTask final : public ItemParallelJob::Task {
  public:
-  ScavengingTask(Heap* heap, Scavenger* scavenger, Scavenger::Barrier* barrier)
+  ScavengingTask(Heap* heap, Scavenger* scavenger, OneshotBarrier* barrier)
       : ItemParallelJob::Task(heap->isolate()),
         heap_(heap),
         scavenger_(scavenger),
@@ -1803,10 +1851,9 @@ class ScavengingTask final : public ItemParallelJob::Task {
       item->Process(scavenger_);
      item->MarkFinished();
     }
-    while (!barrier_->Done()) {
+    do {
       scavenger_->Process(barrier_);
-      barrier_->Wait();
-    }
+    } while (!barrier_->Wait());
     scavenger_->Process();
   }
   if (FLAG_trace_parallel_scavenge) {
@@ -1820,7 +1867,7 @@ class ScavengingTask final : public ItemParallelJob::Task {
  private:
   Heap* const heap_;
   Scavenger* const scavenger_;
-  Scavenger::Barrier* const barrier_;
+  OneshotBarrier* const barrier_;
 };
 
 class PageScavengingItem final : public ScavengingItem {
@@ -1909,9 +1956,9 @@ void Heap::Scavenge() {
   Scavenger* scavengers[kMaxScavengerTasks];
   const bool is_logging = IsLogging(isolate());
   const int num_scavenge_tasks = NumberOfScavengeTasks();
-  Scavenger::Barrier barrier;
-  CopiedList copied_list(num_scavenge_tasks);
-  PromotionList promotion_list(num_scavenge_tasks);
+  OneshotBarrier barrier;
+  Scavenger::CopiedList copied_list(num_scavenge_tasks);
+  Scavenger::PromotionList promotion_list(num_scavenge_tasks);
   for (int i = 0; i < num_scavenge_tasks; i++) {
     scavengers[i] =
         new Scavenger(this, is_logging, &copied_list, &promotion_list, i);
@@ -2044,6 +2091,21 @@ String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
   return string->IsExternalString() ? string : nullptr;
 }
 
+void Heap::ExternalStringTable::Verify() {
+#ifdef DEBUG
+  for (size_t i = 0; i < new_space_strings_.size(); ++i) {
+    Object* obj = Object::cast(new_space_strings_[i]);
+    DCHECK(heap_->InNewSpace(obj));
+    DCHECK(!obj->IsTheHole(heap_->isolate()));
+  }
+  for (size_t i = 0; i < old_space_strings_.size(); ++i) {
+    Object* obj = Object::cast(old_space_strings_[i]);
+    DCHECK(!heap_->InNewSpace(obj));
+    DCHECK(!obj->IsTheHole(heap_->isolate()));
+  }
+#endif
+}
+
 void Heap::ExternalStringTable::UpdateNewSpaceReferences(
     Heap::ExternalStringTableUpdaterCallback updater_func) {
   if (new_space_strings_.empty()) return;
@@ -2060,12 +2122,12 @@ void Heap::ExternalStringTable::UpdateNewSpaceReferences(
     DCHECK(target->IsExternalString());
 
     if (heap_->InNewSpace(target)) {
-      // String is still in new space. Update the table entry.
+      // String is still in new space. Update the table entry.
       *last = target;
       ++last;
     } else {
-      // String got promoted. Move it to the old string list.
-      AddOldString(target);
+      // String got promoted. Move it to the old string list.
+      old_space_strings_.push_back(target);
     }
 
@@ -2078,6 +2140,29 @@ void Heap::ExternalStringTable::UpdateNewSpaceReferences(
 #endif
 }
 
+void Heap::ExternalStringTable::PromoteAllNewSpaceStrings() {
+  old_space_strings_.reserve(old_space_strings_.size() +
+                             new_space_strings_.size());
+  std::move(std::begin(new_space_strings_), std::end(new_space_strings_),
+            std::back_inserter(old_space_strings_));
+  new_space_strings_.clear();
+}
+
+void Heap::ExternalStringTable::IterateNewSpaceStrings(RootVisitor* v) {
+  if (!new_space_strings_.empty()) {
+    v->VisitRootPointers(Root::kExternalStringsTable, new_space_strings_.data(),
+                         new_space_strings_.data() + new_space_strings_.size());
+  }
+}
+
+void Heap::ExternalStringTable::IterateAll(RootVisitor* v) {
+  IterateNewSpaceStrings(v);
+  if (!old_space_strings_.empty()) {
+    v->VisitRootPointers(Root::kExternalStringsTable, old_space_strings_.data(),
+                         old_space_strings_.data() + old_space_strings_.size());
+  }
+}
+
 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
     ExternalStringTableUpdaterCallback updater_func) {
   external_string_table_.UpdateNewSpaceReferences(updater_func);
@@ -2238,7 +2323,7 @@ HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size,
                                   int allocation_size,
                                   AllocationAlignment alignment) {
   int filler_size = allocation_size - object_size;
-  DCHECK(filler_size > 0);
+  DCHECK_LT(0, filler_size);
   int pre_filler = GetFillToAlign(object->address(), alignment);
   if (pre_filler) {
     object = PrecedeWithFiller(object, pre_filler);
@@ -2296,6 +2381,7 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
                    Map::ConstructionCounter::encode(Map::kNoSlackTracking);
   map->set_bit_field3(bit_field3);
   map->set_weak_cell_cache(Smi::kZero);
+  map->set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
   return map;
 }
 
@@ -2303,6 +2389,11 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
 AllocationResult Heap::AllocateMap(InstanceType instance_type,
                                    int instance_size,
                                    ElementsKind elements_kind) {
+  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
+  DCHECK_IMPLIES(instance_type >= FIRST_JS_OBJECT_TYPE &&
+                     !Map::CanHaveFastTransitionableElementsKind(instance_type),
+                 IsDictionaryElementsKind(elements_kind) ||
+                     IsTerminalElementsKind(elements_kind));
   HeapObject* result = nullptr;
   AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE);
   if (!allocation.To(&result)) return allocation;
@@ -2316,7 +2407,6 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
   map->set_instance_size(instance_size);
   map->clear_unused();
   map->set_inobject_properties_or_constructor_function_index(0);
-  map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
   map->set_dependent_code(DependentCode::cast(empty_fixed_array()),
                           SKIP_WRITE_BARRIER);
   map->set_weak_cell_cache(Smi::kZero);
@@ -2359,307 +2449,6 @@ AllocationResult Heap::AllocateFillerObject(int size, bool double_align,
 }
 
-
-const Heap::StringTypeTable Heap::string_type_table[] = {
-#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
-  { type, size, k##camel_name##MapRootIndex }             \
-  ,
-    STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
-#undef STRING_TYPE_ELEMENT
-};
-
-
-const Heap::ConstantStringTable Heap::constant_string_table[] = {
-    {"", kempty_stringRootIndex},
-#define CONSTANT_STRING_ELEMENT(name, contents) \
-  { contents, k##name##RootIndex } \
-  ,
-    INTERNALIZED_STRING_LIST(CONSTANT_STRING_ELEMENT)
-#undef CONSTANT_STRING_ELEMENT
-};
-
-
-const Heap::StructTable Heap::struct_table[] = {
-#define STRUCT_TABLE_ELEMENT(NAME, Name, name) \
-  { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex } \
-  ,
-    STRUCT_LIST(STRUCT_TABLE_ELEMENT)
-#undef STRUCT_TABLE_ELEMENT
-};
-
-namespace {
-
-void FinalizePartialMap(Heap* heap, Map* map) {
-  map->set_code_cache(heap->empty_fixed_array());
-  map->set_dependent_code(DependentCode::cast(heap->empty_fixed_array()));
-  map->set_raw_transitions(Smi::kZero);
-  map->set_instance_descriptors(heap->empty_descriptor_array());
-  if (FLAG_unbox_double_fields) {
-    map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
-  }
-  map->set_prototype(heap->null_value());
-  map->set_constructor_or_backpointer(heap->null_value());
-}
-
-}  // namespace
-
-bool Heap::CreateInitialMaps() {
-  HeapObject* obj = nullptr;
-  {
-    AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize);
-    if (!allocation.To(&obj)) return false;
-  }
-  // Map::cast cannot be used due to uninitialized map field.
-  Map* new_meta_map = reinterpret_cast<Map*>(obj);
-  set_meta_map(new_meta_map);
-  new_meta_map->set_map_after_allocation(new_meta_map);
-
-  {  // Partial map allocation
-#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \
-  { \
-    Map* map; \
-    if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
-    set_##field_name##_map(map); \
-  }
-
-    ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
-    fixed_array_map()->set_elements_kind(HOLEY_ELEMENTS);
-    ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined);
-    ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null);
-    ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole);
-
-#undef ALLOCATE_PARTIAL_MAP
-  }
-
-  // Allocate the empty array.
-  {
-    AllocationResult allocation = AllocateEmptyFixedArray();
-    if (!allocation.To(&obj)) return false;
-  }
-  set_empty_fixed_array(FixedArray::cast(obj));
-
-  {
-    AllocationResult allocation = Allocate(null_map(), OLD_SPACE);
-    if (!allocation.To(&obj)) return false;
-  }
-  set_null_value(Oddball::cast(obj));
-  Oddball::cast(obj)->set_kind(Oddball::kNull);
-
-  {
-    AllocationResult allocation = Allocate(undefined_map(), OLD_SPACE);
-    if (!allocation.To(&obj)) return false;
-  }
-  set_undefined_value(Oddball::cast(obj));
-  Oddball::cast(obj)->set_kind(Oddball::kUndefined);
-  DCHECK(!InNewSpace(undefined_value()));
-  {
-    AllocationResult allocation = Allocate(the_hole_map(), OLD_SPACE);
-    if (!allocation.To(&obj)) return false;
-  }
-  set_the_hole_value(Oddball::cast(obj));
-  Oddball::cast(obj)->set_kind(Oddball::kTheHole);
-
-  // Set preliminary exception sentinel value before actually initializing it.
-  set_exception(null_value());
-
-  // Allocate the empty descriptor array.
-  {
-    AllocationResult allocation = AllocateEmptyFixedArray();
-    if (!allocation.To(&obj)) return false;
-  }
-  set_empty_descriptor_array(DescriptorArray::cast(obj));
-
-  // Fix the instance_descriptors for the existing maps.
-  FinalizePartialMap(this, meta_map());
-  FinalizePartialMap(this, fixed_array_map());
-  FinalizePartialMap(this, undefined_map());
-  undefined_map()->set_is_undetectable();
-  FinalizePartialMap(this, null_map());
-  null_map()->set_is_undetectable();
-  FinalizePartialMap(this, the_hole_map());
-
-  {  // Map allocation
-#define ALLOCATE_MAP(instance_type, size, field_name) \
-  { \
-    Map* map; \
-    if (!AllocateMap((instance_type), size).To(&map)) return false; \
-    set_##field_name##_map(map); \
-  }
-
-#define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
-  ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)
-
-#define ALLOCATE_PRIMITIVE_MAP(instance_type, size, field_name, \
-                               constructor_function_index) \
-  { \
-    ALLOCATE_MAP((instance_type), (size), field_name); \
-    field_name##_map()->SetConstructorFunctionIndex( \
-        (constructor_function_index)); \
-  }
-
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, fixed_cow_array)
-    fixed_cow_array_map()->set_elements_kind(HOLEY_ELEMENTS);
-    DCHECK_NE(fixed_array_map(), fixed_cow_array_map());
-
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_info)
-    ALLOCATE_VARSIZE_MAP(FEEDBACK_VECTOR_TYPE, feedback_vector)
-    ALLOCATE_PRIMITIVE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number,
-                           Context::NUMBER_FUNCTION_INDEX)
-    ALLOCATE_MAP(MUTABLE_HEAP_NUMBER_TYPE, HeapNumber::kSize,
-                 mutable_heap_number)
-    ALLOCATE_PRIMITIVE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol,
-                           Context::SYMBOL_FUNCTION_INDEX)
-    ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign)
-
-    ALLOCATE_PRIMITIVE_MAP(ODDBALL_TYPE, Oddball::kSize, boolean,
-                           Context::BOOLEAN_FUNCTION_INDEX);
-    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized);
-    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker);
-    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, exception);
-    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception);
-    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, optimized_out);
-    ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, stale_register);
-
-    ALLOCATE_MAP(JS_PROMISE_CAPABILITY_TYPE, JSPromiseCapability::kSize,
-                 js_promise_capability);
-
-    for (unsigned i = 0; i < arraysize(string_type_table); i++) {
-      const StringTypeTable& entry = string_type_table[i];
-      {
-        AllocationResult allocation = AllocateMap(entry.type, entry.size);
-        if (!allocation.To(&obj)) return false;
-      }
-      Map* map = Map::cast(obj);
-      map->SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
-      // Mark cons string maps as unstable, because their objects can change
-      // maps during GC.
-      if (StringShape(entry.type).IsCons()) map->mark_unstable();
-      roots_[entry.index] = map;
-    }
-
-    {  // Create a separate external one byte string map for native sources.
-      AllocationResult allocation =
-          AllocateMap(SHORT_EXTERNAL_ONE_BYTE_STRING_TYPE,
-                      ExternalOneByteString::kShortSize);
-      if (!allocation.To(&obj)) return false;
-      Map* map = Map::cast(obj);
-      map->SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
-      set_native_source_string_map(map);
-    }
-
-    ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
-    fixed_double_array_map()->set_elements_kind(HOLEY_DOUBLE_ELEMENTS);
-    ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array)
-    ALLOCATE_VARSIZE_MAP(BYTECODE_ARRAY_TYPE, bytecode_array)
-    ALLOCATE_VARSIZE_MAP(FREE_SPACE_TYPE, free_space)
-    ALLOCATE_VARSIZE_MAP(PROPERTY_ARRAY_TYPE, property_array)
-    ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_HASH_MAP_TYPE, small_ordered_hash_map)
-    ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_HASH_SET_TYPE, small_ordered_hash_set)
-
-#define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \
-  ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, fixed_##type##_array)
-
-    TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP)
-#undef ALLOCATE_FIXED_TYPED_ARRAY_MAP
-
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements)
-
-    ALLOCATE_VARSIZE_MAP(CODE_TYPE, code)
-
-    ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell)
-    ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
-    ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
-    ALLOCATE_MAP(CELL_TYPE, Cell::kSize, no_closures_cell)
-    ALLOCATE_MAP(CELL_TYPE, Cell::kSize, one_closure_cell)
-    ALLOCATE_MAP(CELL_TYPE, Cell::kSize, many_closures_cell)
-    ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
-    ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
-
-    ALLOCATE_VARSIZE_MAP(TRANSITION_ARRAY_TYPE, transition_array)
-
-    for (unsigned i = 0; i < arraysize(struct_table); i++) {
-      const StructTable& entry = struct_table[i];
-      Map* map;
-      if (!AllocateMap(entry.type, entry.size).To(&map)) return false;
-      roots_[entry.index] = map;
-    }
-
-    ALLOCATE_VARSIZE_MAP(HASH_TABLE_TYPE, hash_table)
-    ALLOCATE_VARSIZE_MAP(HASH_TABLE_TYPE, ordered_hash_table)
-    ALLOCATE_VARSIZE_MAP(HASH_TABLE_TYPE, unseeded_number_dictionary)
-
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, function_context)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, catch_context)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, with_context)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, debug_evaluate_context)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, block_context)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_context)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, eval_context)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, script_context)
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, script_context_table)
-
-    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, native_context)
-    native_context_map()->set_visitor_id(kVisitNativeContext);
-
-    ALLOCATE_MAP(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kAlignedSize,
-                 shared_function_info)
-
-    ALLOCATE_MAP(JS_MESSAGE_OBJECT_TYPE, JSMessageObject::kSize, message_object)
-    ALLOCATE_MAP(JS_OBJECT_TYPE, JSObject::kHeaderSize + kPointerSize, external)
-    external_map()->set_is_extensible(false);
-#undef ALLOCATE_PRIMITIVE_MAP
-#undef ALLOCATE_VARSIZE_MAP
-#undef ALLOCATE_MAP
-  }
-
-  {
-    AllocationResult allocation = AllocateEmptyScopeInfo();
-    if (!allocation.To(&obj)) return false;
-  }
-
-  set_empty_scope_info(ScopeInfo::cast(obj));
-  {
-    AllocationResult allocation = Allocate(boolean_map(), OLD_SPACE);
-    if (!allocation.To(&obj)) return false;
-  }
-  set_true_value(Oddball::cast(obj));
-  Oddball::cast(obj)->set_kind(Oddball::kTrue);
-
-  {
-    AllocationResult allocation =
-        Allocate(boolean_map(), OLD_SPACE);
-    if (!allocation.To(&obj)) return false;
-  }
-  set_false_value(Oddball::cast(obj));
-  Oddball::cast(obj)->set_kind(Oddball::kFalse);
-
-  {  // Empty arrays
-    {
-      ByteArray* byte_array;
-      if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false;
-      set_empty_byte_array(byte_array);
-    }
-
-    {
-      PropertyArray* property_array;
-      if (!AllocatePropertyArray(0, TENURED).To(&property_array)) return false;
-      set_empty_property_array(property_array);
-    }
-
-#define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \
-  { \
-    FixedTypedArrayBase* obj; \
-    if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \
-      return false; \
-    set_empty_fixed_##type##_array(obj); \
-  }
-
-    TYPED_ARRAYS(ALLOCATE_EMPTY_FIXED_TYPED_ARRAY)
-#undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY
-  }
-  DCHECK(!InNewSpace(empty_fixed_array()));
-  return true;
-}
-
 AllocationResult Heap::AllocateHeapNumber(MutableMode mode,
                                           PretenureFlag pretenure) {
   // Statically ensure that it is safe to allocate heap numbers in paged
@@ -2680,6 +2469,23 @@ AllocationResult Heap::AllocateHeapNumber(MutableMode mode,
   return result;
 }
 
+AllocationResult Heap::AllocateBigInt(int length, bool zero_initialize,
+                                      PretenureFlag pretenure) {
+  if (length < 0 || length > BigInt::kMaxLength) {
+    v8::internal::Heap::FatalProcessOutOfMemory("invalid BigInt length", true);
+  }
+  int size = BigInt::SizeFor(length);
+  AllocationSpace space = SelectSpace(pretenure);
+  HeapObject* result = nullptr;
+  {
+    AllocationResult allocation = AllocateRaw(size, space);
+    if (!allocation.To(&result)) return allocation;
+  }
+  result->set_map_after_allocation(bigint_map(), SKIP_WRITE_BARRIER);
+  BigInt::cast(result)->Initialize(length, zero_initialize);
+  return result;
+}
+
 AllocationResult Heap::AllocateCell(Object* value) {
   int size = Cell::kSize;
   STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
@@ -2730,7 +2536,7 @@ AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
 
 
 AllocationResult Heap::AllocateTransitionArray(int capacity) {
-  DCHECK(capacity > 0);
+  DCHECK_LT(0, capacity);
   HeapObject* raw_array = nullptr;
   {
     AllocationResult allocation = AllocateRawFixedArray(capacity, TENURED);
@@ -2749,21 +2555,6 @@ AllocationResult Heap::AllocateTransitionArray(int capacity) {
   return array;
 }
 
-bool Heap::CreateApiObjects() {
-  HandleScope scope(isolate());
-  set_message_listeners(*TemplateList::New(isolate(), 2));
-  HeapObject* obj = nullptr;
-  {
-    AllocationResult allocation = AllocateStruct(INTERCEPTOR_INFO_TYPE);
-    if (!allocation.To(&obj)) return false;
-  }
-  InterceptorInfo* info = InterceptorInfo::cast(obj);
-  info->set_flags(0);
-  set_noop_interceptor_info(info);
-  return true;
-}
-
-
 void Heap::CreateJSEntryStub() {
   JSEntryStub stub(isolate(), StackFrame::ENTRY);
   set_js_entry_code(*stub.GetCode());
@@ -2808,294 +2599,6 @@ void Heap::CreateFixedStubs() {
   Heap::CreateJSConstructEntryStub();
 }
 
-
-void Heap::CreateInitialObjects() {
-  HandleScope scope(isolate());
-  Factory* factory = isolate()->factory();
-
-  // The -0 value must be set before NewNumber works.
-  set_minus_zero_value(*factory->NewHeapNumber(-0.0, IMMUTABLE, TENURED));
-  DCHECK(std::signbit(minus_zero_value()->Number()) != 0);
-
-  set_nan_value(*factory->NewHeapNumber(
-      std::numeric_limits<double>::quiet_NaN(), IMMUTABLE, TENURED));
-  set_hole_nan_value(
-      *factory->NewHeapNumberFromBits(kHoleNanInt64, IMMUTABLE, TENURED));
-  set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, IMMUTABLE, TENURED));
-  set_minus_infinity_value(
-      *factory->NewHeapNumber(-V8_INFINITY, IMMUTABLE, TENURED));
-
-  // Allocate initial string table.
-  set_string_table(*StringTable::New(isolate(), kInitialStringTableSize));
-
-  // Allocate
-
-  // Finish initializing oddballs after creating the string table.
-  Oddball::Initialize(isolate(), factory->undefined_value(), "undefined",
-                      factory->nan_value(), "undefined", Oddball::kUndefined);
-
-  // Initialize the null_value.
-  Oddball::Initialize(isolate(), factory->null_value(), "null",
-                      handle(Smi::kZero, isolate()), "object", Oddball::kNull);
-
-  // Initialize the_hole_value.
-  Oddball::Initialize(isolate(), factory->the_hole_value(), "hole",
-                      factory->hole_nan_value(), "undefined",
-                      Oddball::kTheHole);
-
-  // Initialize the true_value.
-  Oddball::Initialize(isolate(), factory->true_value(), "true",
-                      handle(Smi::FromInt(1), isolate()), "boolean",
-                      Oddball::kTrue);
-
-  // Initialize the false_value.
-  Oddball::Initialize(isolate(), factory->false_value(), "false",
-                      handle(Smi::kZero, isolate()), "boolean",
-                      Oddball::kFalse);
-
-  set_uninitialized_value(
-      *factory->NewOddball(factory->uninitialized_map(), "uninitialized",
-                           handle(Smi::FromInt(-1), isolate()), "undefined",
-                           Oddball::kUninitialized));
-
-  set_arguments_marker(
-      *factory->NewOddball(factory->arguments_marker_map(), "arguments_marker",
-                           handle(Smi::FromInt(-4), isolate()), "undefined",
-                           Oddball::kArgumentsMarker));
-
-  set_termination_exception(*factory->NewOddball(
-      factory->termination_exception_map(), "termination_exception",
-      handle(Smi::FromInt(-3), isolate()), "undefined", Oddball::kOther));
-
-  set_exception(*factory->NewOddball(factory->exception_map(), "exception",
-                                     handle(Smi::FromInt(-5), isolate()),
-                                     "undefined", Oddball::kException));
-
-  set_optimized_out(*factory->NewOddball(factory->optimized_out_map(),
-                                         "optimized_out",
-                                         handle(Smi::FromInt(-6), isolate()),
-                                         "undefined", Oddball::kOptimizedOut));
-
-  set_stale_register(
-      *factory->NewOddball(factory->stale_register_map(), "stale_register",
-                           handle(Smi::FromInt(-7), isolate()), "undefined",
-                           Oddball::kStaleRegister));
-
-  for (unsigned i = 0; i < arraysize(constant_string_table); i++) {
-    Handle<String> str =
-        factory->InternalizeUtf8String(constant_string_table[i].contents);
-    roots_[constant_string_table[i].index] = *str;
-  }
-
-  // Create the code_stubs dictionary. The initial size is set to avoid
-  // expanding the dictionary during bootstrapping.
-  set_code_stubs(*UnseededNumberDictionary::New(isolate(), 128));
-
-  {
-    HandleScope scope(isolate());
-#define SYMBOL_INIT(name) \
-  { \
-    Handle<String> name##d = factory->NewStringFromStaticChars(#name); \
-    Handle<Symbol> symbol(isolate()->factory()->NewPrivateSymbol()); \
-    symbol->set_name(*name##d); \
-    roots_[k##name##RootIndex] = *symbol; \
-  }
-    PRIVATE_SYMBOL_LIST(SYMBOL_INIT)
-#undef SYMBOL_INIT
-  }
-
-  {
-    HandleScope scope(isolate());
-#define SYMBOL_INIT(name, description) \
-  Handle<Symbol> name = factory->NewSymbol(); \
-  Handle<String> name##d = factory->NewStringFromStaticChars(#description); \
-  name->set_name(*name##d); \
-  roots_[k##name##RootIndex] = *name;
-    PUBLIC_SYMBOL_LIST(SYMBOL_INIT)
-#undef SYMBOL_INIT
-
-#define SYMBOL_INIT(name, description) \
-  Handle<Symbol> name = factory->NewSymbol(); \
-  Handle<String> name##d = factory->NewStringFromStaticChars(#description); \
-  name->set_is_well_known_symbol(true); \
-  name->set_name(*name##d); \
-  roots_[k##name##RootIndex] = *name;
-    WELL_KNOWN_SYMBOL_LIST(SYMBOL_INIT)
-#undef SYMBOL_INIT
-
-    // Mark "Interesting Symbols" appropriately.
-    to_string_tag_symbol->set_is_interesting_symbol(true);
-  }
-
-  Handle<NameDictionary> empty_property_dictionary =
-      NameDictionary::New(isolate(), 1, TENURED, USE_CUSTOM_MINIMUM_CAPACITY);
-  DCHECK(!empty_property_dictionary->HasSufficientCapacityToAdd(1));
-  set_empty_property_dictionary(*empty_property_dictionary);
-
-  set_public_symbol_table(*empty_property_dictionary);
-  set_api_symbol_table(*empty_property_dictionary);
-  set_api_private_symbol_table(*empty_property_dictionary);
-
-  set_number_string_cache(
-      *factory->NewFixedArray(kInitialNumberStringCacheSize * 2, TENURED));
-
-  // Allocate cache for single character one byte strings.
-  set_single_character_string_cache(
-      *factory->NewFixedArray(String::kMaxOneByteCharCode + 1, TENURED));
-
-  // Allocate cache for string split and regexp-multiple.
-  set_string_split_cache(*factory->NewFixedArray(
-      RegExpResultsCache::kRegExpResultsCacheSize, TENURED));
-  set_regexp_multiple_cache(*factory->NewFixedArray(
-      RegExpResultsCache::kRegExpResultsCacheSize, TENURED));
-
-  set_undefined_cell(*factory->NewCell(factory->undefined_value()));
-
-  // Microtask queue uses the empty fixed array as a sentinel for "empty".
-  // Number of queued microtasks stored in Isolate::pending_microtask_count().
-  set_microtask_queue(empty_fixed_array());
-
-  {
-    Handle<FixedArray> empty_sloppy_arguments_elements =
-        factory->NewFixedArray(2, TENURED);
-    empty_sloppy_arguments_elements->set_map_after_allocation(
-        sloppy_arguments_elements_map(), SKIP_WRITE_BARRIER);
-    set_empty_sloppy_arguments_elements(*empty_sloppy_arguments_elements);
-  }
-
-  {
-    Handle<WeakCell> cell = factory->NewWeakCell(factory->undefined_value());
-    set_empty_weak_cell(*cell);
-    cell->clear();
-  }
-
-  set_detached_contexts(empty_fixed_array());
-  set_retained_maps(ArrayList::cast(empty_fixed_array()));
-  set_retaining_path_targets(undefined_value());
-
-  set_weak_object_to_code_table(*WeakHashTable::New(isolate(), 16, TENURED));
-
-  set_weak_new_space_object_to_code_list(
-      ArrayList::cast(*(factory->NewFixedArray(16, TENURED))));
-  weak_new_space_object_to_code_list()->SetLength(0);
-
-  set_code_coverage_list(undefined_value());
-
-  set_script_list(Smi::kZero);
-
-  Handle<SeededNumberDictionary> slow_element_dictionary =
-      SeededNumberDictionary::New(isolate(), 1, TENURED,
-                                  USE_CUSTOM_MINIMUM_CAPACITY);
-  DCHECK(!slow_element_dictionary->HasSufficientCapacityToAdd(1));
-  slow_element_dictionary->set_requires_slow_elements();
-  set_empty_slow_element_dictionary(*slow_element_dictionary);
-
-  set_materialized_objects(*factory->NewFixedArray(0, TENURED));
-
-  // Handling of script id generation is in Heap::NextScriptId().
-  set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId));
-  set_next_template_serial_number(Smi::kZero);
-
-  // Allocate the empty OrderedHashTable.
-  Handle<FixedArray> empty_ordered_hash_table =
-      factory->NewFixedArray(OrderedHashMap::kHashTableStartIndex, TENURED);
-  empty_ordered_hash_table->set_map_no_write_barrier(
-      *factory->ordered_hash_table_map());
-  for (int i = 0; i < empty_ordered_hash_table->length(); ++i) {
-    empty_ordered_hash_table->set(i, Smi::kZero);
-  }
-  set_empty_ordered_hash_table(*empty_ordered_hash_table);
-
-  // Allocate the empty script.
-  Handle<Script> script = factory->NewScript(factory->empty_string());
-  script->set_type(Script::TYPE_NATIVE);
-  set_empty_script(*script);
-
-  Handle<PropertyCell> cell = factory->NewPropertyCell(factory->empty_string());
-  cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
-  set_array_protector(*cell);
-
-  cell = factory->NewPropertyCell(factory->empty_string());
-  cell->set_value(the_hole_value());
-  set_empty_property_cell(*cell);
-
-  cell = factory->NewPropertyCell(factory->empty_string());
-  cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
-  set_array_iterator_protector(*cell);
-
-  Handle<Cell> is_concat_spreadable_cell = factory->NewCell(
-      handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
-  set_is_concat_spreadable_protector(*is_concat_spreadable_cell);
-
-  cell = factory->NewPropertyCell(factory->empty_string());
-  cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
-  set_species_protector(*cell);
-
-  Handle<Cell> string_length_overflow_cell = factory->NewCell(
-      handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
-  set_string_length_protector(*string_length_overflow_cell);
-
-  Handle<Cell> fast_array_iteration_cell = factory->NewCell(
-      handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
-  set_fast_array_iteration_protector(*fast_array_iteration_cell);
-
-  cell = factory->NewPropertyCell(factory->empty_string());
-  cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
-  set_array_buffer_neutering_protector(*cell);
-
-  set_serialized_templates(empty_fixed_array());
-  set_serialized_global_proxy_sizes(empty_fixed_array());
-
-  set_weak_stack_trace_list(Smi::kZero);
-
-  set_noscript_shared_function_infos(Smi::kZero);
-
-  // Initialize context slot cache.
-  isolate_->context_slot_cache()->Clear();
-
-  // Initialize descriptor cache.
-  isolate_->descriptor_lookup_cache()->Clear();
-
-  // Initialize compilation cache.
-  isolate_->compilation_cache()->Clear();
-
-  // Finish creating JSPromiseCapabilityMap
-  {
-    // TODO(caitp): This initialization can be removed once PromiseCapability
-    // object is no longer used by builtins implemented in javascript.
-    Handle<Map> map = factory->js_promise_capability_map();
-    map->set_inobject_properties_or_constructor_function_index(3);
-
-    Map::EnsureDescriptorSlack(map, 3);
-
-    PropertyAttributes attrs =
-        static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
-    {  // promise
-      Descriptor d = Descriptor::DataField(factory->promise_string(),
-                                           JSPromiseCapability::kPromiseIndex,
-                                           attrs, Representation::Tagged());
-      map->AppendDescriptor(&d);
-    }
-
-    {  // resolve
-      Descriptor d = Descriptor::DataField(factory->resolve_string(),
-                                           JSPromiseCapability::kResolveIndex,
-                                           attrs, Representation::Tagged());
-      map->AppendDescriptor(&d);
-    }
-
-    {  // reject
-      Descriptor d = Descriptor::DataField(factory->reject_string(),
-                                           JSPromiseCapability::kRejectIndex,
-                                           attrs, Representation::Tagged());
-      map->AppendDescriptor(&d);
-    }
-
-    map->set_is_extensible(false);
-    set_js_promise_capability_map(*map);
-  }
-}
-
 bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
   switch (root_index) {
     case kNumberStringCacheRootIndex:
@@ -3297,7 +2800,7 @@ AllocationResult Heap::AllocateBytecodeArray(int length,
   instance->set_parameter_count(parameter_count);
   instance->set_incoming_new_target_or_generator_register(
       interpreter::Register::invalid_value());
-  instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget());
+  instance->set_interrupt_budget(interpreter::Interpreter::kInterruptBudget);
   instance->set_osr_loop_nesting_level(0);
   instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge);
   instance->set_constant_pool(constant_pool);
@@ -3617,6 +3120,12 @@ AllocationResult Heap::CopyCode(Code* code) {
   DCHECK(!memory_allocator()->code_range()->valid() ||
          memory_allocator()->code_range()->contains(code->address()) ||
          obj_size <= code_space()->AreaSize());
+
+  // Clear the trap handler index since they can't be shared between code. We
+  // have to do this before calling Relocate becauase relocate would adjust the
+  // base pointer for the old code.
+  new_code->set_trap_handler_index(Smi::FromInt(trap_handler::kInvalidIndex));
+
   new_code->Relocate(new_addr - old_addr);
   // We have to iterate over the object and process its pointers when black
   // allocation is on.
@@ -3872,7 +3381,7 @@ static inline void WriteTwoByteData(Vector<const char> vector, uint16_t* chars,
   while (stream_length != 0) {
     size_t consumed = 0;
     uint32_t c = unibrow::Utf8::ValueOf(stream, stream_length, &consumed);
-    DCHECK(c != unibrow::Utf8::kBadChar);
+    DCHECK_NE(unibrow::Utf8::kBadChar, c);
     DCHECK(consumed <= stream_length);
     stream_length -= consumed;
     stream += consumed;
@@ -3887,8 +3396,8 @@ static inline void WriteTwoByteData(Vector<const char> vector, uint16_t* chars,
       *chars++ = c;
     }
   }
-  DCHECK(stream_length == 0);
-  DCHECK(len == 0);
+  DCHECK_EQ(0, stream_length);
+  DCHECK_EQ(0, len);
 }
 
 
@@ -3907,7 +3416,7 @@ static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) {
 template <bool is_one_byte, typename T>
 AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                       uint32_t hash_field) {
-  DCHECK(chars >= 0);
+  DCHECK_LE(0, chars);
   // Compute map and object size.
   int size;
   Map* map;
@@ -3962,7 +3471,7 @@ AllocationResult Heap::AllocateRawOneByteString(int length,
   DCHECK_LE(0, length);
   DCHECK_GE(String::kMaxLength, length);
   int size = SeqOneByteString::SizeFor(length);
-  DCHECK(size <= SeqOneByteString::kMaxSize);
+  DCHECK_GE(SeqOneByteString::kMaxSize, size);
   AllocationSpace space = SelectSpace(pretenure);
 
   HeapObject* result = nullptr;
@@ -3986,7 +3495,7 @@ AllocationResult Heap::AllocateRawTwoByteString(int length,
   DCHECK_LE(0, length);
   DCHECK_GE(String::kMaxLength, length);
   int size = SeqTwoByteString::SizeFor(length);
-  DCHECK(size <= SeqTwoByteString::kMaxSize);
+  DCHECK_GE(SeqTwoByteString::kMaxSize, size);
   AllocationSpace space = SelectSpace(pretenure);
 
   HeapObject* result = nullptr;
@@ -4237,7 +3746,7 @@ AllocationResult Heap::AllocateRawFixedArray(int length,
 AllocationResult Heap::AllocateFixedArrayWithFiller(int length,
                                                     PretenureFlag pretenure,
                                                     Object* filler) {
-  DCHECK(length >= 0);
+  DCHECK_LE(0, length);
   DCHECK(empty_fixed_array()->IsFixedArray());
   if (length == 0) return empty_fixed_array();
 
@@ -4257,7 +3766,10 @@ AllocationResult Heap::AllocateFixedArrayWithFiller(int length,
 
 AllocationResult Heap::AllocatePropertyArray(int length,
                                              PretenureFlag pretenure) {
-  DCHECK(length >= 0);
+  // Allow length = 0 for the empty_property_array singleton.
+  DCHECK_LE(0, length);
+  DCHECK_IMPLIES(length == 0, pretenure == TENURED);
+  DCHECK(!InNewSpace(undefined_value()));
   HeapObject* result = nullptr;
   {
@@ -4272,12 +3784,13 @@ AllocationResult Heap::AllocatePropertyArray(int length,
   return result;
 }
 
-AllocationResult Heap::AllocateUninitializedFixedArray(int length) {
+AllocationResult Heap::AllocateUninitializedFixedArray(
+    int length, PretenureFlag pretenure) {
   if (length == 0) return empty_fixed_array();
 
   HeapObject* obj = nullptr;
   {
-    AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED);
+    AllocationResult allocation = AllocateRawFixedArray(length, pretenure);
     if (!allocation.To(&obj)) return allocation;
   }
 
@@ -4321,7 +3834,7 @@ AllocationResult Heap::AllocateRawFixedDoubleArray(int length,
 
 AllocationResult Heap::AllocateRawFeedbackVector(int length,
                                                  PretenureFlag pretenure) {
-  DCHECK(length >= 0);
+  DCHECK_LE(0, length);
 
   int size = FeedbackVector::SizeFor(length);
   AllocationSpace space = SelectSpace(pretenure);
@@ -4381,8 +3894,8 @@ AllocationResult Heap::AllocateSymbol() {
   return result;
 }
 
-
-AllocationResult Heap::AllocateStruct(InstanceType type) {
+AllocationResult Heap::AllocateStruct(InstanceType type,
+                                      PretenureFlag pretenure) {
   Map* map;
   switch (type) {
 #define MAKE_CASE(NAME, Name, name) \
@@ -4397,7 +3910,8 @@ AllocationResult Heap::AllocateStruct(InstanceType type) {
   int size = map->instance_size();
   Struct* result = nullptr;
   {
-    AllocationResult allocation = Allocate(map, OLD_SPACE);
+    AllocationSpace space = SelectSpace(pretenure);
+    AllocationResult allocation = Allocate(map, space);
     if (!allocation.To(&result)) return allocation;
   }
   result->InitializeBody(size);
@@ -4555,7 +4069,7 @@ void Heap::RegisterDeserializedObjectsForBlackAllocation(
   // object space for side effects.
   IncrementalMarking::MarkingState* marking_state =
       incremental_marking()->marking_state();
-  for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
+  for (int i = OLD_SPACE; i < Serializer<>::kNumberOfSpaces; i++) {
     const Heap::Reservation& res = reservations[i];
     for (auto& chunk : res) {
       Address addr = chunk.start;
@@ -4600,8 +4114,10 @@ void Heap::NotifyObjectLayoutChange(HeapObject* object, int size,
     }
   }
 #ifdef VERIFY_HEAP
-  DCHECK(pending_layout_change_object_ == nullptr);
-  pending_layout_change_object_ = object;
+  if (FLAG_verify_heap) {
+    DCHECK_NULL(pending_layout_change_object_);
+    pending_layout_change_object_ = object;
+  }
 #endif
 }
 
@@ -4624,6 +4140,8 @@ class SlotCollectingVisitor final : public ObjectVisitor {
 };
 
 void Heap::VerifyObjectLayoutChange(HeapObject* object, Map* new_map) {
+  if (!FLAG_verify_heap) return;
+
   // Check that Heap::NotifyObjectLayout was called for object transitions
   // that are not safe for concurrent marking.
   // If you see this check triggering for a freshly allocated object,
@@ -4686,7 +4204,7 @@ bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
       break;
     }
     case DO_FULL_GC: {
-      DCHECK(contexts_disposed_ > 0);
+      DCHECK_LT(0, contexts_disposed_);
      HistogramTimerScope scope(isolate_->counters()->gc_context());
      TRACE_EVENT0("v8", "V8.GCContext");
      CollectAllGarbage(kNoGCFlags, GarbageCollectionReason::kContextDisposal);
@@ -5302,8 +4820,8 @@ class FixStaleLeftTrimmedHandlesVisitor : public RootVisitor {
 
  private:
  inline void FixHandle(Object** p) {
+    if (!(*p)->IsHeapObject()) return;
     HeapObject* current = reinterpret_cast<HeapObject*>(*p);
-    if (!current->IsHeapObject()) return;
     const MapWord map_word = current->map_word();
     if (!map_word.IsForwardingAddress() && current->IsFiller()) {
 #ifdef DEBUG
@@ -5649,8 +5167,8 @@ const double Heap::kTargetMutatorUtilization = 0.97;
 // F = R * (1 - MU) / (R * (1 - MU) - MU)
 double Heap::HeapGrowingFactor(double gc_speed, double mutator_speed,
                                double max_factor) {
-  DCHECK(max_factor >= kMinHeapGrowingFactor);
-  DCHECK(max_factor <= kMaxHeapGrowingFactor);
+  DCHECK_LE(kMinHeapGrowingFactor, max_factor);
+  DCHECK_GE(kMaxHeapGrowingFactor, max_factor);
   if (gc_speed == 0 || mutator_speed == 0) return max_factor;
 
   const double speed_ratio = gc_speed / mutator_speed;
@@ -5695,8 +5213,8 @@ double Heap::MaxHeapGrowingFactor(size_t max_old_generation_size) {
 
 size_t Heap::CalculateOldGenerationAllocationLimit(double factor,
                                                    size_t old_gen_size) {
-  CHECK(factor > 1.0);
-  CHECK(old_gen_size > 0);
+  CHECK_LT(1.0, factor);
+  CHECK_LT(0, old_gen_size);
   uint64_t limit = static_cast<uint64_t>(old_gen_size * factor);
   limit = Max(limit, static_cast<uint64_t>(old_gen_size) +
                          MinimumAllocationLimitGrowingStep());
@@ -5886,7 +5404,7 @@ bool Heap::SetUp() {
   }
 
   mmap_region_base_ =
-      reinterpret_cast<uintptr_t>(base::OS::GetRandomMmapAddr()) &
+      reinterpret_cast<uintptr_t>(v8::internal::GetRandomMmapAddr()) &
       ~kMmapRegionMask;
 
   // Set up memory allocator.
@@ -5905,10 +5423,10 @@ bool Heap::SetUp() {
         mark_compact_collector_->marking_worklist();
     concurrent_marking_ = new ConcurrentMarking(
         this, marking_worklist->shared(), marking_worklist->bailout(),
-        mark_compact_collector_->weak_objects());
+        marking_worklist->on_hold(), mark_compact_collector_->weak_objects());
   } else {
     concurrent_marking_ =
-        new ConcurrentMarking(this, nullptr, nullptr, nullptr);
+        new ConcurrentMarking(this, nullptr, nullptr, nullptr, nullptr);
   }
 
   for (int i = 0; i <= LAST_SPACE; i++) {
@@ -5919,7 +5437,6 @@ bool Heap::SetUp() {
   if (!new_space_->SetUp(initial_semispace_size_, max_semi_space_size_)) {
     return false;
   }
-  new_space_top_after_last_gc_ = new_space()->top();
 
   space_[OLD_SPACE] = old_space_ =
       new OldSpace(this, OLD_SPACE, NOT_EXECUTABLE);
@@ -5938,7 +5455,7 @@ bool Heap::SetUp() {
   if (!lo_space_->SetUp()) return false;
 
   // Set up the seed that is used to randomize the string hash function.
-  DCHECK(hash_seed() == 0);
+  DCHECK_EQ(Smi::kZero, hash_seed());
   if (FLAG_randomize_hashes) InitializeHashSeed();
 
   for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
@@ -5986,22 +5503,6 @@ void Heap::InitializeHashSeed() {
   }
 }
 
-bool Heap::CreateHeapObjects() {
-  // Create initial maps.
-  if (!CreateInitialMaps()) return false;
-  if (!CreateApiObjects()) return false;
-
-  // Create initial objects
-  CreateInitialObjects();
-  CHECK_EQ(0u, gc_count_);
-
-  set_native_contexts_list(undefined_value());
-  set_allocation_sites_list(undefined_value());
-
-  return true;
-}
-
-
 void Heap::SetStackLimits() {
   DCHECK(isolate_ != NULL);
   DCHECK(isolate_ == isolate());
@@ -6034,7 +5535,7 @@ void Heap::NotifyDeserializationComplete() {
 #ifdef DEBUG
     // All pages right after bootstrapping must be marked as never-evacuate.
     for (Page* p : *s) {
-      CHECK(p->NeverEvacuate());
+      DCHECK(p->NeverEvacuate());
     }
 #endif  // DEBUG
   }
@@ -6053,7 +5554,8 @@ void Heap::TracePossibleWrapper(JSObject* js_object) {
       js_object->GetEmbedderField(0) &&
       js_object->GetEmbedderField(0) != undefined_value() &&
       js_object->GetEmbedderField(1) != undefined_value()) {
-    DCHECK(reinterpret_cast<intptr_t>(js_object->GetEmbedderField(0)) % 2 == 0);
+    DCHECK_EQ(0,
+              reinterpret_cast<intptr_t>(js_object->GetEmbedderField(0)) % 2);
     local_embedder_heap_tracer()->AddWrapperToTrace(std::pair<void*, void*>(
         reinterpret_cast<void*>(js_object->GetEmbedderField(0)),
         reinterpret_cast<void*>(js_object->GetEmbedderField(1))));
@@ -6359,7 +5861,7 @@ class CheckHandleCountVisitor : public RootVisitor {
  public:
   CheckHandleCountVisitor() : handle_count_(0) {}
   ~CheckHandleCountVisitor() override {
-    CHECK(handle_count_ < HandleScope::kCheckHandleThreshold);
+    CHECK_GT(HandleScope::kCheckHandleThreshold, handle_count_);
   }
   void VisitRootPointers(Root root, Object** start, Object** end) override {
     handle_count_ += end - start;
@@ -6532,7 +6034,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
   class MarkingVisitor : public ObjectVisitor, public RootVisitor {
    public:
     explicit MarkingVisitor(UnreachableObjectsFilter* filter)
-        : filter_(filter), marking_stack_(10) {}
+        : filter_(filter) {}
 
     void VisitPointers(HeapObject* host, Object** start,
                        Object** end) override {
@@ -6544,8 +6046,9 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
     }
 
    void TransitiveClosure() {
-      while (!marking_stack_.is_empty()) {
-        HeapObject* obj = marking_stack_.RemoveLast();
+      while (!marking_stack_.empty()) {
+        HeapObject* obj = marking_stack_.back();
+        marking_stack_.pop_back();
        obj->Iterate(this);
      }
    }
@@ -6556,12 +6059,12 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
       if (!(*p)->IsHeapObject()) continue;
       HeapObject* obj = HeapObject::cast(*p);
       if (filter_->MarkAsReachable(obj)) {
-        marking_stack_.Add(obj);
+        marking_stack_.push_back(obj);
       }
     }
   }
 
   UnreachableObjectsFilter* filter_;
-  List<HeapObject*> marking_stack_;
+  std::vector<HeapObject*> marking_stack_;
 };
 
 friend class MarkingVisitor;
@@ -6606,7 +6109,7 @@ HeapIterator::~HeapIterator() {
   // Assert that in filtering mode we have iterated through all
   // objects. Otherwise, heap will be left in an inconsistent state.
   if (filtering_ != kNoFiltering) {
-    DCHECK(object_iterator_ == nullptr);
+    DCHECK_NULL(object_iterator_);
   }
 #endif
   delete space_iterator_;
@@ -6891,5 +6394,15 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
   UNREACHABLE();
 }
 
+void Heap::CreateObjectStats() {
+  if (V8_LIKELY(FLAG_gc_stats == 0)) return;
+  if (!live_object_stats_) {
+    live_object_stats_ = new ObjectStats(this);
+  }
+  if (!dead_object_stats_) {
+    dead_object_stats_ = new ObjectStats(this);
+  }
+}
+
 }  // namespace internal
 }  // namespace v8
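
Note on the scavenger hunks above: the patch replaces Scavenger::Barrier and its explicit Done()/Wait() loop with the new OneshotBarrier from src/heap/barrier.h, so each parallel task now runs "do { scavenger_->Process(barrier_); } while (!barrier_->Wait());". The following is a minimal standalone sketch of that Wait() contract; the class, names, and demo are assumptions made for illustration, not V8's actual barrier.h.

    // Sketch (assumed names, not V8 source): Wait() blocks an idle worker,
    // returns false when woken because more work may exist, and returns true
    // exactly when every worker has arrived idle. Workers are expected to
    // drain all visible work in Process() before calling Wait().
    #include <condition_variable>
    #include <mutex>
    #include <thread>
    #include <vector>

    class OneshotBarrierSketch {
     public:
      explicit OneshotBarrierSketch(int tasks) : tasks_(tasks) {}

      // Called by a worker that published work other workers could steal.
      void NotifyAll() {
        std::lock_guard<std::mutex> lock(mutex_);
        cv_.notify_all();
      }

      // Returns true once all workers are idle; the barrier then stays done.
      bool Wait() {
        std::unique_lock<std::mutex> lock(mutex_);
        if (done_) return true;
        ++waiting_;
        if (waiting_ == tasks_) {
          done_ = true;      // last idle worker completes the barrier
          cv_.notify_all();
        } else {
          cv_.wait(lock);    // woken by NotifyAll() or barrier completion
        }
        --waiting_;
        return done_;
      }

     private:
      std::mutex mutex_;
      std::condition_variable cv_;
      const int tasks_;
      int waiting_ = 0;
      bool done_ = false;
    };

    int main() {
      constexpr int kTasks = 4;
      OneshotBarrierSketch barrier(kTasks);
      std::vector<std::thread> workers;
      for (int i = 0; i < kTasks; i++) {
        workers.emplace_back([&barrier] {
          do {
            // Process(): drain local work here; a worker that publishes
            // stealable work would call barrier.NotifyAll().
          } while (!barrier.Wait());
        });
      }
      for (auto& w : workers) w.join();
    }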
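A second recurring pattern: much of the mechanical churn in this diff converts plain assertions such as DCHECK(a == b) or CHECK(a < b) into the comparison macros (DCHECK_EQ, DCHECK_LT, DCHECK_GE, ...), with the expected value as the first argument. The payoff is that a failing check can report both operand values, not just the spelled-out expression. A simplified, hedged sketch of the idea follows; V8's real macros live in src/base/logging.h and differ in detail.

    // Illustrative only: a toy comparison-check macro that prints both
    // operands on failure, mirroring why DCHECK_EQ(expected, actual) is
    // preferred over DCHECK(expected == actual).
    #include <cstdio>
    #include <cstdlib>

    #define SKETCH_CHECK_EQ(expected, actual)                                  \
      do {                                                                     \
        auto e_ = (expected);                                                  \
        auto a_ = (actual);                                                    \
        if (!(e_ == a_)) {                                                     \
          std::fprintf(stderr, "Check failed: %s == %s (%lld vs. %lld)\n",     \
                       #expected, #actual, static_cast<long long>(e_),         \
                       static_cast<long long>(a_));                            \
          std::abort();                                                        \
        }                                                                      \
      } while (0)

    int main() {
      SKETCH_CHECK_EQ(0, 4096 % 4096);  // passes
      SKETCH_CHECK_EQ(0, 4097 & 4095);  // fails and prints "(0 vs. 1)",
                                        // like the page-alignment DCHECK_EQ
                                        // in Heap::Heap() above
    }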