author     Ryan Dahl <ry@tinyclouds.org>    2011-06-29 17:26:51 +0200
committer  Ryan Dahl <ry@tinyclouds.org>    2011-06-29 17:26:51 +0200
commit     33af2720f26c2b25bc7f75ce7eb454ff99db6d35 (patch)
tree       9a38f0c96420edf503eebd6325dd8d2d8249f653 /deps/v8/src/heap.cc
parent     6afdca885adeeeed9eef8cbb01c3d97af0bc084d (diff)
download   node-33af2720f26c2b25bc7f75ce7eb454ff99db6d35.tar.gz
Upgrade V8 to 3.4.8
Diffstat (limited to 'deps/v8/src/heap.cc')
-rw-r--r--    deps/v8/src/heap.cc    1661
1 file changed, 1047 insertions, 614 deletions
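
Most of the changes below follow a single pattern: Heap state that used to live in static class members and file-level statics becomes per-instance state on a Heap object owned by an Isolate, and call sites reach shared services (counters, caches, the mark-compact collector) through isolate_ accessors instead of class-level statics. A minimal sketch of that pattern, with illustrative names and sizes rather than V8's actual declarations:

    #include <cstddef>  // for NULL

    class Isolate;  // forward declaration; the sketch only stores a pointer

    class Heap {
     public:
      Heap()
          : isolate_(NULL),
            max_semispace_size_(8 * 1024 * 1024),
            gc_count_(0) {}

      Isolate* isolate() const { return isolate_; }
      unsigned gc_count() const { return gc_count_; }

     private:
      Isolate* isolate_;        // back pointer, wired up by the owning Isolate
      int max_semispace_size_;  // formerly a static member defined at namespace scope
      unsigned gc_count_;       // formerly static; now one counter per heap
    };
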
diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc
index 34ab9aafc..6bb0206cf 100644
--- a/deps/v8/src/heap.cc
+++ b/deps/v8/src/heap.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -30,11 +30,11 @@
#include "accessors.h"
#include "api.h"
#include "bootstrapper.h"
-#include "codegen-inl.h"
+#include "codegen.h"
#include "compilation-cache.h"
#include "debug.h"
-#include "heap-profiler.h"
#include "global-handles.h"
+#include "heap-profiler.h"
#include "liveobjectlist-inl.h"
#include "mark-compact.h"
#include "natives.h"
@@ -49,119 +49,125 @@
#include "regexp-macro-assembler.h"
#include "arm/regexp-macro-assembler-arm.h"
#endif
-
+#if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
+#include "regexp-macro-assembler.h"
+#include "mips/regexp-macro-assembler-mips.h"
+#endif
namespace v8 {
namespace internal {
-String* Heap::hidden_symbol_;
-Object* Heap::roots_[Heap::kRootListLength];
-Object* Heap::global_contexts_list_;
-
-
-NewSpace Heap::new_space_;
-OldSpace* Heap::old_pointer_space_ = NULL;
-OldSpace* Heap::old_data_space_ = NULL;
-OldSpace* Heap::code_space_ = NULL;
-MapSpace* Heap::map_space_ = NULL;
-CellSpace* Heap::cell_space_ = NULL;
-LargeObjectSpace* Heap::lo_space_ = NULL;
-
static const intptr_t kMinimumPromotionLimit = 2 * MB;
static const intptr_t kMinimumAllocationLimit = 8 * MB;
-intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
-intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
-int Heap::old_gen_exhausted_ = false;
+static Mutex* gc_initializer_mutex = OS::CreateMutex();
-int Heap::amount_of_external_allocated_memory_ = 0;
-int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
+Heap::Heap()
+ : isolate_(NULL),
// semispace_size_ should be a power of 2 and old_generation_size_ should be
// a multiple of Page::kPageSize.
#if defined(ANDROID)
-static const int default_max_semispace_size_ = 2*MB;
-intptr_t Heap::max_old_generation_size_ = 192*MB;
-int Heap::initial_semispace_size_ = 128*KB;
-intptr_t Heap::code_range_size_ = 0;
-intptr_t Heap::max_executable_size_ = max_old_generation_size_;
+ reserved_semispace_size_(2*MB),
+ max_semispace_size_(2*MB),
+ initial_semispace_size_(128*KB),
+ max_old_generation_size_(192*MB),
+ max_executable_size_(max_old_generation_size_),
+ code_range_size_(0),
#elif defined(V8_TARGET_ARCH_X64)
-static const int default_max_semispace_size_ = 16*MB;
-intptr_t Heap::max_old_generation_size_ = 1*GB;
-int Heap::initial_semispace_size_ = 1*MB;
-intptr_t Heap::code_range_size_ = 512*MB;
-intptr_t Heap::max_executable_size_ = 256*MB;
-#else
-static const int default_max_semispace_size_ = 8*MB;
-intptr_t Heap::max_old_generation_size_ = 512*MB;
-int Heap::initial_semispace_size_ = 512*KB;
-intptr_t Heap::code_range_size_ = 0;
-intptr_t Heap::max_executable_size_ = 128*MB;
-#endif
-
-// Allow build-time customization of the max semispace size. Building
-// V8 with snapshots and a non-default max semispace size is much
-// easier if you can define it as part of the build environment.
-#if defined(V8_MAX_SEMISPACE_SIZE)
-int Heap::max_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
+ reserved_semispace_size_(16*MB),
+ max_semispace_size_(16*MB),
+ initial_semispace_size_(1*MB),
+ max_old_generation_size_(1*GB),
+ max_executable_size_(256*MB),
+ code_range_size_(512*MB),
#else
-int Heap::max_semispace_size_ = default_max_semispace_size_;
+ reserved_semispace_size_(8*MB),
+ max_semispace_size_(8*MB),
+ initial_semispace_size_(512*KB),
+ max_old_generation_size_(512*MB),
+ max_executable_size_(128*MB),
+ code_range_size_(0),
#endif
-
-// The snapshot semispace size will be the default semispace size if
-// snapshotting is used and will be the requested semispace size as
-// set up by ConfigureHeap otherwise.
-int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
-
-List<Heap::GCPrologueCallbackPair> Heap::gc_prologue_callbacks_;
-List<Heap::GCEpilogueCallbackPair> Heap::gc_epilogue_callbacks_;
-
-GCCallback Heap::global_gc_prologue_callback_ = NULL;
-GCCallback Heap::global_gc_epilogue_callback_ = NULL;
-HeapObjectCallback Heap::gc_safe_size_of_old_object_ = NULL;
-
// Variables set based on semispace_size_ and old_generation_size_ in
-// ConfigureHeap.
-
+// ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
// Will be 4 * reserved_semispace_size_ to ensure that young
// generation can be aligned to its size.
-int Heap::survived_since_last_expansion_ = 0;
-intptr_t Heap::external_allocation_limit_ = 0;
-
-Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
-
-int Heap::mc_count_ = 0;
-int Heap::ms_count_ = 0;
-unsigned int Heap::gc_count_ = 0;
-
-GCTracer* Heap::tracer_ = NULL;
-
-int Heap::unflattened_strings_length_ = 0;
-
-int Heap::always_allocate_scope_depth_ = 0;
-int Heap::linear_allocation_scope_depth_ = 0;
-int Heap::contexts_disposed_ = 0;
+ survived_since_last_expansion_(0),
+ always_allocate_scope_depth_(0),
+ linear_allocation_scope_depth_(0),
+ contexts_disposed_(0),
+ new_space_(this),
+ old_pointer_space_(NULL),
+ old_data_space_(NULL),
+ code_space_(NULL),
+ map_space_(NULL),
+ cell_space_(NULL),
+ lo_space_(NULL),
+ gc_state_(NOT_IN_GC),
+ gc_post_processing_depth_(0),
+ mc_count_(0),
+ ms_count_(0),
+ gc_count_(0),
+ unflattened_strings_length_(0),
+#ifdef DEBUG
+ allocation_allowed_(true),
+ allocation_timeout_(0),
+ disallow_allocation_failure_(false),
+ debug_utils_(NULL),
+#endif // DEBUG
+ old_gen_promotion_limit_(kMinimumPromotionLimit),
+ old_gen_allocation_limit_(kMinimumAllocationLimit),
+ external_allocation_limit_(0),
+ amount_of_external_allocated_memory_(0),
+ amount_of_external_allocated_memory_at_last_global_gc_(0),
+ old_gen_exhausted_(false),
+ hidden_symbol_(NULL),
+ global_gc_prologue_callback_(NULL),
+ global_gc_epilogue_callback_(NULL),
+ gc_safe_size_of_old_object_(NULL),
+ total_regexp_code_generated_(0),
+ tracer_(NULL),
+ young_survivors_after_last_gc_(0),
+ high_survival_rate_period_length_(0),
+ survival_rate_(0),
+ previous_survival_rate_trend_(Heap::STABLE),
+ survival_rate_trend_(Heap::STABLE),
+ max_gc_pause_(0),
+ max_alive_after_gc_(0),
+ min_in_mutator_(kMaxInt),
+ alive_after_last_gc_(0),
+ last_gc_end_timestamp_(0.0),
+ page_watermark_invalidated_mark_(1 << Page::WATERMARK_INVALIDATED),
+ number_idle_notifications_(0),
+ last_idle_notification_gc_count_(0),
+ last_idle_notification_gc_count_init_(false),
+ configured_(false),
+ is_safe_to_read_maps_(true) {
+ // Allow build-time customization of the max semispace size. Building
+ // V8 with snapshots and a non-default max semispace size is much
+ // easier if you can define it as part of the build environment.
+#if defined(V8_MAX_SEMISPACE_SIZE)
+ max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
+#endif
-int Heap::young_survivors_after_last_gc_ = 0;
-int Heap::high_survival_rate_period_length_ = 0;
-double Heap::survival_rate_ = 0;
-Heap::SurvivalRateTrend Heap::previous_survival_rate_trend_ = Heap::STABLE;
-Heap::SurvivalRateTrend Heap::survival_rate_trend_ = Heap::STABLE;
+ intptr_t max_virtual = OS::MaxVirtualMemory();
-#ifdef DEBUG
-bool Heap::allocation_allowed_ = true;
+ if (max_virtual > 0) {
+ if (code_range_size_ > 0) {
+ // Reserve no more than 1/8 of the memory for the code range.
+ code_range_size_ = Min(code_range_size_, max_virtual >> 3);
+ }
+ }
-int Heap::allocation_timeout_ = 0;
-bool Heap::disallow_allocation_failure_ = false;
-#endif // DEBUG
+ memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
+ global_contexts_list_ = NULL;
+ mark_compact_collector_.heap_ = this;
+ external_string_table_.heap_ = this;
+}
-intptr_t GCTracer::alive_after_last_gc_ = 0;
-double GCTracer::last_gc_end_timestamp_ = 0.0;
-int GCTracer::max_gc_pause_ = 0;
-intptr_t GCTracer::max_alive_after_gc_ = 0;
-int GCTracer::min_in_mutator_ = kMaxInt;
intptr_t Heap::Capacity() {
if (!HasBeenSetup()) return 0;
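
Besides turning the old statics into constructor initializers, the hunk above clamps the configured code range to one eighth of the addressable virtual memory when the platform can report it. A standalone restatement of that clamp; the function name and std::min are placeholders of mine, not V8's Min and OS::MaxVirtualMemory helpers:

    #include <algorithm>
    #include <stdint.h>

    // Clamp a requested code-range size to at most 1/8 of the available
    // virtual address space; a non-positive max_virtual means "unknown".
    intptr_t ClampCodeRangeSize(intptr_t requested, intptr_t max_virtual) {
      if (max_virtual > 0 && requested > 0) {
        return std::min(requested, max_virtual >> 3);
      }
      return requested;
    }
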
@@ -190,7 +196,7 @@ intptr_t Heap::CommittedMemory() {
intptr_t Heap::CommittedMemoryExecutable() {
if (!HasBeenSetup()) return 0;
- return MemoryAllocator::SizeExecutable();
+ return isolate()->memory_allocator()->SizeExecutable();
}
@@ -217,8 +223,8 @@ bool Heap::HasBeenSetup() {
int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
- ASSERT(!Heap::InNewSpace(object)); // Code only works for old objects.
- ASSERT(!MarkCompactCollector::are_map_pointers_encoded());
+ ASSERT(!HEAP->InNewSpace(object)); // Code only works for old objects.
+ ASSERT(!HEAP->mark_compact_collector()->are_map_pointers_encoded());
MapWord map_word = object->map_word();
map_word.ClearMark();
map_word.ClearOverflow();
@@ -227,8 +233,8 @@ int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
- ASSERT(!Heap::InNewSpace(object)); // Code only works for old objects.
- ASSERT(MarkCompactCollector::are_map_pointers_encoded());
+ ASSERT(!HEAP->InNewSpace(object)); // Code only works for old objects.
+ ASSERT(HEAP->mark_compact_collector()->are_map_pointers_encoded());
uint32_t marker = Memory::uint32_at(object->address());
if (marker == MarkCompactCollector::kSingleFreeEncoding) {
return kIntSize;
@@ -236,7 +242,7 @@ int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
return Memory::int_at(object->address() + kIntSize);
} else {
MapWord map_word = object->map_word();
- Address map_address = map_word.DecodeMapAddress(Heap::map_space());
+ Address map_address = map_word.DecodeMapAddress(HEAP->map_space());
Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_address));
return object->SizeFromMap(map);
}
@@ -246,19 +252,20 @@ int Heap::GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object) {
GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
// Is global GC requested?
if (space != NEW_SPACE || FLAG_gc_global) {
- Counters::gc_compactor_caused_by_request.Increment();
+ isolate_->counters()->gc_compactor_caused_by_request()->Increment();
return MARK_COMPACTOR;
}
// Is enough data promoted to justify a global GC?
if (OldGenerationPromotionLimitReached()) {
- Counters::gc_compactor_caused_by_promoted_data.Increment();
+ isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
return MARK_COMPACTOR;
}
// Have allocation in OLD and LO failed?
if (old_gen_exhausted_) {
- Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
+ isolate_->counters()->
+ gc_compactor_caused_by_oldspace_exhaustion()->Increment();
return MARK_COMPACTOR;
}
@@ -271,8 +278,9 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
// and does not count available bytes already in the old space or code
// space. Undercounting is safe---we may get an unrequested full GC when
// a scavenge would have succeeded.
- if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
- Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
+ if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
+ isolate_->counters()->
+ gc_compactor_caused_by_oldspace_exhaustion()->Increment();
return MARK_COMPACTOR;
}
@@ -317,8 +325,8 @@ void Heap::PrintShortHeapStatistics() {
if (!FLAG_trace_gc_verbose) return;
PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d\n",
- MemoryAllocator::Size(),
- MemoryAllocator::Available());
+ isolate_->memory_allocator()->Size(),
+ isolate_->memory_allocator()->Available());
PrintF("New space, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d\n",
Heap::new_space_.Size(),
@@ -383,7 +391,7 @@ void Heap::ReportStatisticsAfterGC() {
void Heap::GarbageCollectionPrologue() {
- TranscendentalCache::Clear();
+ isolate_->transcendental_cache()->Clear();
ClearJSFunctionResultCaches();
gc_count_++;
unflattened_strings_length_ = 0;
@@ -424,21 +432,24 @@ void Heap::GarbageCollectionEpilogue() {
Verify();
}
- if (FLAG_print_global_handles) GlobalHandles::Print();
+ if (FLAG_print_global_handles) isolate_->global_handles()->Print();
if (FLAG_print_handles) PrintHandles();
if (FLAG_gc_verbose) Print();
if (FLAG_code_stats) ReportCodeStatistics("After GC");
#endif
- Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects()));
+ isolate_->counters()->alive_after_last_gc()->Set(
+ static_cast<int>(SizeOfObjects()));
- Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
- Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
+ isolate_->counters()->symbol_table_capacity()->Set(
+ symbol_table()->Capacity());
+ isolate_->counters()->number_of_symbols()->Set(
+ symbol_table()->NumberOfElements());
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
ReportStatisticsAfterGC();
#endif
#ifdef ENABLE_DEBUGGER_SUPPORT
- Debug::AfterGarbageCollection();
+ isolate_->debug()->AfterGarbageCollection();
#endif
}
@@ -447,9 +458,9 @@ void Heap::CollectAllGarbage(bool force_compaction) {
// Since we are ignoring the return value, the exact choice of space does
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
- MarkCompactCollector::SetForceCompaction(force_compaction);
+ mark_compact_collector_.SetForceCompaction(force_compaction);
CollectGarbage(OLD_POINTER_SPACE);
- MarkCompactCollector::SetForceCompaction(false);
+ mark_compact_collector_.SetForceCompaction(false);
}
@@ -457,7 +468,7 @@ void Heap::CollectAllAvailableGarbage() {
// Since we are ignoring the return value, the exact choice of space does
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
- MarkCompactCollector::SetForceCompaction(true);
+ mark_compact_collector()->SetForceCompaction(true);
// Major GC would invoke weak handle callbacks on weakly reachable
// handles, but won't collect weakly reachable objects until next
@@ -473,13 +484,13 @@ void Heap::CollectAllAvailableGarbage() {
break;
}
}
- MarkCompactCollector::SetForceCompaction(false);
+ mark_compact_collector()->SetForceCompaction(false);
}
bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
// The VM is in the GC state until exiting this function.
- VMState state(GC);
+ VMState state(isolate_, GC);
#ifdef DEBUG
// Reset the allocation timeout to the GC interval, but make sure to
@@ -492,7 +503,7 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
bool next_gc_likely_to_collect_more = false;
- { GCTracer tracer;
+ { GCTracer tracer(this);
GarbageCollectionPrologue();
// The GC count was incremented in the prologue. Tell the tracer about
// it.
@@ -502,8 +513,8 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
tracer.set_collector(collector);
HistogramTimer* rate = (collector == SCAVENGER)
- ? &Counters::gc_scavenger
- : &Counters::gc_compactor;
+ ? isolate_->counters()->gc_scavenger()
+ : isolate_->counters()->gc_compactor();
rate->Start();
next_gc_likely_to_collect_more =
PerformGarbageCollection(collector, &tracer);
@@ -512,17 +523,12 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
GarbageCollectionEpilogue();
}
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
- if (FLAG_log_gc) HeapProfiler::WriteSample();
-#endif
-
return next_gc_likely_to_collect_more;
}
void Heap::PerformScavenge() {
- GCTracer tracer;
+ GCTracer tracer(this);
PerformGarbageCollection(SCAVENGER, &tracer);
}
@@ -531,7 +537,6 @@ void Heap::PerformScavenge() {
// Helper class for verifying the symbol table.
class SymbolTableVerifier : public ObjectVisitor {
public:
- SymbolTableVerifier() { }
void VisitPointers(Object** start, Object** end) {
// Visit all HeapObject pointers in [start, end).
for (Object** p = start; p < end; p++) {
@@ -548,7 +553,7 @@ class SymbolTableVerifier : public ObjectVisitor {
static void VerifySymbolTable() {
#ifdef DEBUG
SymbolTableVerifier verifier;
- Heap::symbol_table()->IterateElements(&verifier);
+ HEAP->symbol_table()->IterateElements(&verifier);
#endif // DEBUG
}
@@ -633,7 +638,7 @@ void Heap::EnsureFromSpaceIsCommitted() {
void Heap::ClearJSFunctionResultCaches() {
- if (Bootstrapper::IsActive()) return;
+ if (isolate_->bootstrapper()->IsActive()) return;
Object* context = global_contexts_list_;
while (!context->IsUndefined()) {
@@ -651,8 +656,9 @@ void Heap::ClearJSFunctionResultCaches() {
}
+
void Heap::ClearNormalizedMapCaches() {
- if (Bootstrapper::IsActive()) return;
+ if (isolate_->bootstrapper()->IsActive()) return;
Object* context = global_contexts_list_;
while (!context->IsUndefined()) {
@@ -709,7 +715,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
bool next_gc_likely_to_collect_more = false;
if (collector != SCAVENGER) {
- PROFILE(CodeMovingGCEvent());
+ PROFILE(isolate_, CodeMovingGCEvent());
}
VerifySymbolTable();
@@ -768,14 +774,15 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
UpdateSurvivalRateTrend(start_new_space_size);
}
- Counters::objs_since_last_young.Set(0);
+ isolate_->counters()->objs_since_last_young()->Set(0);
- if (collector == MARK_COMPACTOR) {
- DisableAssertNoAllocation allow_allocation;
+ gc_post_processing_depth_++;
+ { DisableAssertNoAllocation allow_allocation;
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
next_gc_likely_to_collect_more =
- GlobalHandles::PostGarbageCollectionProcessing();
+ isolate_->global_handles()->PostGarbageCollectionProcessing(collector);
}
+ gc_post_processing_depth_--;
// Update relocatables.
Relocatable::PostGarbageCollectionProcessing();
@@ -808,11 +815,11 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
void Heap::MarkCompact(GCTracer* tracer) {
gc_state_ = MARK_COMPACT;
- LOG(ResourceEvent("markcompact", "begin"));
+ LOG(isolate_, ResourceEvent("markcompact", "begin"));
- MarkCompactCollector::Prepare(tracer);
+ mark_compact_collector_.Prepare(tracer);
- bool is_compacting = MarkCompactCollector::IsCompacting();
+ bool is_compacting = mark_compact_collector_.IsCompacting();
if (is_compacting) {
mc_count_++;
@@ -823,15 +830,17 @@ void Heap::MarkCompact(GCTracer* tracer) {
MarkCompactPrologue(is_compacting);
- MarkCompactCollector::CollectGarbage();
+ is_safe_to_read_maps_ = false;
+ mark_compact_collector_.CollectGarbage();
+ is_safe_to_read_maps_ = true;
- LOG(ResourceEvent("markcompact", "end"));
+ LOG(isolate_, ResourceEvent("markcompact", "end"));
gc_state_ = NOT_IN_GC;
Shrink();
- Counters::objs_since_last_full.Set(0);
+ isolate_->counters()->objs_since_last_full()->Set(0);
contexts_disposed_ = 0;
}
@@ -840,15 +849,18 @@ void Heap::MarkCompact(GCTracer* tracer) {
void Heap::MarkCompactPrologue(bool is_compacting) {
// At any old GC clear the keyed lookup cache to enable collection of unused
// maps.
- KeyedLookupCache::Clear();
- ContextSlotCache::Clear();
- DescriptorLookupCache::Clear();
+ isolate_->keyed_lookup_cache()->Clear();
+ isolate_->context_slot_cache()->Clear();
+ isolate_->descriptor_lookup_cache()->Clear();
- CompilationCache::MarkCompactPrologue();
+ isolate_->compilation_cache()->MarkCompactPrologue();
CompletelyClearInstanceofCache();
if (is_compacting) FlushNumberStringCache();
+ if (FLAG_cleanup_code_caches_at_gc) {
+ polymorphic_code_cache()->set_cache(undefined_value());
+ }
ClearNormalizedMapCaches();
}
@@ -868,6 +880,7 @@ Object* Heap::FindCodeObject(Address a) {
// Helper class for copying HeapObjects
class ScavengeVisitor: public ObjectVisitor {
public:
+ explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
void VisitPointer(Object** p) { ScavengePointer(p); }
@@ -879,48 +892,15 @@ class ScavengeVisitor: public ObjectVisitor {
private:
void ScavengePointer(Object** p) {
Object* object = *p;
- if (!Heap::InNewSpace(object)) return;
+ if (!heap_->InNewSpace(object)) return;
Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
reinterpret_cast<HeapObject*>(object));
}
-};
-
-
-// A queue of objects promoted during scavenge. Each object is accompanied
-// by it's size to avoid dereferencing a map pointer for scanning.
-class PromotionQueue {
- public:
- void Initialize(Address start_address) {
- front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
- }
- bool is_empty() { return front_ <= rear_; }
-
- void insert(HeapObject* target, int size) {
- *(--rear_) = reinterpret_cast<intptr_t>(target);
- *(--rear_) = size;
- // Assert no overflow into live objects.
- ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
- }
-
- void remove(HeapObject** target, int* size) {
- *target = reinterpret_cast<HeapObject*>(*(--front_));
- *size = static_cast<int>(*(--front_));
- // Assert no underflow.
- ASSERT(front_ >= rear_);
- }
-
- private:
- // The front of the queue is higher in memory than the rear.
- intptr_t* front_;
- intptr_t* rear_;
+ Heap* heap_;
};
-// Shared state read by the scavenge collector and set by ScavengeObject.
-static PromotionQueue promotion_queue;
-
-
#ifdef DEBUG
// Visitor class to verify pointers in code or data space do not point into
// new space.
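
The PromotionQueue deleted above is not gone: it becomes a per-heap member (promotion_queue_, used by DoScavenge later in this diff), so isolates no longer share one file-level static queue. The structure itself is just two pointers growing downward from the top of to-space; a stripped-down sketch with standard types standing in for HeapObject and ASSERT:

    #include <stdint.h>
    #include <cassert>

    class PromotionQueue {
     public:
      void Initialize(void* start_address) {
        front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
      }
      bool is_empty() const { return front_ <= rear_; }

      // Entries are (address, size) pairs pushed downward in memory.
      void insert(intptr_t target, int size) {
        *(--rear_) = target;
        *(--rear_) = size;
      }
      void remove(intptr_t* target, int* size) {
        *target = *(--front_);
        *size = static_cast<int>(*(--front_));
        assert(front_ >= rear_);  // no underflow past the rear
      }

     private:
      // The front of the queue is higher in memory than the rear.
      intptr_t* front_;
      intptr_t* rear_;
    };
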
@@ -929,7 +909,7 @@ class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
void VisitPointers(Object** start, Object**end) {
for (Object** current = start; current < end; current++) {
if ((*current)->IsHeapObject()) {
- ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
+ ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
}
}
}
@@ -940,12 +920,12 @@ static void VerifyNonPointerSpacePointers() {
// Verify that there are no pointers to new space in spaces where we
// do not expect them.
VerifyNonPointerSpacePointersVisitor v;
- HeapObjectIterator code_it(Heap::code_space());
+ HeapObjectIterator code_it(HEAP->code_space());
for (HeapObject* object = code_it.next();
object != NULL; object = code_it.next())
object->Iterate(&v);
- HeapObjectIterator data_it(Heap::old_data_space());
+ HeapObjectIterator data_it(HEAP->old_data_space());
for (HeapObject* object = data_it.next();
object != NULL; object = data_it.next())
object->Iterate(&v);
@@ -964,6 +944,12 @@ void Heap::CheckNewSpaceExpansionCriteria() {
}
+static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
+ return heap->InNewSpace(*p) &&
+ !HeapObject::cast(*p)->map_word().IsForwardingAddress();
+}
+
+
void Heap::Scavenge() {
#ifdef DEBUG
if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
@@ -971,7 +957,9 @@ void Heap::Scavenge() {
gc_state_ = SCAVENGE;
- Page::FlipMeaningOfInvalidatedWatermarkFlag();
+ SwitchScavengingVisitorsTableIfProfilingWasEnabled();
+
+ Page::FlipMeaningOfInvalidatedWatermarkFlag(this);
#ifdef DEBUG
VerifyPageWatermarkValidity(old_pointer_space_, ALL_VALID);
VerifyPageWatermarkValidity(map_space_, ALL_VALID);
@@ -986,10 +974,10 @@ void Heap::Scavenge() {
map_space_->FlushTopPageWatermark();
// Implements Cheney's copying algorithm
- LOG(ResourceEvent("scavenge", "begin"));
+ LOG(isolate_, ResourceEvent("scavenge", "begin"));
// Clear descriptor cache.
- DescriptorLookupCache::Clear();
+ isolate_->descriptor_lookup_cache()->Clear();
// Used for updating survived_since_last_expansion_ at function end.
intptr_t survived_watermark = PromotedSpaceSize();
@@ -1019,16 +1007,17 @@ void Heap::Scavenge() {
// frees up its size in bytes from the top of the new space, and
// objects are at least one pointer in size.
Address new_space_front = new_space_.ToSpaceLow();
- promotion_queue.Initialize(new_space_.ToSpaceHigh());
+ promotion_queue_.Initialize(new_space_.ToSpaceHigh());
- ScavengeVisitor scavenge_visitor;
+ is_safe_to_read_maps_ = false;
+ ScavengeVisitor scavenge_visitor(this);
// Copy roots.
IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
// Copy objects reachable from the old generation. By definition,
// there are no intergenerational pointers in code or data spaces.
IterateDirtyRegions(old_pointer_space_,
- &IteratePointersInDirtyRegion,
+ &Heap::IteratePointersInDirtyRegion,
&ScavengePointer,
WATERMARK_CAN_BE_INVALID);
@@ -1055,15 +1044,23 @@ void Heap::Scavenge() {
scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+ isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
+ &IsUnscavengedHeapObject);
+ isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
+ &scavenge_visitor);
+ new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
LiveObjectList::UpdateReferencesForScavengeGC();
- RuntimeProfiler::UpdateSamplesAfterScavenge();
+ isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
ASSERT(new_space_front == new_space_.top());
+ is_safe_to_read_maps_ = true;
+
// Set age mark.
new_space_.set_age_mark(new_space_.top());
@@ -1071,18 +1068,19 @@ void Heap::Scavenge() {
IncrementYoungSurvivorsCounter(static_cast<int>(
(PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
- LOG(ResourceEvent("scavenge", "end"));
+ LOG(isolate_, ResourceEvent("scavenge", "end"));
gc_state_ = NOT_IN_GC;
}
-String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {
+String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
+ Object** p) {
MapWord first_word = HeapObject::cast(*p)->map_word();
if (!first_word.IsForwardingAddress()) {
// Unreachable external string can be finalized.
- FinalizeExternalString(String::cast(*p));
+ heap->FinalizeExternalString(String::cast(*p));
return NULL;
}
@@ -1093,48 +1091,49 @@ String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {
void Heap::UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func) {
- ExternalStringTable::Verify();
+ external_string_table_.Verify();
- if (ExternalStringTable::new_space_strings_.is_empty()) return;
+ if (external_string_table_.new_space_strings_.is_empty()) return;
- Object** start = &ExternalStringTable::new_space_strings_[0];
- Object** end = start + ExternalStringTable::new_space_strings_.length();
+ Object** start = &external_string_table_.new_space_strings_[0];
+ Object** end = start + external_string_table_.new_space_strings_.length();
Object** last = start;
for (Object** p = start; p < end; ++p) {
- ASSERT(Heap::InFromSpace(*p));
- String* target = updater_func(p);
+ ASSERT(InFromSpace(*p));
+ String* target = updater_func(this, p);
if (target == NULL) continue;
ASSERT(target->IsExternalString());
- if (Heap::InNewSpace(target)) {
+ if (InNewSpace(target)) {
// String is still in new space. Update the table entry.
*last = target;
++last;
} else {
// String got promoted. Move it to the old string list.
- ExternalStringTable::AddOldString(target);
+ external_string_table_.AddOldString(target);
}
}
ASSERT(last <= end);
- ExternalStringTable::ShrinkNewStrings(static_cast<int>(last - start));
+ external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
}
-static Object* ProcessFunctionWeakReferences(Object* function,
+static Object* ProcessFunctionWeakReferences(Heap* heap,
+ Object* function,
WeakObjectRetainer* retainer) {
- Object* head = Heap::undefined_value();
+ Object* head = heap->undefined_value();
JSFunction* tail = NULL;
Object* candidate = function;
- while (!candidate->IsUndefined()) {
+ while (candidate != heap->undefined_value()) {
// Check whether to keep the candidate in the list.
JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
Object* retain = retainer->RetainAs(candidate);
if (retain != NULL) {
- if (head->IsUndefined()) {
+ if (head == heap->undefined_value()) {
// First element in the list.
head = candidate_function;
} else {
@@ -1151,7 +1150,7 @@ static Object* ProcessFunctionWeakReferences(Object* function,
// Terminate the list if there is one or more elements.
if (tail != NULL) {
- tail->set_next_function_link(Heap::undefined_value());
+ tail->set_next_function_link(heap->undefined_value());
}
return head;
@@ -1162,18 +1161,19 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
Object* head = undefined_value();
Context* tail = NULL;
Object* candidate = global_contexts_list_;
- while (!candidate->IsUndefined()) {
+ while (candidate != undefined_value()) {
// Check whether to keep the candidate in the list.
Context* candidate_context = reinterpret_cast<Context*>(candidate);
Object* retain = retainer->RetainAs(candidate);
if (retain != NULL) {
- if (head->IsUndefined()) {
+ if (head == undefined_value()) {
// First element in the list.
head = candidate_context;
} else {
// Subsequent elements in the list.
ASSERT(tail != NULL);
- tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
+ tail->set_unchecked(this,
+ Context::NEXT_CONTEXT_LINK,
candidate_context,
UPDATE_WRITE_BARRIER);
}
@@ -1183,9 +1183,11 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
// Process the weak list of optimized functions for the context.
Object* function_list_head =
ProcessFunctionWeakReferences(
+ this,
candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
retainer);
- candidate_context->set_unchecked(Context::OPTIMIZED_FUNCTIONS_LIST,
+ candidate_context->set_unchecked(this,
+ Context::OPTIMIZED_FUNCTIONS_LIST,
function_list_head,
UPDATE_WRITE_BARRIER);
}
@@ -1195,21 +1197,22 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
// Terminate the list if there is one or more elements.
if (tail != NULL) {
- tail->set_unchecked(Context::NEXT_CONTEXT_LINK,
+ tail->set_unchecked(this,
+ Context::NEXT_CONTEXT_LINK,
Heap::undefined_value(),
UPDATE_WRITE_BARRIER);
}
// Update the head of the list of contexts.
- Heap::global_contexts_list_ = head;
+ global_contexts_list_ = head;
}
class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
public:
- static inline void VisitPointer(Object** p) {
+ static inline void VisitPointer(Heap* heap, Object** p) {
Object* object = *p;
- if (!Heap::InNewSpace(object)) return;
+ if (!heap->InNewSpace(object)) return;
Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
reinterpret_cast<HeapObject*>(object));
}
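
ProcessWeakReferences and ProcessFunctionWeakReferences above both walk an intrusive weak list, asking the retainer whether each candidate survives, relinking the survivors, and terminating the tail; with the heap now instance-based, the sentinel comes from heap->undefined_value() rather than a static. The shape of that loop on a plain singly-linked list (Node and Retainer are hypothetical types, not V8's):

    #include <cstddef>

    struct Node {
      Node* next;
    };

    struct Retainer {
      // Returns the (possibly relocated) node to keep, or NULL to drop it.
      virtual Node* RetainAs(Node* candidate) = 0;
      virtual ~Retainer() {}
    };

    // Rebuilds the list, keeping only the nodes the retainer retains.
    Node* ProcessWeakList(Node* head_in, Retainer* retainer) {
      Node* head = NULL;
      Node* tail = NULL;
      for (Node* candidate = head_in; candidate != NULL; ) {
        Node* next = candidate->next;
        Node* retained = retainer->RetainAs(candidate);
        if (retained != NULL) {
          if (head == NULL) {
            head = retained;        // first survivor becomes the new head
          } else {
            tail->next = retained;  // link subsequent survivors
          }
          tail = retained;
        }
        candidate = next;
      }
      if (tail != NULL) tail->next = NULL;  // terminate the surviving list
      return head;
    }
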
@@ -1230,10 +1233,10 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
}
// Promote and process all the to-be-promoted objects.
- while (!promotion_queue.is_empty()) {
+ while (!promotion_queue_.is_empty()) {
HeapObject* target;
int size;
- promotion_queue.remove(&target, &size);
+ promotion_queue_.remove(&target, &size);
// Promoted object might be already partially visited
// during dirty regions iteration. Thus we search specificly
@@ -1253,6 +1256,32 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
}
+enum LoggingAndProfiling {
+ LOGGING_AND_PROFILING_ENABLED,
+ LOGGING_AND_PROFILING_DISABLED
+};
+
+
+typedef void (*ScavengingCallback)(Map* map,
+ HeapObject** slot,
+ HeapObject* object);
+
+
+static Atomic32 scavenging_visitors_table_mode_;
+static VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
+
+
+INLINE(static void DoScavengeObject(Map* map,
+ HeapObject** slot,
+ HeapObject* obj));
+
+
+void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
+ scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
+}
+
+
+template<LoggingAndProfiling logging_and_profiling_mode>
class ScavengingVisitor : public StaticVisitorBase {
public:
static void Initialize() {
@@ -1261,23 +1290,23 @@ class ScavengingVisitor : public StaticVisitorBase {
table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
table_.Register(kVisitByteArray, &EvacuateByteArray);
table_.Register(kVisitFixedArray, &EvacuateFixedArray);
+ table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
+
table_.Register(kVisitGlobalContext,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
- VisitSpecialized<Context::kSize>);
-
- typedef ObjectEvacuationStrategy<POINTER_OBJECT> PointerObject;
+ template VisitSpecialized<Context::kSize>);
table_.Register(kVisitConsString,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
- VisitSpecialized<ConsString::kSize>);
+ template VisitSpecialized<ConsString::kSize>);
table_.Register(kVisitSharedFunctionInfo,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
- VisitSpecialized<SharedFunctionInfo::kSize>);
+ template VisitSpecialized<SharedFunctionInfo::kSize>);
table_.Register(kVisitJSFunction,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
- VisitSpecialized<JSFunction::kSize>);
+ template VisitSpecialized<JSFunction::kSize>);
table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
kVisitDataObject,
@@ -1292,18 +1321,16 @@ class ScavengingVisitor : public StaticVisitorBase {
kVisitStructGeneric>();
}
-
- static inline void Scavenge(Map* map, HeapObject** slot, HeapObject* obj) {
- table_.GetVisitor(map)(map, slot, obj);
+ static VisitorDispatchTable<ScavengingCallback>* GetTable() {
+ return &table_;
}
-
private:
enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
enum SizeRestriction { SMALL, UNKNOWN_SIZE };
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
- static void RecordCopiedObject(HeapObject* obj) {
+ static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
bool should_record = false;
#ifdef DEBUG
should_record = FLAG_heap_stats;
@@ -1312,10 +1339,10 @@ class ScavengingVisitor : public StaticVisitorBase {
should_record = should_record || FLAG_log_gc;
#endif
if (should_record) {
- if (Heap::new_space()->Contains(obj)) {
- Heap::new_space()->RecordAllocation(obj);
+ if (heap->new_space()->Contains(obj)) {
+ heap->new_space()->RecordAllocation(obj);
} else {
- Heap::new_space()->RecordPromotion(obj);
+ heap->new_space()->RecordPromotion(obj);
}
}
}
@@ -1324,27 +1351,34 @@ class ScavengingVisitor : public StaticVisitorBase {
// Helper function used by CopyObject to copy a source object to an
// allocated target object and update the forwarding pointer in the source
// object. Returns the target object.
- INLINE(static HeapObject* MigrateObject(HeapObject* source,
+ INLINE(static HeapObject* MigrateObject(Heap* heap,
+ HeapObject* source,
HeapObject* target,
int size)) {
// Copy the content of source to target.
- Heap::CopyBlock(target->address(), source->address(), size);
+ heap->CopyBlock(target->address(), source->address(), size);
// Set the forwarding address.
source->set_map_word(MapWord::FromForwardingAddress(target));
+ if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
- // Update NewSpace stats if necessary.
- RecordCopiedObject(target);
+ // Update NewSpace stats if necessary.
+ RecordCopiedObject(heap, target);
#endif
- HEAP_PROFILE(ObjectMoveEvent(source->address(), target->address()));
+ HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
#if defined(ENABLE_LOGGING_AND_PROFILING)
- if (Logger::is_logging() || CpuProfiler::is_profiling()) {
- if (target->IsSharedFunctionInfo()) {
- PROFILE(SFIMoveEvent(source->address(), target->address()));
+ Isolate* isolate = heap->isolate();
+ if (isolate->logger()->is_logging() ||
+ CpuProfiler::is_profiling(isolate)) {
+ if (target->IsSharedFunctionInfo()) {
+ PROFILE(isolate, SharedFunctionInfoMoveEvent(
+ source->address(), target->address()));
+ }
}
- }
#endif
+ }
+
return target;
}
@@ -1358,36 +1392,37 @@ class ScavengingVisitor : public StaticVisitorBase {
(object_size <= Page::kMaxHeapObjectSize));
ASSERT(object->Size() == object_size);
- if (Heap::ShouldBePromoted(object->address(), object_size)) {
+ Heap* heap = map->heap();
+ if (heap->ShouldBePromoted(object->address(), object_size)) {
MaybeObject* maybe_result;
if ((size_restriction != SMALL) &&
(object_size > Page::kMaxHeapObjectSize)) {
- maybe_result = Heap::lo_space()->AllocateRawFixedArray(object_size);
+ maybe_result = heap->lo_space()->AllocateRawFixedArray(object_size);
} else {
if (object_contents == DATA_OBJECT) {
- maybe_result = Heap::old_data_space()->AllocateRaw(object_size);
+ maybe_result = heap->old_data_space()->AllocateRaw(object_size);
} else {
- maybe_result = Heap::old_pointer_space()->AllocateRaw(object_size);
+ maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
}
}
Object* result = NULL; // Initialization to please compiler.
if (maybe_result->ToObject(&result)) {
HeapObject* target = HeapObject::cast(result);
- *slot = MigrateObject(object, target, object_size);
+ *slot = MigrateObject(heap, object, target, object_size);
if (object_contents == POINTER_OBJECT) {
- promotion_queue.insert(target, object_size);
+ heap->promotion_queue()->insert(target, object_size);
}
- Heap::tracer()->increment_promoted_objects_size(object_size);
+ heap->tracer()->increment_promoted_objects_size(object_size);
return;
}
}
Object* result =
- Heap::new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
- *slot = MigrateObject(object, HeapObject::cast(result), object_size);
+ heap->new_space()->AllocateRaw(object_size)->ToObjectUnchecked();
+ *slot = MigrateObject(heap, object, HeapObject::cast(result), object_size);
return;
}
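
EvacuateObject above decides where a surviving new-space object goes: promote it when it has lived long enough (oversized objects to the large-object space, pointer-free objects to old data space, everything else to old pointer space, with pointer objects also queued for later scanning), otherwise copy it within new space. A compressed restatement of that decision with placeholder flags instead of V8's spaces and size checks; the old-space-allocation-failure fallback is omitted:

    // Where a scavenged object ends up (illustrative only).
    enum EvacuationTarget {
      LARGE_OBJECT_SPACE,   // too big for a regular page
      OLD_DATA_SPACE,       // promoted, contains no pointers
      OLD_POINTER_SPACE,    // promoted, also pushed on the promotion queue
      NEW_SPACE             // not promoted yet; copied within the semispaces
    };

    EvacuationTarget ChooseEvacuationTarget(bool should_be_promoted,
                                            bool exceeds_max_regular_size,
                                            bool contains_pointers) {
      if (should_be_promoted) {
        if (exceeds_max_regular_size) return LARGE_OBJECT_SPACE;
        return contains_pointers ? OLD_POINTER_SPACE : OLD_DATA_SPACE;
      }
      return NEW_SPACE;
    }
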
@@ -1403,6 +1438,18 @@ class ScavengingVisitor : public StaticVisitorBase {
}
+ static inline void EvacuateFixedDoubleArray(Map* map,
+ HeapObject** slot,
+ HeapObject* object) {
+ int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
+ int object_size = FixedDoubleArray::SizeFor(length);
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map,
+ slot,
+ object,
+ object_size);
+ }
+
+
static inline void EvacuateByteArray(Map* map,
HeapObject** slot,
HeapObject* object) {
@@ -1438,13 +1485,14 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) {
ASSERT(IsShortcutCandidate(map->instance_type()));
- if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
+ if (ConsString::cast(object)->unchecked_second() ==
+ map->heap()->empty_string()) {
HeapObject* first =
HeapObject::cast(ConsString::cast(object)->unchecked_first());
*slot = first;
- if (!Heap::InNewSpace(first)) {
+ if (!map->heap()->InNewSpace(first)) {
object->set_map_word(MapWord::FromForwardingAddress(first));
return;
}
@@ -1458,7 +1506,7 @@ class ScavengingVisitor : public StaticVisitorBase {
return;
}
- Scavenge(first->map(), slot, first);
+ DoScavengeObject(first->map(), slot, first);
object->set_map_word(MapWord::FromForwardingAddress(*slot));
return;
}
@@ -1485,26 +1533,61 @@ class ScavengingVisitor : public StaticVisitorBase {
}
};
- typedef void (*Callback)(Map* map, HeapObject** slot, HeapObject* object);
-
- static VisitorDispatchTable<Callback> table_;
+ static VisitorDispatchTable<ScavengingCallback> table_;
};
-VisitorDispatchTable<ScavengingVisitor::Callback> ScavengingVisitor::table_;
+template<LoggingAndProfiling logging_and_profiling_mode>
+VisitorDispatchTable<ScavengingCallback>
+ ScavengingVisitor<logging_and_profiling_mode>::table_;
+
+
+static void InitializeScavengingVisitorsTables() {
+ ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::Initialize();
+ ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::Initialize();
+ scavenging_visitors_table_.CopyFrom(
+ ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::GetTable());
+ scavenging_visitors_table_mode_ = LOGGING_AND_PROFILING_DISABLED;
+}
+
+
+void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+ if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) {
+ // Table was already updated by some isolate.
+ return;
+ }
+
+ if (isolate()->logger()->is_logging() ||
+ CpuProfiler::is_profiling(isolate()) ||
+ (isolate()->heap_profiler() != NULL &&
+ isolate()->heap_profiler()->is_profiling())) {
+ // If one of the isolates is doing scavenge at this moment of time
+ // it might see this table in an inconsistent state when
+ // some of the callbacks point to
+ // ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED> and others
+ // to ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>.
+ // However this does not lead to any bugs as such isolate does not have
+ // profiling enabled and any isolate with enabled profiling is guaranteed
+ // to see the table in the consistent state.
+ scavenging_visitors_table_.CopyFrom(
+ ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::GetTable());
+
+ // We use Release_Store to prevent reordering of this write before writes
+ // to the table.
+ Release_Store(&scavenging_visitors_table_mode_,
+ LOGGING_AND_PROFILING_ENABLED);
+ }
+#endif
+}
void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
- ASSERT(InFromSpace(object));
+ ASSERT(HEAP->InFromSpace(object));
MapWord first_word = object->map_word();
ASSERT(!first_word.IsForwardingAddress());
Map* map = first_word.ToMap();
- ScavengingVisitor::Scavenge(map, p, object);
-}
-
-
-void Heap::ScavengePointer(HeapObject** p) {
- ScavengeObject(p, *p);
+ DoScavengeObject(map, p, object);
}
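
The table switch above publishes the fully-copied dispatch table with a release store, so any isolate that later observes LOGGING_AND_PROFILING_ENABLED also observes every entry of the copied table. The same idea expressed with portable C++11 atomics; V8 here uses its own Atomic32 and Release_Store primitives, so this is only an analogy:

    #include <atomic>

    struct DispatchTable {
      void (*visit)(void*);  // placeholder for the per-type scavenging callbacks
    };

    static DispatchTable shared_table;      // plain (non-atomic) data
    static std::atomic<int> table_mode(0);  // 0 = disabled, 1 = enabled

    void PublishProfilingTable(const DispatchTable& profiling_table) {
      shared_table = profiling_table;  // ordinary writes fill in the table first
      // The release store keeps the writes above from being reordered past the
      // flag, so a reader that acquires table_mode == 1 sees a complete table.
      table_mode.store(1, std::memory_order_release);
    }
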
@@ -1519,9 +1602,8 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
- reinterpret_cast<Map*>(result)->
- set_visitor_id(
- StaticVisitorBase::GetVisitorId(instance_type, instance_size));
+ reinterpret_cast<Map*>(result)->set_visitor_id(
+ StaticVisitorBase::GetVisitorId(instance_type, instance_size));
reinterpret_cast<Map*>(result)->set_inobject_properties(0);
reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1547,11 +1629,13 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
map->set_instance_size(instance_size);
map->set_inobject_properties(0);
map->set_pre_allocated_property_fields(0);
- map->set_instance_descriptors(empty_descriptor_array());
+ map->init_instance_descriptors();
map->set_code_cache(empty_fixed_array());
+ map->set_prototype_transitions(empty_fixed_array());
map->set_unused_property_fields(0);
map->set_bit_field(0);
- map->set_bit_field2((1 << Map::kIsExtensible) | (1 << Map::kHasFastElements));
+ map->set_bit_field2(1 << Map::kIsExtensible);
+ map->set_elements_kind(JSObject::FAST_ELEMENTS);
// If the map object is aligned fill the padding area with Smi 0 objects.
if (Map::kPadStart < Map::kSize) {
@@ -1575,6 +1659,11 @@ MaybeObject* Heap::AllocateCodeCache() {
}
+MaybeObject* Heap::AllocatePolymorphicCodeCache() {
+ return AllocateStruct(POLYMORPHIC_CODE_CACHE_TYPE);
+}
+
+
const Heap::StringTypeTable Heap::string_type_table[] = {
#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
{type, size, k##camel_name##MapRootIndex},
@@ -1630,6 +1719,7 @@ bool Heap::CreateInitialMaps() {
if (!maybe_obj->ToObject(&obj)) return false;
}
set_null_value(obj);
+ Oddball::cast(obj)->set_kind(Oddball::kNull);
// Allocate the empty descriptor array.
{ MaybeObject* maybe_obj = AllocateEmptyFixedArray();
@@ -1638,14 +1728,17 @@ bool Heap::CreateInitialMaps() {
set_empty_descriptor_array(DescriptorArray::cast(obj));
// Fix the instance_descriptors for the existing maps.
- meta_map()->set_instance_descriptors(empty_descriptor_array());
+ meta_map()->init_instance_descriptors();
meta_map()->set_code_cache(empty_fixed_array());
+ meta_map()->set_prototype_transitions(empty_fixed_array());
- fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
+ fixed_array_map()->init_instance_descriptors();
fixed_array_map()->set_code_cache(empty_fixed_array());
+ fixed_array_map()->set_prototype_transitions(empty_fixed_array());
- oddball_map()->set_instance_descriptors(empty_descriptor_array());
+ oddball_map()->init_instance_descriptors();
oddball_map()->set_code_cache(empty_fixed_array());
+ oddball_map()->set_prototype_transitions(empty_fixed_array());
// Fix prototype object for existing maps.
meta_map()->set_prototype(null_value());
@@ -1669,10 +1762,10 @@ bool Heap::CreateInitialMaps() {
}
set_heap_number_map(Map::cast(obj));
- { MaybeObject* maybe_obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
+ { MaybeObject* maybe_obj = AllocateMap(FOREIGN_TYPE, Foreign::kSize);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_proxy_map(Map::cast(obj));
+ set_foreign_map(Map::cast(obj));
for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
const StringTypeTable& entry = string_type_table[i];
@@ -1696,6 +1789,12 @@ bool Heap::CreateInitialMaps() {
Map::cast(obj)->set_is_undetectable();
{ MaybeObject* maybe_obj =
+ AllocateMap(FIXED_DOUBLE_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_fixed_double_array_map(Map::cast(obj));
+
+ { MaybeObject* maybe_obj =
AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
@@ -1707,10 +1806,10 @@ bool Heap::CreateInitialMaps() {
set_empty_byte_array(ByteArray::cast(obj));
{ MaybeObject* maybe_obj =
- AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize);
+ AllocateMap(EXTERNAL_PIXEL_ARRAY_TYPE, ExternalArray::kAlignedSize);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_pixel_array_map(Map::cast(obj));
+ set_external_pixel_array_map(Map::cast(obj));
{ MaybeObject* maybe_obj = AllocateMap(EXTERNAL_BYTE_ARRAY_TYPE,
ExternalArray::kAlignedSize);
@@ -1754,6 +1853,18 @@ bool Heap::CreateInitialMaps() {
}
set_external_float_array_map(Map::cast(obj));
+ { MaybeObject* maybe_obj =
+ AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_non_strict_arguments_elements_map(Map::cast(obj));
+
+ { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_DOUBLE_ARRAY_TYPE,
+ ExternalArray::kAlignedSize);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_external_double_array_map(Map::cast(obj));
+
{ MaybeObject* maybe_obj = AllocateMap(CODE_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
@@ -1793,7 +1904,7 @@ bool Heap::CreateInitialMaps() {
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_context_map(Map::cast(obj));
+ set_function_context_map(Map::cast(obj));
{ MaybeObject* maybe_obj =
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
@@ -1805,6 +1916,12 @@ bool Heap::CreateInitialMaps() {
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
+ set_with_context_map(Map::cast(obj));
+
+ { MaybeObject* maybe_obj =
+ AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
Map* global_context_map = Map::cast(obj);
global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
set_global_context_map(global_context_map);
@@ -1821,7 +1938,7 @@ bool Heap::CreateInitialMaps() {
}
set_message_object_map(Map::cast(obj));
- ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
+ ASSERT(!InNewSpace(empty_fixed_array()));
return true;
}
@@ -1874,12 +1991,13 @@ MaybeObject* Heap::AllocateJSGlobalPropertyCell(Object* value) {
MaybeObject* Heap::CreateOddball(const char* to_string,
- Object* to_number) {
+ Object* to_number,
+ byte kind) {
Object* result;
{ MaybeObject* maybe_result = Allocate(oddball_map(), OLD_DATA_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- return Oddball::cast(result)->Initialize(to_string, to_number);
+ return Oddball::cast(result)->Initialize(to_string, to_number, kind);
}
@@ -1891,7 +2009,7 @@ bool Heap::CreateApiObjects() {
}
set_neander_map(Map::cast(obj));
- { MaybeObject* maybe_obj = Heap::AllocateJSObjectFromMap(neander_map());
+ { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
if (!maybe_obj->ToObject(&obj)) return false;
}
Object* elements;
@@ -1906,20 +2024,6 @@ bool Heap::CreateApiObjects() {
}
-void Heap::CreateCEntryStub() {
- CEntryStub stub(1);
- set_c_entry_code(*stub.GetCode());
-}
-
-
-#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
-void Heap::CreateRegExpCEntryStub() {
- RegExpCEntryStub stub;
- set_re_c_entry_code(*stub.GetCode());
-}
-#endif
-
-
void Heap::CreateJSEntryStub() {
JSEntryStub stub;
set_js_entry_code(*stub.GetCode());
@@ -1932,14 +2036,6 @@ void Heap::CreateJSConstructEntryStub() {
}
-#if V8_TARGET_ARCH_ARM
-void Heap::CreateDirectCEntryStub() {
- DirectCEntryStub stub;
- set_direct_c_entry_code(*stub.GetCode());
-}
-#endif
-
-
void Heap::CreateFixedStubs() {
// Here we create roots for fixed stubs. They are needed at GC
// for cooking and uncooking (check out frames.cc).
@@ -1947,22 +2043,15 @@ void Heap::CreateFixedStubs() {
// stub cache for these stubs.
HandleScope scope;
// gcc-4.4 has problem generating correct code of following snippet:
- // { CEntryStub stub;
- // c_entry_code_ = *stub.GetCode();
+ // { JSEntryStub stub;
+ // js_entry_code_ = *stub.GetCode();
// }
- // { DebuggerStatementStub stub;
- // debugger_statement_code_ = *stub.GetCode();
+ // { JSConstructEntryStub stub;
+ // js_construct_entry_code_ = *stub.GetCode();
// }
// To workaround the problem, make separate functions without inlining.
- Heap::CreateCEntryStub();
Heap::CreateJSEntryStub();
Heap::CreateJSConstructEntryStub();
-#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
- Heap::CreateRegExpCEntryStub();
-#endif
-#if V8_TARGET_ARCH_ARM
- Heap::CreateDirectCEntryStub();
-#endif
}
@@ -1985,6 +2074,7 @@ bool Heap::CreateInitialObjects() {
if (!maybe_obj->ToObject(&obj)) return false;
}
set_undefined_value(obj);
+ Oddball::cast(obj)->set_kind(Oddball::kUndefined);
ASSERT(!InNewSpace(undefined_value()));
// Allocate initial symbol table.
@@ -2004,39 +2094,50 @@ bool Heap::CreateInitialObjects() {
// Allocate the null_value
{ MaybeObject* maybe_obj =
- Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
+ Oddball::cast(null_value())->Initialize("null",
+ Smi::FromInt(0),
+ Oddball::kNull);
if (!maybe_obj->ToObject(&obj)) return false;
}
- { MaybeObject* maybe_obj = CreateOddball("true", Smi::FromInt(1));
+ { MaybeObject* maybe_obj = CreateOddball("true",
+ Smi::FromInt(1),
+ Oddball::kTrue);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_true_value(obj);
- { MaybeObject* maybe_obj = CreateOddball("false", Smi::FromInt(0));
+ { MaybeObject* maybe_obj = CreateOddball("false",
+ Smi::FromInt(0),
+ Oddball::kFalse);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_false_value(obj);
- { MaybeObject* maybe_obj = CreateOddball("hole", Smi::FromInt(-1));
+ { MaybeObject* maybe_obj = CreateOddball("hole",
+ Smi::FromInt(-1),
+ Oddball::kTheHole);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_the_hole_value(obj);
{ MaybeObject* maybe_obj = CreateOddball("arguments_marker",
- Smi::FromInt(-4));
+ Smi::FromInt(-4),
+ Oddball::kArgumentMarker);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_arguments_marker(obj);
- { MaybeObject* maybe_obj =
- CreateOddball("no_interceptor_result_sentinel", Smi::FromInt(-2));
+ { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
+ Smi::FromInt(-2),
+ Oddball::kOther);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_no_interceptor_result_sentinel(obj);
- { MaybeObject* maybe_obj =
- CreateOddball("termination_exception", Smi::FromInt(-3));
+ { MaybeObject* maybe_obj = CreateOddball("termination_exception",
+ Smi::FromInt(-3),
+ Oddball::kOther);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_termination_exception(obj);
@@ -2067,12 +2168,12 @@ bool Heap::CreateInitialObjects() {
}
hidden_symbol_ = String::cast(obj);
- // Allocate the proxy for __proto__.
+ // Allocate the foreign for __proto__.
{ MaybeObject* maybe_obj =
- AllocateProxy((Address) &Accessors::ObjectPrototype);
+ AllocateForeign((Address) &Accessors::ObjectPrototype);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_prototype_accessors(Proxy::cast(obj));
+ set_prototype_accessors(Foreign::cast(obj));
// Allocate the code_stubs dictionary. The initial size is set to avoid
// expanding the dictionary during bootstrapping.
@@ -2088,6 +2189,11 @@ bool Heap::CreateInitialObjects() {
}
set_non_monomorphic_cache(NumberDictionary::cast(obj));
+ { MaybeObject* maybe_obj = AllocatePolymorphicCodeCache();
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_polymorphic_code_cache(PolymorphicCodeCache::cast(obj));
+
set_instanceof_cache_function(Smi::FromInt(0));
set_instanceof_cache_map(Smi::FromInt(0));
set_instanceof_cache_answer(Smi::FromInt(0));
@@ -2098,7 +2204,8 @@ bool Heap::CreateInitialObjects() {
{ MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
if (!maybe_obj->ToObject(&obj)) return false;
}
- { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(obj);
+ { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this,
+ obj);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_intrinsic_function_names(StringDictionary::cast(obj));
@@ -2118,20 +2225,20 @@ bool Heap::CreateInitialObjects() {
}
set_natives_source_cache(FixedArray::cast(obj));
- // Handling of script id generation is in Factory::NewScript.
+ // Handling of script id generation is in FACTORY->NewScript.
set_last_script_id(undefined_value());
// Initialize keyed lookup cache.
- KeyedLookupCache::Clear();
+ isolate_->keyed_lookup_cache()->Clear();
// Initialize context slot cache.
- ContextSlotCache::Clear();
+ isolate_->context_slot_cache()->Clear();
// Initialize descriptor cache.
- DescriptorLookupCache::Clear();
+ isolate_->descriptor_lookup_cache()->Clear();
// Initialize compilation cache.
- CompilationCache::Clear();
+ isolate_->compilation_cache()->Clear();
return true;
}
@@ -2155,7 +2262,7 @@ void Heap::FlushNumberStringCache() {
// Flush the number to string cache.
int len = number_string_cache()->length();
for (int i = 0; i < len; i++) {
- number_string_cache()->set_undefined(i);
+ number_string_cache()->set_undefined(this, i);
}
}
@@ -2207,7 +2314,7 @@ void Heap::SetNumberStringCache(Object* number, String* string) {
MaybeObject* Heap::NumberToString(Object* number,
bool check_number_string_cache) {
- Counters::number_to_string_runtime.Increment();
+ isolate_->counters()->number_to_string_runtime()->Increment();
if (check_number_string_cache) {
Object* cached = GetNumberStringCache(number);
if (cached != undefined_value()) {
@@ -2257,6 +2364,10 @@ Heap::RootListIndex Heap::RootIndexForExternalArrayType(
return kExternalUnsignedIntArrayMapRootIndex;
case kExternalFloatArray:
return kExternalFloatArrayMapRootIndex;
+ case kExternalDoubleArray:
+ return kExternalDoubleArrayMapRootIndex;
+ case kExternalPixelArray:
+ return kExternalPixelArrayMapRootIndex;
default:
UNREACHABLE();
return kUndefinedValueRootIndex;
@@ -2285,16 +2396,16 @@ MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
}
-MaybeObject* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
- // Statically ensure that it is safe to allocate proxies in paged spaces.
- STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
+MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
+ // Statically ensure that it is safe to allocate foreigns in paged spaces.
+ STATIC_ASSERT(Foreign::kSize <= Page::kMaxHeapObjectSize);
AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
Object* result;
- { MaybeObject* maybe_result = Allocate(proxy_map(), space);
+ { MaybeObject* maybe_result = Allocate(foreign_map(), space);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- Proxy::cast(result)->set_proxy(proxy);
+ Foreign::cast(result)->set_address(address);
return result;
}
@@ -2308,10 +2419,11 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
share->set_name(name);
- Code* illegal = Builtins::builtin(Builtins::Illegal);
+ Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal);
share->set_code(illegal);
share->set_scope_info(SerializedScopeInfo::Empty());
- Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
+ Code* construct_stub = isolate_->builtins()->builtin(
+ Builtins::kJSConstructStubGeneric);
share->set_construct_stub(construct_stub);
share->set_expected_nof_properties(0);
share->set_length(0);
@@ -2331,6 +2443,7 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_num_literals(0);
share->set_end_position(0);
share->set_function_token_position(0);
+ share->set_native(false);
return result;
}
@@ -2369,20 +2482,21 @@ static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
+ Heap* heap,
uint32_t c1,
uint32_t c2) {
String* symbol;
// Numeric strings have a different hash algorithm not known by
// LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
- Heap::symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
+ heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
return symbol;
// Now we know the length is 2, we might as well make use of that fact
// when building the new string.
} else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) { // We can do this
ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1)); // because of this.
Object* result;
- { MaybeObject* maybe_result = Heap::AllocateRawAsciiString(2);
+ { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
char* dest = SeqAsciiString::cast(result)->GetChars();
@@ -2391,7 +2505,7 @@ MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
return result;
} else {
Object* result;
- { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(2);
+ { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
uc16* dest = SeqTwoByteString::cast(result)->GetChars();
@@ -2421,7 +2535,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
if (length == 2) {
unsigned c1 = first->Get(0);
unsigned c2 = second->Get(0);
- return MakeOrFindTwoCharacterString(c1, c2);
+ return MakeOrFindTwoCharacterString(this, c1, c2);
}
bool first_is_ascii = first->IsAsciiRepresentation();
@@ -2431,7 +2545,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
// Make sure that an out of memory exception is thrown if the length
// of the new cons string is too large.
if (length > String::kMaxLength || length < 0) {
- Top::context()->mark_out_of_memory();
+ isolate()->context()->mark_out_of_memory();
return Failure::OutOfMemoryException();
}
@@ -2443,7 +2557,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
is_ascii_data_in_two_byte_string =
first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
if (is_ascii_data_in_two_byte_string) {
- Counters::string_add_runtime_ext_to_ascii.Increment();
+ isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
}
}
@@ -2484,6 +2598,7 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
char* dest = SeqAsciiString::cast(result)->GetChars();
String::WriteToFlat(first, dest, 0, first_length);
String::WriteToFlat(second, dest + first_length, 0, second_length);
+ isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
return result;
}
@@ -2519,21 +2634,21 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
MaybeObject* Heap::AllocateSubString(String* buffer,
- int start,
- int end,
- PretenureFlag pretenure) {
+ int start,
+ int end,
+ PretenureFlag pretenure) {
int length = end - start;
-
- if (length == 1) {
- return Heap::LookupSingleCharacterStringFromCode(
- buffer->Get(start));
+ if (length == 0) {
+ return empty_string();
+ } else if (length == 1) {
+ return LookupSingleCharacterStringFromCode(buffer->Get(start));
} else if (length == 2) {
// Optimization for 2-byte strings often used as keys in a decompression
// dictionary. Check whether we already have the string in the symbol
// table to prevent creation of many unnecessary strings.

unsigned c1 = buffer->Get(start);
unsigned c2 = buffer->Get(start + 1);
- return MakeOrFindTwoCharacterString(c1, c2);
+ return MakeOrFindTwoCharacterString(this, c1, c2);
}
// Make an attempt to flatten the buffer to reduce access time.
@@ -2565,7 +2680,7 @@ MaybeObject* Heap::AllocateExternalStringFromAscii(
ExternalAsciiString::Resource* resource) {
size_t length = resource->length();
if (length > static_cast<size_t>(String::kMaxLength)) {
- Top::context()->mark_out_of_memory();
+ isolate()->context()->mark_out_of_memory();
return Failure::OutOfMemoryException();
}
@@ -2588,7 +2703,7 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
ExternalTwoByteString::Resource* resource) {
size_t length = resource->length();
if (length > static_cast<size_t>(String::kMaxLength)) {
- Top::context()->mark_out_of_memory();
+ isolate()->context()->mark_out_of_memory();
return Failure::OutOfMemoryException();
}
@@ -2598,7 +2713,7 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
bool is_ascii = length <= kAsciiCheckLengthLimit &&
String::IsAscii(resource->data(), static_cast<int>(length));
Map* map = is_ascii ?
- Heap::external_string_with_ascii_data_map() : Heap::external_string_map();
+ external_string_with_ascii_data_map() : external_string_map();
Object* result;
{ MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -2615,8 +2730,8 @@ MaybeObject* Heap::AllocateExternalStringFromTwoByte(
MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
if (code <= String::kMaxAsciiCharCode) {
- Object* value = Heap::single_character_string_cache()->get(code);
- if (value != Heap::undefined_value()) return value;
+ Object* value = single_character_string_cache()->get(code);
+ if (value != undefined_value()) return value;
char buffer[1];
buffer[0] = static_cast<char>(code);
@@ -2624,12 +2739,12 @@ MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));
if (!maybe_result->ToObject(&result)) return maybe_result;
- Heap::single_character_string_cache()->set(code, result);
+ single_character_string_cache()->set(code, result);
return result;
}
Object* result;
- { MaybeObject* maybe_result = Heap::AllocateRawTwoByteString(1);
+ { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
String* answer = String::cast(result);
@@ -2691,24 +2806,6 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
}
-MaybeObject* Heap::AllocatePixelArray(int length,
- uint8_t* external_pointer,
- PretenureFlag pretenure) {
- AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
- Object* result;
- { MaybeObject* maybe_result =
- AllocateRaw(PixelArray::kAlignedSize, space, OLD_DATA_SPACE);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
-
- reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map());
- reinterpret_cast<PixelArray*>(result)->set_length(length);
- reinterpret_cast<PixelArray*>(result)->set_external_pointer(external_pointer);
-
- return result;
-}
-
-
MaybeObject* Heap::AllocateExternalArray(int length,
ExternalArrayType array_type,
void* external_pointer,
@@ -2733,7 +2830,8 @@ MaybeObject* Heap::AllocateExternalArray(int length,
MaybeObject* Heap::CreateCode(const CodeDesc& desc,
Code::Flags flags,
- Handle<Object> self_reference) {
+ Handle<Object> self_reference,
+ bool immovable) {
// Allocate ByteArray before the Code object, so that we do not risk
// leaving uninitialized Code object (and breaking the heap).
Object* reloc_info;
@@ -2741,12 +2839,14 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
if (!maybe_reloc_info->ToObject(&reloc_info)) return maybe_reloc_info;
}
- // Compute size
+ // Compute size.
int body_size = RoundUp(desc.instr_size, kObjectAlignment);
int obj_size = Code::SizeFor(body_size);
ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
MaybeObject* maybe_result;
- if (obj_size > MaxObjectSizeInPagedSpace()) {
+ // Large code objects and code objects which should stay at a fixed address
+ // are allocated in large object space.
+ if (obj_size > MaxObjectSizeInPagedSpace() || immovable) {
maybe_result = lo_space_->AllocateRawCode(obj_size);
} else {
maybe_result = code_space_->AllocateRaw(obj_size);
@@ -2758,7 +2858,8 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
// Initialize the object
HeapObject::cast(result)->set_map(code_map());
Code* code = Code::cast(result);
- ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+ ASSERT(!isolate_->code_range()->exists() ||
+ isolate_->code_range()->contains(code->address()));
code->set_instruction_size(desc.instr_size);
code->set_relocation_info(ByteArray::cast(reloc_info));
code->set_flags(flags);
@@ -2766,6 +2867,7 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
code->set_check_type(RECEIVER_MAP_CHECK);
}
code->set_deoptimization_data(empty_fixed_array());
+ code->set_next_code_flushing_candidate(undefined_value());
// Allow self references to created code object by patching the handle to
// point to the newly allocated Code object.
if (!self_reference.is_null()) {
@@ -2804,7 +2906,8 @@ MaybeObject* Heap::CopyCode(Code* code) {
CopyBlock(new_addr, old_addr, obj_size);
// Relocate the copy.
Code* new_code = Code::cast(result);
- ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+ ASSERT(!isolate_->code_range()->exists() ||
+ isolate_->code_range()->contains(code->address()));
new_code->Relocate(new_addr - old_addr);
return new_code;
}
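
CreateCode now routes not only oversized code objects but also code that must stay at a fixed address to the large object space. A hedged sketch of that placement decision (the names and size parameters below are illustrative):

    #include <cstddef>

    enum CodeSpaceKind { PAGED_CODE_SPACE, LARGE_OBJECT_SPACE };

    // Mirrors the condition added above: large or immovable code objects are
    // allocated in large object space, whose pages are never moved.
    CodeSpaceKind PlaceCode(size_t object_size, size_t max_paged_object_size,
                            bool immovable) {
      if (object_size > max_paged_object_size || immovable) {
        return LARGE_OBJECT_SPACE;
      }
      return PAGED_CODE_SPACE;
    }
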
@@ -2853,7 +2956,8 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
// Relocate the copy.
- ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
+ ASSERT(!isolate_->code_range()->exists() ||
+ isolate_->code_range()->contains(code->address()));
new_code->Relocate(new_addr - old_addr);
#ifdef DEBUG
@@ -2876,9 +2980,6 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
if (!maybe_result->ToObject(&result)) return maybe_result;
}
HeapObject::cast(result)->set_map(map);
-#ifdef ENABLE_LOGGING_AND_PROFILING
- ProducerHeapProfile::RecordJSObjectAllocation(result);
-#endif
return result;
}
@@ -2939,22 +3040,34 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
// To get fast allocation and map sharing for arguments objects we
// allocate them based on an arguments boilerplate.
+ JSObject* boilerplate;
+ int arguments_object_size;
+ bool strict_mode_callee = callee->IsJSFunction() &&
+ JSFunction::cast(callee)->shared()->strict_mode();
+ if (strict_mode_callee) {
+ boilerplate =
+ isolate()->context()->global_context()->
+ strict_mode_arguments_boilerplate();
+ arguments_object_size = kArgumentsObjectSizeStrict;
+ } else {
+ boilerplate =
+ isolate()->context()->global_context()->arguments_boilerplate();
+ arguments_object_size = kArgumentsObjectSize;
+ }
+
// This calls Copy directly rather than using Heap::AllocateRaw so we
// duplicate the check here.
ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
- JSObject* boilerplate =
- Top::context()->global_context()->arguments_boilerplate();
-
// Check that the size of the boilerplate matches our
// expectations. The ArgumentsAccessStub::GenerateNewObject relies
// on the size being a known constant.
- ASSERT(kArgumentsObjectSize == boilerplate->map()->instance_size());
+ ASSERT(arguments_object_size == boilerplate->map()->instance_size());
// Do the allocation.
Object* result;
{ MaybeObject* maybe_result =
- AllocateRaw(kArgumentsObjectSize, NEW_SPACE, OLD_POINTER_SPACE);
+ AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
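
The arguments-object hunks pick a different boilerplate and size when the callee is a strict-mode function, and the following hunk skips the callee property entirely in that case. A simplified sketch of the selection, with made-up sizes standing in for kArgumentsObjectSize and kArgumentsObjectSizeStrict:

    struct ArgumentsLayout {
      int object_size;
      bool has_callee;  // strict-mode arguments expose no "callee" property
    };

    ArgumentsLayout ChooseArgumentsLayout(bool strict_mode_callee) {
      if (strict_mode_callee) {
        return ArgumentsLayout{ /*object_size=*/16, /*has_callee=*/false };
      }
      return ArgumentsLayout{ /*object_size=*/24, /*has_callee=*/true };
    }
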
@@ -2963,14 +3076,17 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
// barrier here.
CopyBlock(HeapObject::cast(result)->address(),
boilerplate->address(),
- kArgumentsObjectSize);
+ JSObject::kHeaderSize);
- // Set the two properties.
- JSObject::cast(result)->InObjectPropertyAtPut(arguments_callee_index,
- callee);
- JSObject::cast(result)->InObjectPropertyAtPut(arguments_length_index,
+ // Set the length property.
+ JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex,
Smi::FromInt(length),
SKIP_WRITE_BARRIER);
+ // Set the callee property for non-strict mode arguments object only.
+ if (!strict_mode_callee) {
+ JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex,
+ callee);
+ }
// Check the state of the object
ASSERT(JSObject::cast(result)->HasFastProperties());
@@ -3002,8 +3118,7 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
int instance_size = fun->shared()->CalculateInstanceSize();
int in_object_properties = fun->shared()->CalculateInObjectProperties();
Object* map_obj;
- { MaybeObject* maybe_map_obj =
- Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
+ { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
}
@@ -3162,6 +3277,26 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
}
+MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) {
+ // Allocate map.
+ // TODO(rossberg): Once we optimize proxies, think about a scheme to share
+ // maps. Will probably depend on the identity of the handler object, too.
+ Map* map;
+ MaybeObject* maybe_map_obj = AllocateMap(JS_PROXY_TYPE, JSProxy::kSize);
+ if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
+ map->set_prototype(prototype);
+ map->set_pre_allocated_property_fields(1);
+ map->set_inobject_properties(1);
+
+ // Allocate the proxy object.
+ Object* result;
+ MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ JSProxy::cast(result)->set_handler(handler);
+ return result;
+}
+
+
MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
ASSERT(constructor->has_initial_map());
Map* map = constructor->initial_map();
@@ -3194,12 +3329,12 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
// Fill these accessors into the dictionary.
DescriptorArray* descs = map->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PropertyDetails details = descs->GetDetails(i);
+ PropertyDetails details(descs->GetDetails(i));
ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
PropertyDetails d =
PropertyDetails(details.attributes(), CALLBACKS, details.index());
Object* value = descs->GetCallbacksObject(i);
- { MaybeObject* maybe_value = Heap::AllocateJSGlobalPropertyCell(value);
+ { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
if (!maybe_value->ToObject(&value)) return maybe_value;
}
@@ -3225,7 +3360,7 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
// Setup the global object as a normalized object.
global->set_map(new_map);
- global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
+ global->map()->clear_instance_descriptors();
global->set_properties(dictionary);
// Make sure result is a global object with properties in dictionary.
@@ -3264,7 +3399,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
{ MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
if (!maybe_clone->ToObject(&clone)) return maybe_clone;
}
- ASSERT(Heap::InNewSpace(clone));
+ ASSERT(InNewSpace(clone));
// Since we know the clone is allocated in new space, we can copy
// the contents without worrying about updating the write barrier.
CopyBlock(HeapObject::cast(clone)->address(),
@@ -3293,9 +3428,6 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
}
// Return the new clone.
-#ifdef ENABLE_LOGGING_AND_PROFILING
- ProducerHeapProfile::RecordJSObjectAllocation(clone);
-#endif
return clone;
}
@@ -3349,8 +3481,8 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
const uc32 kMaxSupportedChar = 0xFFFF;
// Count the number of characters in the UTF-8 string and check if
// it is an ASCII string.
- Access<ScannerConstants::Utf8Decoder>
- decoder(ScannerConstants::utf8_decoder());
+ Access<UnicodeCache::Utf8Decoder>
+ decoder(isolate_->unicode_cache()->utf8_decoder());
decoder->Reset(string.start(), string.length());
int chars = 0;
while (decoder->has_more()) {
@@ -3403,12 +3535,24 @@ Map* Heap::SymbolMapForString(String* string) {
// Find the corresponding symbol map for strings.
Map* map = string->map();
- if (map == ascii_string_map()) return ascii_symbol_map();
- if (map == string_map()) return symbol_map();
- if (map == cons_string_map()) return cons_symbol_map();
- if (map == cons_ascii_string_map()) return cons_ascii_symbol_map();
- if (map == external_string_map()) return external_symbol_map();
- if (map == external_ascii_string_map()) return external_ascii_symbol_map();
+ if (map == ascii_string_map()) {
+ return ascii_symbol_map();
+ }
+ if (map == string_map()) {
+ return symbol_map();
+ }
+ if (map == cons_string_map()) {
+ return cons_symbol_map();
+ }
+ if (map == cons_ascii_string_map()) {
+ return cons_ascii_symbol_map();
+ }
+ if (map == external_string_map()) {
+ return external_symbol_map();
+ }
+ if (map == external_ascii_string_map()) {
+ return external_ascii_symbol_map();
+ }
if (map == external_string_with_ascii_data_map()) {
return external_symbol_with_ascii_data_map();
}
@@ -3582,7 +3726,7 @@ MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
{ MaybeObject* maybe_obj = AllocateRawFixedArray(len);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
- if (Heap::InNewSpace(obj)) {
+ if (InNewSpace(obj)) {
HeapObject* dst = HeapObject::cast(obj);
dst->set_map(map);
CopyBlock(dst->address() + kPointerSize,
@@ -3614,7 +3758,7 @@ MaybeObject* Heap::AllocateFixedArray(int length) {
array->set_map(fixed_array_map());
array->set_length(length);
// Initialize body.
- ASSERT(!Heap::InNewSpace(undefined_value()));
+ ASSERT(!InNewSpace(undefined_value()));
MemsetPointer(array->data_start(), undefined_value(), length);
return result;
}
@@ -3645,20 +3789,21 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
+ Heap* heap,
int length,
PretenureFlag pretenure,
Object* filler) {
ASSERT(length >= 0);
- ASSERT(Heap::empty_fixed_array()->IsFixedArray());
- if (length == 0) return Heap::empty_fixed_array();
+ ASSERT(heap->empty_fixed_array()->IsFixedArray());
+ if (length == 0) return heap->empty_fixed_array();
- ASSERT(!Heap::InNewSpace(filler));
+ ASSERT(!heap->InNewSpace(filler));
Object* result;
- { MaybeObject* maybe_result = Heap::AllocateRawFixedArray(length, pretenure);
+ { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- HeapObject::cast(result)->set_map(Heap::fixed_array_map());
+ HeapObject::cast(result)->set_map(heap->fixed_array_map());
FixedArray* array = FixedArray::cast(result);
array->set_length(length);
MemsetPointer(array->data_start(), filler, length);
@@ -3667,13 +3812,19 @@ MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
- return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
+ return AllocateFixedArrayWithFiller(this,
+ length,
+ pretenure,
+ undefined_value());
}
MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
PretenureFlag pretenure) {
- return AllocateFixedArrayWithFiller(length, pretenure, the_hole_value());
+ return AllocateFixedArrayWithFiller(this,
+ length,
+ pretenure,
+ the_hole_value());
}
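
AllocateFixedArrayWithFiller is now a free-standing helper that takes the heap explicitly and is shared by the undefined-filled and hole-filled variants. A standalone sketch of that shape, using std::vector in place of raw heap allocation (FakeHeap and its sentinel values are invented):

    #include <vector>

    struct FakeHeap {
      int undefined_value = -1;
      int the_hole_value = -2;
    };

    // Shared helper: allocate storage of the requested length and fill every
    // slot with the supplied filler, as the rewritten helper above does.
    std::vector<int> AllocateFixedArrayWithFiller(FakeHeap* heap, int length,
                                                  int filler) {
      (void)heap;  // a real heap would choose the allocation space here
      return std::vector<int>(static_cast<size_t>(length), filler);
    }

    std::vector<int> AllocateFixedArray(FakeHeap* heap, int length) {
      return AllocateFixedArrayWithFiller(heap, length, heap->undefined_value);
    }

    std::vector<int> AllocateFixedArrayWithHoles(FakeHeap* heap, int length) {
      return AllocateFixedArrayWithFiller(heap, length, heap->the_hole_value);
    }
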
@@ -3691,9 +3842,65 @@ MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
}
+MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
+ int size = FixedDoubleArray::SizeFor(0);
+ Object* result;
+ { MaybeObject* maybe_result =
+ AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ // Initialize the object.
+ reinterpret_cast<FixedDoubleArray*>(result)->set_map(
+ fixed_double_array_map());
+ reinterpret_cast<FixedDoubleArray*>(result)->set_length(0);
+ return result;
+}
+
+
+MaybeObject* Heap::AllocateUninitializedFixedDoubleArray(
+ int length,
+ PretenureFlag pretenure) {
+ if (length == 0) return empty_fixed_double_array();
+
+ Object* obj;
+ { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ }
+
+ reinterpret_cast<FixedDoubleArray*>(obj)->set_map(fixed_double_array_map());
+ FixedDoubleArray::cast(obj)->set_length(length);
+ return obj;
+}
+
+
+MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
+ PretenureFlag pretenure) {
+ if (length < 0 || length > FixedDoubleArray::kMaxLength) {
+ return Failure::OutOfMemoryException();
+ }
+
+ AllocationSpace space =
+ (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+ int size = FixedDoubleArray::SizeFor(length);
+ if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
+ // Too big for new space.
+ space = LO_SPACE;
+ } else if (space == OLD_DATA_SPACE &&
+ size > MaxObjectSizeInPagedSpace()) {
+ // Too big for old data space.
+ space = LO_SPACE;
+ }
+
+ AllocationSpace retry_space =
+ (size <= MaxObjectSizeInPagedSpace()) ? OLD_DATA_SPACE : LO_SPACE;
+
+ return AllocateRaw(size, space, retry_space);
+}
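
The new raw double-array allocator picks its space from the requested size: too big for new space, or too big for a regular old-space page, pushes the object into large object space, and a separate retry space is computed for the slow path. A standalone sketch of that decision, with the size limits passed in as parameters since the real constants are not shown here:

    #include <utility>

    enum Space { NEW_SPACE, OLD_DATA_SPACE, LO_SPACE };

    // Returns {space, retry_space}, mirroring AllocateRawFixedDoubleArray above.
    std::pair<Space, Space> ChooseDoubleArraySpace(int size, bool tenured,
                                                   int max_new_space_object,
                                                   int max_paged_object) {
      Space space = tenured ? OLD_DATA_SPACE : NEW_SPACE;
      if (space == NEW_SPACE && size > max_new_space_object) {
        space = LO_SPACE;  // too big for the young generation
      } else if (space == OLD_DATA_SPACE && size > max_paged_object) {
        space = LO_SPACE;  // too big for a regular old-space page
      }
      Space retry_space = (size <= max_paged_object) ? OLD_DATA_SPACE : LO_SPACE;
      return {space, retry_space};
    }
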
+
+
MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
Object* result;
- { MaybeObject* maybe_result = Heap::AllocateFixedArray(length, pretenure);
+ { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
@@ -3705,7 +3912,7 @@ MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
MaybeObject* Heap::AllocateGlobalContext() {
Object* result;
{ MaybeObject* maybe_result =
- Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
+ AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
@@ -3719,49 +3926,62 @@ MaybeObject* Heap::AllocateGlobalContext() {
MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
Object* result;
- { MaybeObject* maybe_result = Heap::AllocateFixedArray(length);
+ { MaybeObject* maybe_result = AllocateFixedArray(length);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map(context_map());
+ context->set_map(function_context_map());
context->set_closure(function);
- context->set_fcontext(context);
- context->set_previous(NULL);
+ context->set_previous(function->context());
context->set_extension(NULL);
context->set_global(function->context()->global());
- ASSERT(!context->IsGlobalContext());
- ASSERT(context->is_function_context());
- ASSERT(result->IsContext());
- return result;
+ return context;
}
-MaybeObject* Heap::AllocateWithContext(Context* previous,
- JSObject* extension,
- bool is_catch_context) {
+MaybeObject* Heap::AllocateCatchContext(JSFunction* function,
+ Context* previous,
+ String* name,
+ Object* thrown_object) {
+ STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
Object* result;
{ MaybeObject* maybe_result =
- Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
+ AllocateFixedArray(Context::MIN_CONTEXT_SLOTS + 1);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ Context* context = reinterpret_cast<Context*>(result);
+ context->set_map(catch_context_map());
+ context->set_closure(function);
+ context->set_previous(previous);
+ context->set_extension(name);
+ context->set_global(previous->global());
+ context->set(Context::THROWN_OBJECT_INDEX, thrown_object);
+ return context;
+}
+
+
+MaybeObject* Heap::AllocateWithContext(JSFunction* function,
+ Context* previous,
+ JSObject* extension) {
+ Object* result;
+ { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map(is_catch_context ? catch_context_map() : context_map());
- context->set_closure(previous->closure());
- context->set_fcontext(previous->fcontext());
+ context->set_map(with_context_map());
+ context->set_closure(function);
context->set_previous(previous);
context->set_extension(extension);
context->set_global(previous->global());
- ASSERT(!context->IsGlobalContext());
- ASSERT(!context->is_function_context());
- ASSERT(result->IsContext());
- return result;
+ return context;
}
MaybeObject* Heap::AllocateStruct(InstanceType type) {
Map* map;
switch (type) {
-#define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
+#define MAKE_CASE(NAME, Name, name) \
+ case NAME##_TYPE: map = name##_map(); break;
STRUCT_LIST(MAKE_CASE)
#undef MAKE_CASE
default:
@@ -3772,7 +3992,7 @@ STRUCT_LIST(MAKE_CASE)
AllocationSpace space =
(size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
Object* result;
- { MaybeObject* maybe_result = Heap::Allocate(map, space);
+ { MaybeObject* maybe_result = Allocate(map, space);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Struct::cast(result)->InitializeBody(size);
@@ -3786,8 +4006,11 @@ bool Heap::IdleNotification() {
static const int kIdlesBeforeMarkCompact = 8;
static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
static const unsigned int kGCsBetweenCleanup = 4;
- static int number_idle_notifications = 0;
- static unsigned int last_gc_count = gc_count_;
+
+ if (!last_idle_notification_gc_count_init_) {
+ last_idle_notification_gc_count_ = gc_count_;
+ last_idle_notification_gc_count_init_ = true;
+ }
bool uncommit = true;
bool finished = false;
@@ -3796,56 +4019,56 @@ bool Heap::IdleNotification() {
// GCs have taken place. This allows another round of cleanup based
// on idle notifications if enough work has been carried out to
// provoke a number of garbage collections.
- if (gc_count_ - last_gc_count < kGCsBetweenCleanup) {
- number_idle_notifications =
- Min(number_idle_notifications + 1, kMaxIdleCount);
+ if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
+ number_idle_notifications_ =
+ Min(number_idle_notifications_ + 1, kMaxIdleCount);
} else {
- number_idle_notifications = 0;
- last_gc_count = gc_count_;
+ number_idle_notifications_ = 0;
+ last_idle_notification_gc_count_ = gc_count_;
}
- if (number_idle_notifications == kIdlesBeforeScavenge) {
+ if (number_idle_notifications_ == kIdlesBeforeScavenge) {
if (contexts_disposed_ > 0) {
- HistogramTimerScope scope(&Counters::gc_context);
+ HistogramTimerScope scope(isolate_->counters()->gc_context());
CollectAllGarbage(false);
} else {
CollectGarbage(NEW_SPACE);
}
new_space_.Shrink();
- last_gc_count = gc_count_;
- } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
+ last_idle_notification_gc_count_ = gc_count_;
+ } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
// Before doing the mark-sweep collections we clear the
// compilation cache to avoid hanging on to source code and
// generated code for cached functions.
- CompilationCache::Clear();
+ isolate_->compilation_cache()->Clear();
CollectAllGarbage(false);
new_space_.Shrink();
- last_gc_count = gc_count_;
+ last_idle_notification_gc_count_ = gc_count_;
- } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
+ } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
CollectAllGarbage(true);
new_space_.Shrink();
- last_gc_count = gc_count_;
+ last_idle_notification_gc_count_ = gc_count_;
+ number_idle_notifications_ = 0;
finished = true;
-
} else if (contexts_disposed_ > 0) {
if (FLAG_expose_gc) {
contexts_disposed_ = 0;
} else {
- HistogramTimerScope scope(&Counters::gc_context);
+ HistogramTimerScope scope(isolate_->counters()->gc_context());
CollectAllGarbage(false);
- last_gc_count = gc_count_;
+ last_idle_notification_gc_count_ = gc_count_;
}
// If this is the first idle notification, we reset the
// notification count to avoid letting idle notifications for
// context disposal garbage collections start a potentially too
// aggressive idle GC cycle.
- if (number_idle_notifications <= 1) {
- number_idle_notifications = 0;
+ if (number_idle_notifications_ <= 1) {
+ number_idle_notifications_ = 0;
uncommit = false;
}
- } else if (number_idle_notifications > kIdlesBeforeMarkCompact) {
+ } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
// If we have received more than kIdlesBeforeMarkCompact idle
// notifications we do not perform any cleanup because we don't
// expect to gain much by doing so.
@@ -3855,7 +4078,7 @@ bool Heap::IdleNotification() {
// Make sure that we have no pending context disposals and
// conditionally uncommit from space.
ASSERT(contexts_disposed_ == 0);
- if (uncommit) Heap::UncommitFromSpace();
+ if (uncommit) UncommitFromSpace();
return finished;
}
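
IdleNotification keeps its bookkeeping in heap fields now and escalates from scavenges to mark-sweep to mark-compact as idle rounds accumulate. A compact sketch of that escalation policy; only kIdlesBeforeMarkCompact appears above, the other two thresholds are illustrative:

    enum IdleAction { DO_SCAVENGE, DO_MARK_SWEEP, DO_MARK_COMPACT, DO_NOTHING };

    // Escalate collection effort as consecutive idle notifications arrive.
    IdleAction ActionForIdleRound(int number_idle_notifications) {
      const int kIdlesBeforeScavenge = 4;     // illustrative threshold
      const int kIdlesBeforeMarkSweep = 7;    // illustrative threshold
      const int kIdlesBeforeMarkCompact = 8;  // matches the constant above
      if (number_idle_notifications == kIdlesBeforeScavenge) return DO_SCAVENGE;
      if (number_idle_notifications == kIdlesBeforeMarkSweep) return DO_MARK_SWEEP;
      if (number_idle_notifications == kIdlesBeforeMarkCompact)
        return DO_MARK_COMPACT;
      return DO_NOTHING;
    }
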
@@ -3864,7 +4087,7 @@ bool Heap::IdleNotification() {
void Heap::Print() {
if (!HasBeenSetup()) return;
- Top::PrintStack();
+ isolate()->PrintStack();
AllSpaces spaces;
for (Space* space = spaces.next(); space != NULL; space = spaces.next())
space->Print();
@@ -3897,11 +4120,11 @@ void Heap::ReportHeapStatistics(const char* title) {
PrintF("\n");
PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
- GlobalHandles::PrintStats();
+ isolate_->global_handles()->PrintStats();
PrintF("\n");
PrintF("Heap statistics : ");
- MemoryAllocator::ReportStatistics();
+ isolate_->memory_allocator()->ReportStatistics();
PrintF("To space : ");
new_space_.ReportStatistics();
PrintF("Old pointer space : ");
@@ -3984,7 +4207,7 @@ static void VerifyPointersUnderWatermark(
Address start = page->ObjectAreaStart();
Address end = page->AllocationWatermark();
- Heap::IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
+ HEAP->IterateDirtyRegions(Page::kAllRegionsDirtyMarks,
start,
end,
visit_dirty_region,
@@ -4005,7 +4228,7 @@ static void VerifyPointersUnderWatermark(LargeObjectSpace* space) {
// When we are not in GC the Heap::InNewSpace() predicate
// checks that pointers which satisfy predicate point into
// the active semispace.
- Heap::InNewSpace(*slot);
+ HEAP->InNewSpace(*slot);
slot_address += kPointerSize;
}
}
@@ -4074,6 +4297,26 @@ MaybeObject* Heap::LookupAsciiSymbol(Vector<const char> string) {
}
+MaybeObject* Heap::LookupAsciiSymbol(Handle<SeqAsciiString> string,
+ int from,
+ int length) {
+ Object* symbol = NULL;
+ Object* new_table;
+ { MaybeObject* maybe_new_table =
+ symbol_table()->LookupSubStringAsciiSymbol(string,
+ from,
+ length,
+ &symbol);
+ if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
+ }
+ // Can't use set_symbol_table because SymbolTable::cast knows that
+ // SymbolTable is a singleton and checks for identity.
+ roots_[kSymbolTableRootIndex] = new_table;
+ ASSERT(symbol != NULL);
+ return symbol;
+}
+
+
MaybeObject* Heap::LookupTwoByteSymbol(Vector<const uc16> string) {
Object* symbol = NULL;
Object* new_table;
@@ -4126,7 +4369,8 @@ void Heap::ZapFromSpace() {
#endif // DEBUG
-bool Heap::IteratePointersInDirtyRegion(Address start,
+bool Heap::IteratePointersInDirtyRegion(Heap* heap,
+ Address start,
Address end,
ObjectSlotCallback copy_object_func) {
Address slot_address = start;
@@ -4134,10 +4378,10 @@ bool Heap::IteratePointersInDirtyRegion(Address start,
while (slot_address < end) {
Object** slot = reinterpret_cast<Object**>(slot_address);
- if (Heap::InNewSpace(*slot)) {
+ if (heap->InNewSpace(*slot)) {
ASSERT((*slot)->IsHeapObject());
copy_object_func(reinterpret_cast<HeapObject**>(slot));
- if (Heap::InNewSpace(*slot)) {
+ if (heap->InNewSpace(*slot)) {
ASSERT((*slot)->IsHeapObject());
pointers_to_new_space_found = true;
}
@@ -4171,14 +4415,16 @@ static bool IteratePointersInDirtyMaps(Address start,
Address map_address = start;
bool pointers_to_new_space_found = false;
+ Heap* heap = HEAP;
while (map_address < end) {
- ASSERT(!Heap::InNewSpace(Memory::Object_at(map_address)));
+ ASSERT(!heap->InNewSpace(Memory::Object_at(map_address)));
ASSERT(Memory::Object_at(map_address)->IsMap());
Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;
- if (Heap::IteratePointersInDirtyRegion(pointer_fields_start,
+ if (Heap::IteratePointersInDirtyRegion(heap,
+ pointer_fields_start,
pointer_fields_end,
copy_object_func)) {
pointers_to_new_space_found = true;
@@ -4192,6 +4438,7 @@ static bool IteratePointersInDirtyMaps(Address start,
bool Heap::IteratePointersInDirtyMapsRegion(
+ Heap* heap,
Address start,
Address end,
ObjectSlotCallback copy_object_func) {
@@ -4211,7 +4458,8 @@ bool Heap::IteratePointersInDirtyMapsRegion(
Min(prev_map + Map::kPointerFieldsEndOffset, end);
contains_pointers_to_new_space =
- IteratePointersInDirtyRegion(pointer_fields_start,
+ IteratePointersInDirtyRegion(heap,
+ pointer_fields_start,
pointer_fields_end,
copy_object_func)
|| contains_pointers_to_new_space;
@@ -4233,7 +4481,8 @@ bool Heap::IteratePointersInDirtyMapsRegion(
Min(end, map_aligned_end + Map::kPointerFieldsEndOffset);
contains_pointers_to_new_space =
- IteratePointersInDirtyRegion(pointer_fields_start,
+ IteratePointersInDirtyRegion(heap,
+ pointer_fields_start,
pointer_fields_end,
copy_object_func)
|| contains_pointers_to_new_space;
@@ -4253,10 +4502,10 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start,
while (slot_address < end) {
Object** slot = reinterpret_cast<Object**>(slot_address);
- if (Heap::InFromSpace(*slot)) {
+ if (InFromSpace(*slot)) {
ASSERT((*slot)->IsHeapObject());
callback(reinterpret_cast<HeapObject**>(slot));
- if (Heap::InNewSpace(*slot)) {
+ if (InNewSpace(*slot)) {
ASSERT((*slot)->IsHeapObject());
marks |= page->GetRegionMaskForAddress(slot_address);
}
@@ -4295,7 +4544,7 @@ uint32_t Heap::IterateDirtyRegions(
Address region_end = Min(second_region, area_end);
if (marks & mask) {
- if (visit_dirty_region(region_start, region_end, copy_object_func)) {
+ if (visit_dirty_region(this, region_start, region_end, copy_object_func)) {
newmarks |= mask;
}
}
@@ -4307,7 +4556,10 @@ uint32_t Heap::IterateDirtyRegions(
while (region_end <= area_end) {
if (marks & mask) {
- if (visit_dirty_region(region_start, region_end, copy_object_func)) {
+ if (visit_dirty_region(this,
+ region_start,
+ region_end,
+ copy_object_func)) {
newmarks |= mask;
}
}
@@ -4323,7 +4575,7 @@ uint32_t Heap::IterateDirtyRegions(
// with region end. Check whether region covering last part of area is
// dirty.
if (marks & mask) {
- if (visit_dirty_region(region_start, area_end, copy_object_func)) {
+ if (visit_dirty_region(this, region_start, area_end, copy_object_func)) {
newmarks |= mask;
}
}
@@ -4387,9 +4639,10 @@ void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
v->Synchronize("symbol_table");
- if (mode != VISIT_ALL_IN_SCAVENGE) {
+ if (mode != VISIT_ALL_IN_SCAVENGE &&
+ mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
// Scavenge collections have special processing for this.
- ExternalStringTable::Iterate(v);
+ external_string_table_.Iterate(v);
}
v->Synchronize("external_string_table");
}
@@ -4402,42 +4655,50 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
v->Synchronize("symbol");
- Bootstrapper::Iterate(v);
+ isolate_->bootstrapper()->Iterate(v);
v->Synchronize("bootstrapper");
- Top::Iterate(v);
+ isolate_->Iterate(v);
v->Synchronize("top");
Relocatable::Iterate(v);
v->Synchronize("relocatable");
#ifdef ENABLE_DEBUGGER_SUPPORT
- Debug::Iterate(v);
+ isolate_->debug()->Iterate(v);
#endif
v->Synchronize("debug");
- CompilationCache::Iterate(v);
+ isolate_->compilation_cache()->Iterate(v);
v->Synchronize("compilationcache");
// Iterate over local handles in handle scopes.
- HandleScopeImplementer::Iterate(v);
+ isolate_->handle_scope_implementer()->Iterate(v);
v->Synchronize("handlescope");
// Iterate over the builtin code objects and code stubs in the
// heap. Note that it is not necessary to iterate over code objects
// on scavenge collections.
- if (mode != VISIT_ALL_IN_SCAVENGE) {
- Builtins::IterateBuiltins(v);
+ if (mode != VISIT_ALL_IN_SCAVENGE &&
+ mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
+ isolate_->builtins()->IterateBuiltins(v);
}
v->Synchronize("builtins");
// Iterate over global handles.
- if (mode == VISIT_ONLY_STRONG) {
- GlobalHandles::IterateStrongRoots(v);
- } else {
- GlobalHandles::IterateAllRoots(v);
+ switch (mode) {
+ case VISIT_ONLY_STRONG:
+ isolate_->global_handles()->IterateStrongRoots(v);
+ break;
+ case VISIT_ALL_IN_SCAVENGE:
+ isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v);
+ break;
+ case VISIT_ALL_IN_SWEEP_NEWSPACE:
+ case VISIT_ALL:
+ isolate_->global_handles()->IterateAllRoots(v);
+ break;
}
v->Synchronize("globalhandles");
// Iterate over pointers being held by inactive threads.
- ThreadManager::Iterate(v);
+ isolate_->thread_manager()->Iterate(v);
v->Synchronize("threadmanager");
// Iterate over the pointers the Serialization/Deserialization code is
@@ -4456,10 +4717,6 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
}
-// Flag is set when the heap has been configured. The heap can be repeatedly
-// configured through the API until it is setup.
-static bool heap_configured = false;
-
// TODO(1236194): Since the heap size is configurable on the command line
// and through the API, we should gracefully handle the case that the heap
// size is not big enough to fit all the initial objects.
@@ -4506,7 +4763,7 @@ bool Heap::ConfigureHeap(int max_semispace_size,
// The old generation is paged.
max_old_generation_size_ = RoundUp(max_old_generation_size_, Page::kPageSize);
- heap_configured = true;
+ configured_ = true;
return true;
}
@@ -4534,11 +4791,13 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
*stats->cell_space_size = cell_space_->Size();
*stats->cell_space_capacity = cell_space_->Capacity();
*stats->lo_space_size = lo_space_->Size();
- GlobalHandles::RecordStats(stats);
- *stats->memory_allocator_size = MemoryAllocator::Size();
+ isolate_->global_handles()->RecordStats(stats);
+ *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
*stats->memory_allocator_capacity =
- MemoryAllocator::Size() + MemoryAllocator::Available();
+ isolate()->memory_allocator()->Size() +
+ isolate()->memory_allocator()->Available();
*stats->os_error = OS::GetLastError();
+ isolate()->memory_allocator()->Available();
if (take_snapshot) {
HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
for (HeapObject* obj = iterator.next();
@@ -4570,8 +4829,177 @@ int Heap::PromotedExternalMemorySize() {
- amount_of_external_allocated_memory_at_last_global_gc_;
}
+#ifdef DEBUG
+
+// Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
+static const int kMarkTag = 2;
+
+
+class HeapDebugUtils {
+ public:
+ explicit HeapDebugUtils(Heap* heap)
+ : search_for_any_global_(false),
+ search_target_(NULL),
+ found_target_(false),
+ object_stack_(20),
+ heap_(heap) {
+ }
+
+ class MarkObjectVisitor : public ObjectVisitor {
+ public:
+ explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+ void VisitPointers(Object** start, Object** end) {
+ // Copy all HeapObject pointers in [start, end)
+ for (Object** p = start; p < end; p++) {
+ if ((*p)->IsHeapObject())
+ utils_->MarkObjectRecursively(p);
+ }
+ }
+
+ HeapDebugUtils* utils_;
+ };
+
+ void MarkObjectRecursively(Object** p) {
+ if (!(*p)->IsHeapObject()) return;
+
+ HeapObject* obj = HeapObject::cast(*p);
+
+ Object* map = obj->map();
+
+ if (!map->IsHeapObject()) return; // visited before
+
+ if (found_target_) return; // stop if target found
+ object_stack_.Add(obj);
+ if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
+ (!search_for_any_global_ && (obj == search_target_))) {
+ found_target_ = true;
+ return;
+ }
+
+ // not visited yet
+ Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
+
+ Address map_addr = map_p->address();
+
+ obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
+
+ MarkObjectRecursively(&map);
+
+ MarkObjectVisitor mark_visitor(this);
+
+ obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
+ &mark_visitor);
+
+ if (!found_target_) // don't pop if found the target
+ object_stack_.RemoveLast();
+ }
+
+
+ class UnmarkObjectVisitor : public ObjectVisitor {
+ public:
+ explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+ void VisitPointers(Object** start, Object** end) {
+ // Copy all HeapObject pointers in [start, end)
+ for (Object** p = start; p < end; p++) {
+ if ((*p)->IsHeapObject())
+ utils_->UnmarkObjectRecursively(p);
+ }
+ }
+
+ HeapDebugUtils* utils_;
+ };
+
+
+ void UnmarkObjectRecursively(Object** p) {
+ if (!(*p)->IsHeapObject()) return;
+
+ HeapObject* obj = HeapObject::cast(*p);
+
+ Object* map = obj->map();
+
+ if (map->IsHeapObject()) return; // unmarked already
+
+ Address map_addr = reinterpret_cast<Address>(map);
+
+ map_addr -= kMarkTag;
+
+ ASSERT_TAG_ALIGNED(map_addr);
+
+ HeapObject* map_p = HeapObject::FromAddress(map_addr);
+
+ obj->set_map(reinterpret_cast<Map*>(map_p));
+
+ UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
+
+ UnmarkObjectVisitor unmark_visitor(this);
+
+ obj->IterateBody(Map::cast(map_p)->instance_type(),
+ obj->SizeFromMap(Map::cast(map_p)),
+ &unmark_visitor);
+ }
+
+
+ void MarkRootObjectRecursively(Object** root) {
+ if (search_for_any_global_) {
+ ASSERT(search_target_ == NULL);
+ } else {
+ ASSERT(search_target_->IsHeapObject());
+ }
+ found_target_ = false;
+ object_stack_.Clear();
+
+ MarkObjectRecursively(root);
+ UnmarkObjectRecursively(root);
+
+ if (found_target_) {
+ PrintF("=====================================\n");
+ PrintF("==== Path to object ====\n");
+ PrintF("=====================================\n\n");
+
+ ASSERT(!object_stack_.is_empty());
+ for (int i = 0; i < object_stack_.length(); i++) {
+ if (i > 0) PrintF("\n |\n |\n V\n\n");
+ Object* obj = object_stack_[i];
+ obj->Print();
+ }
+ PrintF("=====================================\n");
+ }
+ }
+
+ // Helper class for visiting HeapObjects recursively.
+ class MarkRootVisitor: public ObjectVisitor {
+ public:
+ explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
+
+ void VisitPointers(Object** start, Object** end) {
+ // Visit all HeapObject pointers in [start, end)
+ for (Object** p = start; p < end; p++) {
+ if ((*p)->IsHeapObject())
+ utils_->MarkRootObjectRecursively(p);
+ }
+ }
+
+ HeapDebugUtils* utils_;
+ };
+
+ bool search_for_any_global_;
+ Object* search_target_;
+ bool found_target_;
+ List<Object*> object_stack_;
+ Heap* heap_;
+
+ friend class Heap;
+};
+
+#endif
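
The debug-only HeapDebugUtils marks a visited object by overwriting its map pointer with a tagged address (kMarkTag) and restores it when unmarking; an unvisited object is recognized because its map word still looks like a heap object. A minimal standalone sketch of that pointer-tagging trick on a plain struct, assuming pointers are at least 4-byte aligned so bit 1 is free:

    #include <cassert>
    #include <cstdint>

    struct Node {
      Node* map;  // stands in for the object's map pointer
    };

    const uintptr_t kMarkTag = 2;  // same idea as the tag used above

    bool IsMarked(const Node* n) {
      return (reinterpret_cast<uintptr_t>(n->map) & kMarkTag) != 0;
    }

    void Mark(Node* n) {
      assert(!IsMarked(n));  // relies on map pointers being 4-byte aligned
      n->map = reinterpret_cast<Node*>(
          reinterpret_cast<uintptr_t>(n->map) + kMarkTag);
    }

    void Unmark(Node* n) {
      assert(IsMarked(n));
      n->map = reinterpret_cast<Node*>(
          reinterpret_cast<uintptr_t>(n->map) - kMarkTag);
    }
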
bool Heap::Setup(bool create_heap_objects) {
+#ifdef DEBUG
+ debug_utils_ = new HeapDebugUtils(this);
+#endif
+
// Initialize heap spaces and initial maps and objects. Whenever something
// goes wrong, just return false. The caller should check the results and
// call Heap::TearDown() to release allocated memory.
@@ -4580,13 +5008,19 @@ bool Heap::Setup(bool create_heap_objects) {
// Configuration is based on the flags new-space-size (really the semispace
// size) and old-space-size if set or the initial values of semispace_size_
// and old_generation_size_ otherwise.
- if (!heap_configured) {
+ if (!configured_) {
if (!ConfigureHeapDefault()) return false;
}
- ScavengingVisitor::Initialize();
- NewSpaceScavenger::Initialize();
- MarkCompactCollector::Initialize();
+ gc_initializer_mutex->Lock();
+ static bool initialized_gc = false;
+ if (!initialized_gc) {
+ initialized_gc = true;
+ InitializeScavengingVisitorsTables();
+ NewSpaceScavenger::Initialize();
+ MarkCompactCollector::Initialize();
+ }
+ gc_initializer_mutex->Unlock();
MarkMapPointersAsEncoded(false);
@@ -4594,9 +5028,11 @@ bool Heap::Setup(bool create_heap_objects) {
// space. The chunk is double the size of the requested reserved
// new space size to ensure that we can find a pair of semispaces that
// are contiguous and aligned to their size.
- if (!MemoryAllocator::Setup(MaxReserved(), MaxExecutableSize())) return false;
+ if (!isolate_->memory_allocator()->Setup(MaxReserved(), MaxExecutableSize()))
+ return false;
void* chunk =
- MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_);
+ isolate_->memory_allocator()->ReserveInitialChunk(
+ 4 * reserved_semispace_size_);
if (chunk == NULL) return false;
// Align the pair of semispaces to their size, which must be a power
@@ -4609,13 +5045,19 @@ bool Heap::Setup(bool create_heap_objects) {
// Initialize old pointer space.
old_pointer_space_ =
- new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
+ new OldSpace(this,
+ max_old_generation_size_,
+ OLD_POINTER_SPACE,
+ NOT_EXECUTABLE);
if (old_pointer_space_ == NULL) return false;
if (!old_pointer_space_->Setup(NULL, 0)) return false;
// Initialize old data space.
old_data_space_ =
- new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
+ new OldSpace(this,
+ max_old_generation_size_,
+ OLD_DATA_SPACE,
+ NOT_EXECUTABLE);
if (old_data_space_ == NULL) return false;
if (!old_data_space_->Setup(NULL, 0)) return false;
@@ -4624,18 +5066,18 @@ bool Heap::Setup(bool create_heap_objects) {
// On 64-bit platform(s), we put all code objects in a 2 GB range of
// virtual address space, so that they can call each other with near calls.
if (code_range_size_ > 0) {
- if (!CodeRange::Setup(code_range_size_)) {
+ if (!isolate_->code_range()->Setup(code_range_size_)) {
return false;
}
}
code_space_ =
- new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE);
+ new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
if (code_space_ == NULL) return false;
if (!code_space_->Setup(NULL, 0)) return false;
// Initialize map space.
- map_space_ = new MapSpace(FLAG_use_big_map_space
+ map_space_ = new MapSpace(this, FLAG_use_big_map_space
? max_old_generation_size_
: MapSpace::kMaxMapPageIndex * Page::kPageSize,
FLAG_max_map_space_pages,
@@ -4644,14 +5086,14 @@ bool Heap::Setup(bool create_heap_objects) {
if (!map_space_->Setup(NULL, 0)) return false;
// Initialize global property cell space.
- cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE);
+ cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
if (cell_space_ == NULL) return false;
if (!cell_space_->Setup(NULL, 0)) return false;
// The large object code space may contain code or data. We set the memory
// to be non-executable here for safety, but this means we need to enable it
// explicitly when allocating large code objects.
- lo_space_ = new LargeObjectSpace(LO_SPACE);
+ lo_space_ = new LargeObjectSpace(this, LO_SPACE);
if (lo_space_ == NULL) return false;
if (!lo_space_->Setup()) return false;
@@ -4666,19 +5108,16 @@ bool Heap::Setup(bool create_heap_objects) {
global_contexts_list_ = undefined_value();
}
- LOG(IntPtrTEvent("heap-capacity", Capacity()));
- LOG(IntPtrTEvent("heap-available", Available()));
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
- // This should be called only after initial objects have been created.
- ProducerHeapProfile::Setup();
-#endif
+ LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
+ LOG(isolate_, IntPtrTEvent("heap-available", Available()));
return true;
}
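
Setup now performs the process-wide GC table initialization exactly once, guarded by a mutex, because several heaps (one per isolate) may be set up concurrently. A standalone sketch of that one-time-init pattern using std::mutex; V8 itself uses its own OS mutex wrapper:

    #include <mutex>

    static std::mutex gc_initializer_mutex;

    void InitializeSharedGCTablesOnce() {
      std::lock_guard<std::mutex> guard(gc_initializer_mutex);
      static bool initialized_gc = false;  // shared across all heaps
      if (!initialized_gc) {
        initialized_gc = true;
        // One-time, process-wide setup would go here (scavenging visitor
        // tables, mark-compact tables, ...), as in Heap::Setup above.
      }
    }
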
void Heap::SetStackLimits() {
+ ASSERT(isolate_ != NULL);
+ ASSERT(isolate_ == isolate());
// On 64 bit machines, pointers are generally out of range of Smis. We write
// something that looks like an out of range Smi to the GC.
@@ -4686,10 +5125,10 @@ void Heap::SetStackLimits() {
// These are actually addresses, but the tag makes the GC ignore it.
roots_[kStackLimitRootIndex] =
reinterpret_cast<Object*>(
- (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag);
+ (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
roots_[kRealStackLimitRootIndex] =
reinterpret_cast<Object*>(
- (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag);
+ (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
}
@@ -4699,16 +5138,16 @@ void Heap::TearDown() {
PrintF("gc_count=%d ", gc_count_);
PrintF("mark_sweep_count=%d ", ms_count_);
PrintF("mark_compact_count=%d ", mc_count_);
- PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause());
- PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator());
+ PrintF("max_gc_pause=%d ", get_max_gc_pause());
+ PrintF("min_in_mutator=%d ", get_min_in_mutator());
PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
- GCTracer::get_max_alive_after_gc());
+ get_max_alive_after_gc());
PrintF("\n\n");
}
- GlobalHandles::TearDown();
+ isolate_->global_handles()->TearDown();
- ExternalStringTable::TearDown();
+ external_string_table_.TearDown();
new_space_.TearDown();
@@ -4748,7 +5187,12 @@ void Heap::TearDown() {
lo_space_ = NULL;
}
- MemoryAllocator::TearDown();
+ isolate_->memory_allocator()->TearDown();
+
+#ifdef DEBUG
+ delete debug_utils_;
+ debug_utils_ = NULL;
+#endif
}
@@ -4837,7 +5281,7 @@ class PrintHandleVisitor: public ObjectVisitor {
void Heap::PrintHandles() {
PrintF("Handles:\n");
PrintHandleVisitor v;
- HandleScopeImplementer::Iterate(&v);
+ isolate_->handle_scope_implementer()->Iterate(&v);
}
#endif
@@ -4846,19 +5290,19 @@ void Heap::PrintHandles() {
Space* AllSpaces::next() {
switch (counter_++) {
case NEW_SPACE:
- return Heap::new_space();
+ return HEAP->new_space();
case OLD_POINTER_SPACE:
- return Heap::old_pointer_space();
+ return HEAP->old_pointer_space();
case OLD_DATA_SPACE:
- return Heap::old_data_space();
+ return HEAP->old_data_space();
case CODE_SPACE:
- return Heap::code_space();
+ return HEAP->code_space();
case MAP_SPACE:
- return Heap::map_space();
+ return HEAP->map_space();
case CELL_SPACE:
- return Heap::cell_space();
+ return HEAP->cell_space();
case LO_SPACE:
- return Heap::lo_space();
+ return HEAP->lo_space();
default:
return NULL;
}
@@ -4868,15 +5312,15 @@ Space* AllSpaces::next() {
PagedSpace* PagedSpaces::next() {
switch (counter_++) {
case OLD_POINTER_SPACE:
- return Heap::old_pointer_space();
+ return HEAP->old_pointer_space();
case OLD_DATA_SPACE:
- return Heap::old_data_space();
+ return HEAP->old_data_space();
case CODE_SPACE:
- return Heap::code_space();
+ return HEAP->code_space();
case MAP_SPACE:
- return Heap::map_space();
+ return HEAP->map_space();
case CELL_SPACE:
- return Heap::cell_space();
+ return HEAP->cell_space();
default:
return NULL;
}
@@ -4887,11 +5331,11 @@ PagedSpace* PagedSpaces::next() {
OldSpace* OldSpaces::next() {
switch (counter_++) {
case OLD_POINTER_SPACE:
- return Heap::old_pointer_space();
+ return HEAP->old_pointer_space();
case OLD_DATA_SPACE:
- return Heap::old_data_space();
+ return HEAP->old_data_space();
case CODE_SPACE:
- return Heap::code_space();
+ return HEAP->code_space();
default:
return NULL;
}
@@ -4946,25 +5390,25 @@ ObjectIterator* SpaceIterator::CreateIterator() {
switch (current_space_) {
case NEW_SPACE:
- iterator_ = new SemiSpaceIterator(Heap::new_space(), size_func_);
+ iterator_ = new SemiSpaceIterator(HEAP->new_space(), size_func_);
break;
case OLD_POINTER_SPACE:
- iterator_ = new HeapObjectIterator(Heap::old_pointer_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->old_pointer_space(), size_func_);
break;
case OLD_DATA_SPACE:
- iterator_ = new HeapObjectIterator(Heap::old_data_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->old_data_space(), size_func_);
break;
case CODE_SPACE:
- iterator_ = new HeapObjectIterator(Heap::code_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->code_space(), size_func_);
break;
case MAP_SPACE:
- iterator_ = new HeapObjectIterator(Heap::map_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->map_space(), size_func_);
break;
case CELL_SPACE:
- iterator_ = new HeapObjectIterator(Heap::cell_space(), size_func_);
+ iterator_ = new HeapObjectIterator(HEAP->cell_space(), size_func_);
break;
case LO_SPACE:
- iterator_ = new LargeObjectIterator(Heap::lo_space(), size_func_);
+ iterator_ = new LargeObjectIterator(HEAP->lo_space(), size_func_);
break;
}
@@ -4998,16 +5442,17 @@ class FreeListNodesFilter : public HeapObjectsFilter {
private:
void MarkFreeListNodes() {
- Heap::old_pointer_space()->MarkFreeListNodes();
- Heap::old_data_space()->MarkFreeListNodes();
- MarkCodeSpaceFreeListNodes();
- Heap::map_space()->MarkFreeListNodes();
- Heap::cell_space()->MarkFreeListNodes();
+ Heap* heap = HEAP;
+ heap->old_pointer_space()->MarkFreeListNodes();
+ heap->old_data_space()->MarkFreeListNodes();
+ MarkCodeSpaceFreeListNodes(heap);
+ heap->map_space()->MarkFreeListNodes();
+ heap->cell_space()->MarkFreeListNodes();
}
- void MarkCodeSpaceFreeListNodes() {
+ void MarkCodeSpaceFreeListNodes(Heap* heap) {
// For code space, using FreeListNode::IsFreeListNode is OK.
- HeapObjectIterator iter(Heap::code_space());
+ HeapObjectIterator iter(heap->code_space());
for (HeapObject* obj = iter.next_object();
obj != NULL;
obj = iter.next_object()) {
@@ -5069,7 +5514,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
obj->SetMark();
}
UnmarkingVisitor visitor;
- Heap::IterateRoots(&visitor, VISIT_ALL);
+ HEAP->IterateRoots(&visitor, VISIT_ALL);
while (visitor.can_process())
visitor.ProcessNext();
}
@@ -5372,7 +5817,7 @@ static intptr_t CountTotalHolesSize() {
}
-GCTracer::GCTracer()
+GCTracer::GCTracer(Heap* heap)
: start_time_(0.0),
start_size_(0),
gc_count_(0),
@@ -5381,14 +5826,16 @@ GCTracer::GCTracer()
marked_count_(0),
allocated_since_last_gc_(0),
spent_in_mutator_(0),
- promoted_objects_size_(0) {
+ promoted_objects_size_(0),
+ heap_(heap) {
// These two fields reflect the state of the previous full collection.
// Set them before they are changed by the collector.
- previous_has_compacted_ = MarkCompactCollector::HasCompacted();
- previous_marked_count_ = MarkCompactCollector::previous_marked_count();
+ previous_has_compacted_ = heap_->mark_compact_collector_.HasCompacted();
+ previous_marked_count_ =
+ heap_->mark_compact_collector_.previous_marked_count();
if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
start_time_ = OS::TimeCurrentMillis();
- start_size_ = Heap::SizeOfObjects();
+ start_size_ = heap_->SizeOfObjects();
for (int i = 0; i < Scope::kNumberOfScopes; i++) {
scopes_[i] = 0;
@@ -5396,10 +5843,11 @@ GCTracer::GCTracer()
in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
- allocated_since_last_gc_ = Heap::SizeOfObjects() - alive_after_last_gc_;
+ allocated_since_last_gc_ =
+ heap_->SizeOfObjects() - heap_->alive_after_last_gc_;
- if (last_gc_end_timestamp_ > 0) {
- spent_in_mutator_ = Max(start_time_ - last_gc_end_timestamp_, 0.0);
+ if (heap_->last_gc_end_timestamp_ > 0) {
+ spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
}
}
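
GCTracer is now constructed against a specific heap and folds its measurements into that heap's cumulative statistics instead of into statics. A small sketch of the same scoped-tracer idea using std::chrono; the HeapStats fields are stand-ins for the heap members touched above:

    #include <algorithm>
    #include <chrono>

    struct HeapStats {
      int gc_count = 0;
      double max_gc_pause_ms = 0.0;
    };

    // Scoped tracer: measures one collection and records the result on the
    // owning heap's statistics, as the rewritten GCTracer does.
    class ScopedGCTracer {
     public:
      explicit ScopedGCTracer(HeapStats* stats)
          : stats_(stats), start_(std::chrono::steady_clock::now()) {}
      ~ScopedGCTracer() {
        std::chrono::duration<double, std::milli> pause =
            std::chrono::steady_clock::now() - start_;
        ++stats_->gc_count;
        stats_->max_gc_pause_ms =
            std::max(stats_->max_gc_pause_ms, pause.count());
      }
     private:
      HeapStats* stats_;
      std::chrono::steady_clock::time_point start_;
    };
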
@@ -5408,20 +5856,21 @@ GCTracer::~GCTracer() {
// Printf ONE line iff flag is set.
if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
- bool first_gc = (last_gc_end_timestamp_ == 0);
+ bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
- alive_after_last_gc_ = Heap::SizeOfObjects();
- last_gc_end_timestamp_ = OS::TimeCurrentMillis();
+ heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
+ heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();
- int time = static_cast<int>(last_gc_end_timestamp_ - start_time_);
+ int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);
// Update cumulative GC statistics if required.
if (FLAG_print_cumulative_gc_stat) {
- max_gc_pause_ = Max(max_gc_pause_, time);
- max_alive_after_gc_ = Max(max_alive_after_gc_, alive_after_last_gc_);
+ heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
+ heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
+ heap_->alive_after_last_gc_);
if (!first_gc) {
- min_in_mutator_ = Min(min_in_mutator_,
- static_cast<int>(spent_in_mutator_));
+ heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
+ static_cast<int>(spent_in_mutator_));
}
}
@@ -5446,7 +5895,8 @@ GCTracer::~GCTracer() {
PrintF("s");
break;
case MARK_COMPACTOR:
- PrintF(MarkCompactCollector::HasCompacted() ? "mc" : "ms");
+ PrintF("%s",
+ heap_->mark_compact_collector_.HasCompacted() ? "mc" : "ms");
break;
default:
UNREACHABLE();
@@ -5460,7 +5910,7 @@ GCTracer::~GCTracer() {
PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
- PrintF("total_size_after=%" V8_PTR_PREFIX "d ", Heap::SizeOfObjects());
+ PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
in_free_list_or_wasted_before_gc_);
PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
@@ -5472,7 +5922,7 @@ GCTracer::~GCTracer() {
}
#if defined(ENABLE_LOGGING_AND_PROFILING)
- Heap::PrintShortHeapStatistics();
+ heap_->PrintShortHeapStatistics();
#endif
}
@@ -5482,8 +5932,8 @@ const char* GCTracer::CollectorString() {
case SCAVENGER:
return "Scavenge";
case MARK_COMPACTOR:
- return MarkCompactCollector::HasCompacted() ? "Mark-compact"
- : "Mark-sweep";
+ return heap_->mark_compact_collector_.HasCompacted() ? "Mark-compact"
+ : "Mark-sweep";
}
return "Unknown GC";
}
@@ -5503,13 +5953,13 @@ int KeyedLookupCache::Lookup(Map* map, String* name) {
if ((key.map == map) && key.name->Equals(name)) {
return field_offsets_[index];
}
- return -1;
+ return kNotFound;
}
void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
String* symbol;
- if (Heap::LookupSymbolIfExists(name, &symbol)) {
+ if (HEAP->LookupSymbolIfExists(name, &symbol)) {
int index = Hash(map, symbol);
Key& key = keys_[index];
key.map = map;
@@ -5524,35 +5974,24 @@ void KeyedLookupCache::Clear() {
}
-KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
-
-
-int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
-
-
void DescriptorLookupCache::Clear() {
for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
}
-DescriptorLookupCache::Key
-DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
-
-int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
-
-
#ifdef DEBUG
void Heap::GarbageCollectionGreedyCheck() {
ASSERT(FLAG_gc_greedy);
- if (Bootstrapper::IsActive()) return;
+ if (isolate_->bootstrapper()->IsActive()) return;
if (disallow_allocation_failure()) return;
CollectGarbage(NEW_SPACE);
}
#endif
-TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
- : type_(t) {
+TranscendentalCache::SubCache::SubCache(Type t)
+ : type_(t),
+ isolate_(Isolate::Current()) {
uint32_t in0 = 0xffffffffu; // Bit-pattern for a NaN that isn't
uint32_t in1 = 0xffffffffu; // generated by the FPU.
for (int i = 0; i < kCacheSize; i++) {
@@ -5563,9 +6002,6 @@ TranscendentalCache::TranscendentalCache(TranscendentalCache::Type t)
}
-TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
-
-
void TranscendentalCache::Clear() {
for (int i = 0; i < kNumberOfCaches; i++) {
if (caches_[i] != NULL) {
@@ -5579,8 +6015,8 @@ void TranscendentalCache::Clear() {
void ExternalStringTable::CleanUp() {
int last = 0;
for (int i = 0; i < new_space_strings_.length(); ++i) {
- if (new_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
- if (Heap::InNewSpace(new_space_strings_[i])) {
+ if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
+ if (heap_->InNewSpace(new_space_strings_[i])) {
new_space_strings_[last++] = new_space_strings_[i];
} else {
old_space_strings_.Add(new_space_strings_[i]);
@@ -5589,8 +6025,8 @@ void ExternalStringTable::CleanUp() {
new_space_strings_.Rewind(last);
last = 0;
for (int i = 0; i < old_space_strings_.length(); ++i) {
- if (old_space_strings_[i] == Heap::raw_unchecked_null_value()) continue;
- ASSERT(!Heap::InNewSpace(old_space_strings_[i]));
+ if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
+ ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
old_space_strings_[last++] = old_space_strings_[i];
}
old_space_strings_.Rewind(last);
@@ -5604,7 +6040,4 @@ void ExternalStringTable::TearDown() {
}
-List<Object*> ExternalStringTable::new_space_strings_;
-List<Object*> ExternalStringTable::old_space_strings_;
-
} } // namespace v8::internal