Diffstat (limited to 'src/3rdparty/v8/test/cctest/test-heap.cc')
-rw-r--r--  src/3rdparty/v8/test/cctest/test-heap.cc | 876
1 file changed, 792 insertions(+), 84 deletions(-)
diff --git a/src/3rdparty/v8/test/cctest/test-heap.cc b/src/3rdparty/v8/test/cctest/test-heap.cc
index baee7e4..0d72ff7 100644
--- a/src/3rdparty/v8/test/cctest/test-heap.cc
+++ b/src/3rdparty/v8/test/cctest/test-heap.cc
@@ -4,10 +4,12 @@
#include "v8.h"
+#include "compilation-cache.h"
#include "execution.h"
#include "factory.h"
#include "macro-assembler.h"
#include "global-handles.h"
+#include "stub-cache.h"
#include "cctest.h"
#include "snapshot.h"
@@ -22,6 +24,19 @@ static void InitializeVM() {
}
+// Go through all incremental marking steps in one swoop.
+static void SimulateIncrementalMarking() {
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ CHECK(marking->IsStopped());
+ marking->Start();
+ CHECK(marking->IsMarking());
+ while (!marking->IsComplete()) {
+ marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+ CHECK(marking->IsComplete());
+}
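+
+// A sketch of the pattern the tests below follow (the helper name is
+// hypothetical and shown for illustration only, not part of this
+// change): drive marking to completion, then let a full GC run the
+// final marking step and sweep.
+static void SimulateMarkingAndGC() {
+  SimulateIncrementalMarking();
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+}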
+
+
static void CheckMap(Map* map, int type, int instance_size) {
CHECK(map->IsHeapObject());
#ifdef DEBUG
@@ -158,7 +173,8 @@ TEST(HeapObjects) {
String* object_symbol = String::cast(HEAP->Object_symbol());
CHECK(
- Isolate::Current()->context()->global()->HasLocalProperty(object_symbol));
+ Isolate::Current()->context()->global_object()->HasLocalProperty(
+ object_symbol));
// Check ToString for oddballs
CheckOddball(HEAP->true_value(), "true");
@@ -214,7 +230,7 @@ TEST(GarbageCollection) {
Handle<Map> initial_map =
FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
function->set_initial_map(*initial_map);
- Isolate::Current()->context()->global()->SetProperty(
+ Isolate::Current()->context()->global_object()->SetProperty(
*name, *function, NONE, kNonStrictMode)->ToObjectChecked();
// Allocate an object. Unrooted after leaving the scope.
Handle<JSObject> obj = FACTORY->NewJSObject(function);
@@ -230,9 +246,10 @@ TEST(GarbageCollection) {
HEAP->CollectGarbage(NEW_SPACE);
// Function should be alive.
- CHECK(Isolate::Current()->context()->global()->HasLocalProperty(*name));
+ CHECK(Isolate::Current()->context()->global_object()->
+ HasLocalProperty(*name));
// Check function is retained.
- Object* func_value = Isolate::Current()->context()->global()->
+ Object* func_value = Isolate::Current()->context()->global_object()->
GetProperty(*name)->ToObjectChecked();
CHECK(func_value->IsJSFunction());
Handle<JSFunction> function(JSFunction::cast(func_value));
@@ -241,7 +258,7 @@ TEST(GarbageCollection) {
HandleScope inner_scope;
// Allocate another object, make it reachable from global.
Handle<JSObject> obj = FACTORY->NewJSObject(function);
- Isolate::Current()->context()->global()->SetProperty(
+ Isolate::Current()->context()->global_object()->SetProperty(
*obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
obj->SetProperty(
*prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
@@ -250,10 +267,11 @@ TEST(GarbageCollection) {
// After gc, it should survive.
HEAP->CollectGarbage(NEW_SPACE);
- CHECK(Isolate::Current()->context()->global()->HasLocalProperty(*obj_name));
- CHECK(Isolate::Current()->context()->global()->
+ CHECK(Isolate::Current()->context()->global_object()->
+ HasLocalProperty(*obj_name));
+ CHECK(Isolate::Current()->context()->global_object()->
GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
- Object* obj = Isolate::Current()->context()->global()->
+ Object* obj = Isolate::Current()->context()->global_object()->
GetProperty(*obj_name)->ToObjectChecked();
JSObject* js_obj = JSObject::cast(obj);
CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
@@ -416,6 +434,7 @@ TEST(WeakGlobalHandlesMark) {
global_handles->Destroy(h1.location());
}
+
TEST(DeleteWeakGlobalHandle) {
InitializeVM();
GlobalHandles* global_handles = Isolate::Current()->global_handles();
@@ -446,6 +465,7 @@ TEST(DeleteWeakGlobalHandle) {
CHECK(WeakPointerCleared);
}
+
static const char* not_so_random_string_table[] = {
"abstract",
"boolean",
@@ -562,7 +582,7 @@ TEST(ObjectProperties) {
v8::HandleScope sc;
String* object_symbol = String::cast(HEAP->Object_symbol());
- Object* raw_object = Isolate::Current()->context()->global()->
+ Object* raw_object = Isolate::Current()->context()->global_object()->
GetProperty(object_symbol)->ToObjectChecked();
JSFunction* object_function = JSFunction::cast(raw_object);
Handle<JSFunction> constructor(object_function);
@@ -659,7 +679,7 @@ TEST(JSArray) {
v8::HandleScope sc;
Handle<String> name = FACTORY->LookupAsciiSymbol("Array");
- Object* raw_object = Isolate::Current()->context()->global()->
+ Object* raw_object = Isolate::Current()->context()->global_object()->
GetProperty(*name)->ToObjectChecked();
Handle<JSFunction> function = Handle<JSFunction>(
JSFunction::cast(raw_object));
@@ -674,7 +694,7 @@ TEST(JSArray) {
array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
CHECK_EQ(Smi::FromInt(0), array->length());
// Must be in fast mode.
- CHECK(array->HasFastTypeElements());
+ CHECK(array->HasFastSmiOrObjectElements());
// array[length] = name.
array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
@@ -706,7 +726,7 @@ TEST(JSObjectCopy) {
v8::HandleScope sc;
String* object_symbol = String::cast(HEAP->Object_symbol());
- Object* raw_object = Isolate::Current()->context()->global()->
+ Object* raw_object = Isolate::Current()->context()->global_object()->
GetProperty(object_symbol)->ToObjectChecked();
JSFunction* object_function = JSFunction::cast(raw_object);
Handle<JSFunction> constructor(object_function);
@@ -812,7 +832,9 @@ TEST(Iteration) {
// Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
objs[next_objs_index++] = FACTORY->NewJSArray(10);
- objs[next_objs_index++] = FACTORY->NewJSArray(10, FAST_ELEMENTS, TENURED);
+ objs[next_objs_index++] = FACTORY->NewJSArray(10,
+ FAST_HOLEY_ELEMENTS,
+ TENURED);
// Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
objs[next_objs_index++] =
@@ -873,7 +895,7 @@ TEST(Regression39128) {
// Step 1: prepare a map for the object. We add 1 inobject property to it.
Handle<JSFunction> object_ctor(
- Isolate::Current()->global_context()->object_function());
+ Isolate::Current()->native_context()->object_function());
CHECK(object_ctor->has_initial_map());
Handle<Map> object_map(object_ctor->initial_map());
// Create a map with a single inobject property.
@@ -934,9 +956,9 @@ TEST(Regression39128) {
TEST(TestCodeFlushing) {
- i::FLAG_allow_natives_syntax = true;
// If we do not flush code this test is invalid.
if (!FLAG_flush_code) return;
+ i::FLAG_allow_natives_syntax = true;
InitializeVM();
v8::HandleScope scope;
const char* source = "function foo() {"
@@ -953,24 +975,22 @@ TEST(TestCodeFlushing) {
}
// Check function is compiled.
- Object* func_value = Isolate::Current()->context()->global()->
+ Object* func_value = Isolate::Current()->context()->global_object()->
GetProperty(*foo_name)->ToObjectChecked();
CHECK(func_value->IsJSFunction());
Handle<JSFunction> function(JSFunction::cast(func_value));
CHECK(function->shared()->is_compiled());
- // TODO(1609) Currently incremental marker does not support code flushing.
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-
+ // The code will survive at least two GCs.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
CHECK(function->shared()->is_compiled());
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ // Simulate several GCs that use full marking.
+ const int kAgingThreshold = 6;
+ for (int i = 0; i < kAgingThreshold; i++) {
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ }
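+  // The predicate being exercised here (illustrative, simplified; not
+  // the exact V8 logic): unoptimized code whose age has reached the
+  // threshold becomes a candidate for flushing, i.e. roughly
+  //   shared->code_age() >= kAgingThreshold && !function->IsOptimized()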
// foo should no longer be in the compilation cache
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
@@ -982,10 +1002,201 @@ TEST(TestCodeFlushing) {
}
-// Count the number of global contexts in the weak list of global contexts.
-static int CountGlobalContexts() {
+TEST(TestCodeFlushingIncremental) {
+ // If we do not flush code this test is invalid.
+ if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ const char* source = "function foo() {"
+ " var x = 42;"
+ " var y = 42;"
+ " var z = x + y;"
+ "};"
+ "foo()";
+ Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+
+ // This compile will add the code to the compilation cache.
+ { v8::HandleScope scope;
+ CompileRun(source);
+ }
+
+ // Check function is compiled.
+ Object* func_value = Isolate::Current()->context()->global_object()->
+ GetProperty(*foo_name)->ToObjectChecked();
+ CHECK(func_value->IsJSFunction());
+ Handle<JSFunction> function(JSFunction::cast(func_value));
+ CHECK(function->shared()->is_compiled());
+
+ // The code will survive at least two GCs.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(function->shared()->is_compiled());
+
+ // Simulate several GCs that use incremental marking.
+ const int kAgingThreshold = 6;
+ for (int i = 0; i < kAgingThreshold; i++) {
+ HEAP->incremental_marking()->Abort();
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ }
+ CHECK(!function->shared()->is_compiled() || function->IsOptimized());
+ CHECK(!function->is_compiled() || function->IsOptimized());
+
+ // This compile will compile the function again.
+ { v8::HandleScope scope;
+ CompileRun("foo();");
+ }
+
+ // Simulate several GCs that use incremental marking but make sure
+ // the loop breaks once the function is enqueued as a candidate.
+ for (int i = 0; i < kAgingThreshold; i++) {
+ HEAP->incremental_marking()->Abort();
+ SimulateIncrementalMarking();
+ if (!function->next_function_link()->IsUndefined()) break;
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ }
+
+ // Force optimization while incremental marking is active and while
+ // the function is enqueued as a candidate.
+ { v8::HandleScope scope;
+ CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
+ }
+
+ // Simulate one final GC to make sure the candidate queue is sane.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(function->shared()->is_compiled() || !function->IsOptimized());
+ CHECK(function->is_compiled() || !function->IsOptimized());
+}
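+
+// Illustrative helper (hypothetical; the test above inlines this
+// check): a function is enqueued as a code flushing candidate iff it
+// is linked into the candidate list.
+static bool IsFlushingCandidate(JSFunction* function) {
+  return !function->next_function_link()->IsUndefined();
+}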
+
+
+TEST(TestCodeFlushingIncrementalScavenge) {
+ // If we do not flush code this test is invalid.
+ if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ const char* source = "var foo = function() {"
+ " var x = 42;"
+ " var y = 42;"
+ " var z = x + y;"
+ "};"
+ "foo();"
+ "var bar = function() {"
+ " var x = 23;"
+ "};"
+ "bar();";
+ Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+ Handle<String> bar_name = FACTORY->LookupAsciiSymbol("bar");
+
+ // Perform one initial GC to enable code flushing.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ // This compile will add the code to the compilation cache.
+ { v8::HandleScope scope;
+ CompileRun(source);
+ }
+
+ // Check functions are compiled.
+ Object* func_value = Isolate::Current()->context()->global_object()->
+ GetProperty(*foo_name)->ToObjectChecked();
+ CHECK(func_value->IsJSFunction());
+ Handle<JSFunction> function(JSFunction::cast(func_value));
+ CHECK(function->shared()->is_compiled());
+ Object* func_value2 = Isolate::Current()->context()->global_object()->
+ GetProperty(*bar_name)->ToObjectChecked();
+ CHECK(func_value2->IsJSFunction());
+ Handle<JSFunction> function2(JSFunction::cast(func_value2));
+ CHECK(function2->shared()->is_compiled());
+
+ // Clear references to functions so that one of them can die.
+ { v8::HandleScope scope;
+ CompileRun("foo = 0; bar = 0;");
+ }
+
+ // Bump the code age so that flushing is triggered while the function
+ // object is still located in new-space.
+ const int kAgingThreshold = 6;
+ function->shared()->set_code_age(kAgingThreshold);
+ function2->shared()->set_code_age(kAgingThreshold);
+
+ // Simulate incremental marking so that the functions are enqueued as
+ // code flushing candidates. Then kill one of the functions. Finally
+ // perform a scavenge while incremental marking is still running.
+ SimulateIncrementalMarking();
+ *function2.location() = NULL;
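+  // (Overwriting the handle slot drops the only strong reference to
+  // the second function, so the scavenge below is free to reclaim it.)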
+ HEAP->CollectGarbage(NEW_SPACE, "test scavenge while marking");
+
+ // Simulate one final GC to make sure the candidate queue is sane.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(!function->shared()->is_compiled() || function->IsOptimized());
+ CHECK(!function->is_compiled() || function->IsOptimized());
+}
+
+
+TEST(TestCodeFlushingIncrementalAbort) {
+ // If we do not flush code this test is invalid.
+ if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ const char* source = "function foo() {"
+ " var x = 42;"
+ " var y = 42;"
+ " var z = x + y;"
+ "};"
+ "foo()";
+ Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
+
+ // This compile will add the code to the compilation cache.
+ { v8::HandleScope scope;
+ CompileRun(source);
+ }
+
+ // Check function is compiled.
+ Object* func_value = Isolate::Current()->context()->global_object()->
+ GetProperty(*foo_name)->ToObjectChecked();
+ CHECK(func_value->IsJSFunction());
+ Handle<JSFunction> function(JSFunction::cast(func_value));
+ CHECK(function->shared()->is_compiled());
+
+ // The code will survive at least two GCs.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(function->shared()->is_compiled());
+
+ // Bump the code age so that flushing is triggered.
+ const int kAgingThreshold = 6;
+ function->shared()->set_code_age(kAgingThreshold);
+
+ // Simulate incremental marking so that the function is enqueued as
+ // code flushing candidate.
+ SimulateIncrementalMarking();
+
+ // Enable the debugger and add a breakpoint while incremental marking
+ // is running so that incremental marking aborts and code flushing is
+ // disabled.
+ int position = 0;
+ Handle<Object> breakpoint_object(Smi::FromInt(0));
+ ISOLATE->debug()->SetBreakPoint(function, breakpoint_object, &position);
+ ISOLATE->debug()->ClearAllBreakPoints();
+
+ // Force optimization now that code flushing is disabled.
+ { v8::HandleScope scope;
+ CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
+ }
+
+ // Simulate one final GC to make sure the candidate queue is sane.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(function->shared()->is_compiled() || !function->IsOptimized());
+ CHECK(function->is_compiled() || !function->IsOptimized());
+}
+
+
+// Count the number of native contexts in the weak list of native contexts.
+int CountNativeContexts() {
int count = 0;
- Object* object = HEAP->global_contexts_list();
+ Object* object = HEAP->native_contexts_list();
while (!object->IsUndefined()) {
count++;
object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
@@ -995,7 +1206,7 @@ static int CountGlobalContexts() {
// Count the number of user functions in the weak list of optimized
-// functions attached to a global context.
+// functions attached to a native context.
static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
int count = 0;
Handle<Context> icontext = v8::Utils::OpenHandle(*context);
@@ -1016,7 +1227,7 @@ TEST(TestInternalWeakLists) {
v8::HandleScope scope;
v8::Persistent<v8::Context> ctx[kNumTestContexts];
- CHECK_EQ(0, CountGlobalContexts());
+ CHECK_EQ(0, CountNativeContexts());
// Create a number of global contexts which get linked together.
for (int i = 0; i < kNumTestContexts; i++) {
@@ -1024,7 +1235,7 @@ TEST(TestInternalWeakLists) {
bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
- CHECK_EQ(i + 1, CountGlobalContexts());
+ CHECK_EQ(i + 1, CountNativeContexts());
ctx[i]->Enter();
@@ -1059,6 +1270,7 @@ TEST(TestInternalWeakLists) {
}
// Mark compact handles the weak references.
+ ISOLATE->compilation_cache()->Clear();
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
@@ -1084,7 +1296,7 @@ TEST(TestInternalWeakLists) {
// Force compilation cache cleanup.
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- // Dispose the global contexts one by one.
+ // Dispose the native contexts one by one.
for (int i = 0; i < kNumTestContexts; i++) {
ctx[i].Dispose();
ctx[i].Clear();
@@ -1092,23 +1304,23 @@ TEST(TestInternalWeakLists) {
// Scavenge treats these references as strong.
for (int j = 0; j < 10; j++) {
HEAP->PerformScavenge();
- CHECK_EQ(kNumTestContexts - i, CountGlobalContexts());
+ CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
}
// Mark compact handles the weak references.
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK_EQ(kNumTestContexts - i - 1, CountGlobalContexts());
+ CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
}
- CHECK_EQ(0, CountGlobalContexts());
+ CHECK_EQ(0, CountNativeContexts());
}
-// Count the number of global contexts in the weak list of global contexts
+// Count the number of native contexts in the weak list of native contexts
// causing a GC after the specified number of elements.
-static int CountGlobalContextsWithGC(int n) {
+static int CountNativeContextsWithGC(int n) {
int count = 0;
- Handle<Object> object(HEAP->global_contexts_list());
+ Handle<Object> object(HEAP->native_contexts_list());
while (!object->IsUndefined()) {
count++;
if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags);
@@ -1120,7 +1332,7 @@ static int CountGlobalContextsWithGC(int n) {
// Count the number of user functions in the weak list of optimized
-// functions attached to a global context causing a GC after the
+// functions attached to a native context causing a GC after the
// specified number of elements.
static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
int n) {
@@ -1146,14 +1358,14 @@ TEST(TestInternalWeakListsTraverseWithGC) {
v8::HandleScope scope;
v8::Persistent<v8::Context> ctx[kNumTestContexts];
- CHECK_EQ(0, CountGlobalContexts());
+ CHECK_EQ(0, CountNativeContexts());
// Create a number of contexts and check the length of the weak list both
// with and without GCs while iterating the list.
for (int i = 0; i < kNumTestContexts; i++) {
ctx[i] = v8::Context::New();
- CHECK_EQ(i + 1, CountGlobalContexts());
- CHECK_EQ(i + 1, CountGlobalContextsWithGC(i / 2 + 1));
+ CHECK_EQ(i + 1, CountNativeContexts());
+ CHECK_EQ(i + 1, CountNativeContextsWithGC(i / 2 + 1));
}
bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
@@ -1197,6 +1409,7 @@ TEST(TestSizeOfObjects) {
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
CHECK(HEAP->old_pointer_space()->IsSweepingComplete());
int initial_size = static_cast<int>(HEAP->SizeOfObjects());
@@ -1237,7 +1450,9 @@ TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
for (HeapObject* obj = iterator.next();
obj != NULL;
obj = iterator.next()) {
- size_of_objects_2 += obj->Size();
+ if (!obj->IsFreeSpace()) {
+ size_of_objects_2 += obj->Size();
+ }
}
// Delta must be within 5% of the larger result.
// TODO(gc): Tighten this up by distinguishing between byte
@@ -1278,7 +1493,8 @@ TEST(GrowAndShrinkNewSpace) {
InitializeVM();
NewSpace* new_space = HEAP->new_space();
- if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
+ if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
+ HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
// The max size cannot exceed the reserved size, since semispaces must
// always be within the reserved space. We can't test new space growing and
// shrinking if the reserved size is the same as the minimum (initial) size.
@@ -1326,7 +1542,8 @@ TEST(GrowAndShrinkNewSpace) {
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
InitializeVM();
- if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
+ if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
+ HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
// The max size cannot exceed the reserved size, since semispaces must
// always be within the reserved space. We can't test new space growing and
// shrinking if the reserved size is the same as the minimum (initial) size.
@@ -1359,7 +1576,7 @@ static int NumberOfGlobalObjects() {
// Test that we don't embed maps from foreign contexts into
// optimized code.
-TEST(LeakGlobalContextViaMap) {
+TEST(LeakNativeContextViaMap) {
i::FLAG_allow_natives_syntax = true;
bool snapshot_enabled = i::Snapshot::IsEnabled();
v8::HandleScope outer_scope;
@@ -1386,6 +1603,7 @@ TEST(LeakGlobalContextViaMap) {
ctx2->Exit();
ctx1->Exit();
ctx1.Dispose();
+ v8::V8::ContextDisposedNotification();
}
HEAP->CollectAllAvailableGarbage();
CHECK_EQ((snapshot_enabled ? 3 : 2), NumberOfGlobalObjects());
@@ -1397,7 +1615,7 @@ TEST(LeakGlobalContextViaMap) {
// Test that we don't embed functions from foreign contexts into
// optimized code.
-TEST(LeakGlobalContextViaFunction) {
+TEST(LeakNativeContextViaFunction) {
i::FLAG_allow_natives_syntax = true;
bool snapshot_enabled = i::Snapshot::IsEnabled();
v8::HandleScope outer_scope;
@@ -1424,6 +1642,7 @@ TEST(LeakGlobalContextViaFunction) {
ctx2->Exit();
ctx1->Exit();
ctx1.Dispose();
+ v8::V8::ContextDisposedNotification();
}
HEAP->CollectAllAvailableGarbage();
CHECK_EQ((snapshot_enabled ? 3 : 2), NumberOfGlobalObjects());
@@ -1433,7 +1652,7 @@ TEST(LeakGlobalContextViaFunction) {
}
-TEST(LeakGlobalContextViaMapKeyed) {
+TEST(LeakNativeContextViaMapKeyed) {
i::FLAG_allow_natives_syntax = true;
bool snapshot_enabled = i::Snapshot::IsEnabled();
v8::HandleScope outer_scope;
@@ -1460,6 +1679,7 @@ TEST(LeakGlobalContextViaMapKeyed) {
ctx2->Exit();
ctx1->Exit();
ctx1.Dispose();
+ v8::V8::ContextDisposedNotification();
}
HEAP->CollectAllAvailableGarbage();
CHECK_EQ((snapshot_enabled ? 3 : 2), NumberOfGlobalObjects());
@@ -1469,7 +1689,7 @@ TEST(LeakGlobalContextViaMapKeyed) {
}
-TEST(LeakGlobalContextViaMapProto) {
+TEST(LeakNativeContextViaMapProto) {
i::FLAG_allow_natives_syntax = true;
bool snapshot_enabled = i::Snapshot::IsEnabled();
v8::HandleScope outer_scope;
@@ -1500,6 +1720,7 @@ TEST(LeakGlobalContextViaMapProto) {
ctx2->Exit();
ctx1->Exit();
ctx1.Dispose();
+ v8::V8::ContextDisposedNotification();
}
HEAP->CollectAllAvailableGarbage();
CHECK_EQ((snapshot_enabled ? 3 : 2), NumberOfGlobalObjects());
@@ -1511,9 +1732,10 @@ TEST(LeakGlobalContextViaMapProto) {
TEST(InstanceOfStubWriteBarrier) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
@@ -1585,22 +1807,24 @@ TEST(PrototypeTransitionClearing) {
// Verify that only dead prototype transitions are cleared.
CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK_EQ(10 - 3, baseObject->map()->NumberOfProtoTransitions());
+ const int transitions = 10 - 3;
+ CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());
// Verify that prototype transitions array was compacted.
- FixedArray* trans = baseObject->map()->prototype_transitions();
- for (int i = 0; i < 10 - 3; i++) {
+ FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
+ for (int i = 0; i < transitions; i++) {
int j = Map::kProtoTransitionHeaderSize +
i * Map::kProtoTransitionElementsPerEntry;
CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
- CHECK(trans->get(j + Map::kProtoTransitionPrototypeOffset)->IsJSObject());
+ Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
+ CHECK(proto->IsTheHole() || proto->IsJSObject());
}
// Make sure next prototype is placed on an old-space evacuation candidate.
Handle<JSObject> prototype;
PagedSpace* space = HEAP->old_pointer_space();
do {
- prototype = FACTORY->NewJSArray(32 * KB, FAST_ELEMENTS, TENURED);
+ prototype = FACTORY->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
} while (space->FirstPage() == space->LastPage() ||
!space->LastPage()->Contains(prototype->address()));
@@ -1608,7 +1832,8 @@ TEST(PrototypeTransitionClearing) {
// clearing correctly records slots in prototype transition array.
i::FLAG_always_compact = true;
Handle<Map> map(baseObject->map());
- CHECK(!space->LastPage()->Contains(map->prototype_transitions()->address()));
+ CHECK(!space->LastPage()->Contains(
+ map->GetPrototypeTransitions()->address()));
CHECK(space->LastPage()->Contains(prototype->address()));
baseObject->SetPrototype(*prototype, false)->ToObjectChecked();
CHECK(map->GetPrototypeTransition(*prototype)->IsMap());
@@ -1619,9 +1844,10 @@ TEST(PrototypeTransitionClearing) {
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
@@ -1674,9 +1900,10 @@ TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
i::FLAG_allow_natives_syntax = true;
-#ifdef DEBUG
+#ifdef VERIFY_HEAP
i::FLAG_verify_heap = true;
#endif
+
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
v8::HandleScope outer_scope;
@@ -1743,14 +1970,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
static int CountMapTransitions(Map* map) {
- int result = 0;
- DescriptorArray* descs = map->instance_descriptors();
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->IsTransitionOnly(i)) {
- result++;
- }
- }
- return result;
+ return map->transitions()->number_of_transitions();
}
@@ -1761,14 +1981,18 @@ TEST(Regress1465) {
i::FLAG_trace_incremental_marking = true;
InitializeVM();
v8::HandleScope scope;
+ static const int transitions_count = 256;
- #define TRANSITION_COUNT 256
- for (int i = 0; i < TRANSITION_COUNT; i++) {
- EmbeddedVector<char, 64> buffer;
- OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
- CompileRun(buffer.start());
+ {
+ AlwaysAllocateScope always_allocate;
+ for (int i = 0; i < transitions_count; i++) {
+ EmbeddedVector<char, 64> buffer;
+ OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
+ CompileRun(buffer.start());
+ }
+ CompileRun("var root = new Object;");
}
- CompileRun("var root = new Object;");
+
Handle<JSObject> root =
v8::Utils::OpenHandle(
*v8::Handle<v8::Object>::Cast(
@@ -1777,19 +2001,10 @@ TEST(Regress1465) {
// Count number of live transitions before marking.
int transitions_before = CountMapTransitions(root->map());
CompileRun("%DebugPrint(root);");
- CHECK_EQ(TRANSITION_COUNT, transitions_before);
+ CHECK_EQ(transitions_count, transitions_before);
- // Go through all incremental marking steps in one swoop.
- IncrementalMarking* marking = HEAP->incremental_marking();
- CHECK(marking->IsStopped());
- marking->Start();
- CHECK(marking->IsMarking());
- while (!marking->IsComplete()) {
- marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
- }
- CHECK(marking->IsComplete());
+ SimulateIncrementalMarking();
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK(marking->IsStopped());
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
@@ -1797,3 +2012,496 @@ TEST(Regress1465) {
CompileRun("%DebugPrint(root);");
CHECK_EQ(1, transitions_after);
}
+
+
+TEST(Regress2143a) {
+ i::FLAG_collect_maps = true;
+ i::FLAG_incremental_marking = true;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // Prepare a map transition from the root object together with a yet
+ // untransitioned root object.
+ CompileRun("var root = new Object;"
+ "root.foo = 0;"
+ "root = new Object;");
+
+ SimulateIncrementalMarking();
+
+ // Compile a StoreIC that performs the prepared map transition. This
+ // will restart incremental marking and should make sure the root is
+ // marked grey again.
+ CompileRun("function f(o) {"
+ " o.foo = 0;"
+ "}"
+ "f(new Object);"
+ "f(root);");
+
+ // This bug only triggers with aggressive IC clearing.
+ HEAP->AgeInlineCaches();
+
+ // Explicitly request GC to perform final marking step and sweeping.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Handle<JSObject> root =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
+
+ // The root object should be in a sane state.
+ CHECK(root->IsJSObject());
+ CHECK(root->map()->IsMap());
+}
+
+
+TEST(Regress2143b) {
+ i::FLAG_collect_maps = true;
+ i::FLAG_incremental_marking = true;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // Prepare a map transition from the root object together with a yet
+ // untransitioned root object.
+ CompileRun("var root = new Object;"
+ "root.foo = 0;"
+ "root = new Object;");
+
+ SimulateIncrementalMarking();
+
+ // Compile an optimized LStoreNamedField that performs the prepared
+ // map transition. This will restart incremental marking and should
+ // make sure the root is marked grey again.
+ CompileRun("function f(o) {"
+ " o.foo = 0;"
+ "}"
+ "f(new Object);"
+ "f(new Object);"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f(root);"
+ "%DeoptimizeFunction(f);");
+
+ // This bug only triggers with aggressive IC clearing.
+ HEAP->AgeInlineCaches();
+
+ // Explicitly request GC to perform final marking step and sweeping.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Handle<JSObject> root =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
+
+ // The root object should be in a sane state.
+ CHECK(root->IsJSObject());
+ CHECK(root->map()->IsMap());
+}
+
+
+// Implemented in the test-alloc.cc test suite.
+void SimulateFullSpace(PagedSpace* space);
+
+
+TEST(ReleaseOverReservedPages) {
+ i::FLAG_trace_gc = true;
+ // The optimizer can allocate stuff, messing up the test.
+ i::FLAG_crankshaft = false;
+ i::FLAG_always_opt = false;
+ InitializeVM();
+ v8::HandleScope scope;
+ static const int number_of_test_pages = 20;
+
+ // Prepare many pages with low live-bytes count.
+ PagedSpace* old_pointer_space = HEAP->old_pointer_space();
+ CHECK_EQ(1, old_pointer_space->CountTotalPages());
+ for (int i = 0; i < number_of_test_pages; i++) {
+ AlwaysAllocateScope always_allocate;
+ SimulateFullSpace(old_pointer_space);
+ FACTORY->NewFixedArray(1, TENURED);
+ }
+ CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+
+ // Triggering one GC will cause a lot of garbage to be discovered but
+ // evenly spread across all allocated pages.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
+ CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+
+ // Triggering subsequent GCs should cause at least half of the pages
+ // to be released to the OS after at most two cycles.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
+ CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
+ CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
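+  // That is, 21 >= 2 * CountTotalPages(), so at most 10 of the 21
+  // allocated pages may still be held after the second GC.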
+
+ // Triggering a last-resort GC should cause all pages to be released to the
+ // OS so that other processes can seize the memory. If we get a failure here
+ // where there are 2 pages left instead of 1, then we should increase the
+ // size of the first page a little in SizeOfFirstPage in spaces.cc. The
+ // first page should be small in order to reduce memory used when the VM
+ // boots, but if the 20 small arrays don't fit on the first page then that's
+ // an indication that it is too small.
+ HEAP->CollectAllAvailableGarbage("triggered really hard");
+ CHECK_EQ(1, old_pointer_space->CountTotalPages());
+}
+
+
+TEST(Regress2237) {
+ InitializeVM();
+ v8::HandleScope scope;
+ Handle<String> slice(HEAP->empty_string());
+
+ {
+ // Generate a parent that lives in new-space.
+ v8::HandleScope inner_scope;
+ const char* c = "This text is long enough to trigger sliced strings.";
+ Handle<String> s = FACTORY->NewStringFromAscii(CStrVector(c));
+ CHECK(s->IsSeqAsciiString());
+ CHECK(HEAP->InNewSpace(*s));
+
+ // Generate a sliced string that is based on the above parent and
+ // lives in old-space.
+ FillUpNewSpace(HEAP->new_space());
+ AlwaysAllocateScope always_allocate;
+ Handle<String> t;
+ // TODO(mstarzinger): Unfortunately FillUpNewSpace() still leaves
+ // some slack, so we need to allocate a few sliced strings.
+ for (int i = 0; i < 16; i++) {
+ t = FACTORY->NewProperSubString(s, 5, 35);
+ }
+ CHECK(t->IsSlicedString());
+ CHECK(!HEAP->InNewSpace(*t));
+ *slice.location() = *t.location();
+ }
+
+ CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
+}
+
+
+#ifdef OBJECT_PRINT
+TEST(PrintSharedFunctionInfo) {
+ InitializeVM();
+ v8::HandleScope scope;
+ const char* source = "f = function() { return 987654321; }\n"
+ "g = function() { return 123456789; }\n";
+ CompileRun(source);
+ Handle<JSFunction> g =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
+
+ AssertNoAllocation no_alloc;
+ g->shared()->PrintLn();
+}
+#endif // OBJECT_PRINT
+
+
+TEST(Regress2211) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ v8::Handle<v8::String> value = v8_str("val string");
+ Smi* hash = Smi::FromInt(321);
+ Heap* heap = Isolate::Current()->heap();
+
+ for (int i = 0; i < 2; i++) {
+ // Store identity hash first and common hidden property second.
+ v8::Handle<v8::Object> obj = v8::Object::New();
+ Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
+ CHECK(internal_obj->HasFastProperties());
+
+ // In the first iteration, set hidden value first and identity hash second.
+ // In the second iteration, reverse the order.
+ if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
+ MaybeObject* maybe_obj = internal_obj->SetIdentityHash(hash,
+ ALLOW_CREATION);
+ CHECK(!maybe_obj->IsFailure());
+ if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
+
+ // Check values.
+ CHECK_EQ(hash,
+ internal_obj->GetHiddenProperty(heap->identity_hash_symbol()));
+ CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
+
+ // Check size.
+ DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
+ ObjectHashTable* hashtable = ObjectHashTable::cast(
+ internal_obj->FastPropertyAt(descriptors->GetFieldIndex(0)));
+ // HashTable header (5 words) and 4 initial entries (2 words each): 13 words.
+ CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
+ }
+}
+
+
+TEST(IncrementalMarkingClearsTypeFeedbackCells) {
+ if (i::FLAG_always_opt) return;
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::Local<v8::Value> fun1, fun2;
+
+ {
+ LocalContext env;
+ CompileRun("function fun() {};");
+ fun1 = env->Global()->Get(v8_str("fun"));
+ }
+
+ {
+ LocalContext env;
+ CompileRun("function fun() {};");
+ fun2 = env->Global()->Get(v8_str("fun"));
+ }
+
+ // Prepare function f that contains type feedback for closures
+ // originating from two different native contexts.
+ v8::Context::GetCurrent()->Global()->Set(v8_str("fun1"), fun1);
+ v8::Context::GetCurrent()->Global()->Set(v8_str("fun2"), fun2);
+ CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+ Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
+ f->shared()->code()->type_feedback_info())->type_feedback_cells());
+
+ CHECK_EQ(2, cells->CellCount());
+ CHECK(cells->Cell(0)->value()->IsJSFunction());
+ CHECK(cells->Cell(1)->value()->IsJSFunction());
+
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ CHECK_EQ(2, cells->CellCount());
+ CHECK(cells->Cell(0)->value()->IsTheHole());
+ CHECK(cells->Cell(1)->value()->IsTheHole());
+}
+
+
+static Code* FindFirstIC(Code* code, Code::Kind kind) {
+ int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
+ RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
+ RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
+ RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
+ for (RelocIterator it(code, mask); !it.done(); it.next()) {
+ RelocInfo* info = it.rinfo();
+ Code* target = Code::GetCodeFromTargetAddress(info->target_address());
+ if (target->is_inline_cache_stub() && target->kind() == kind) {
+ return target;
+ }
+ }
+ return NULL;
+}
+
+
+TEST(IncrementalMarkingPreservesMonomorphicIC) {
+ if (i::FLAG_always_opt) return;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // Prepare function f that contains a monomorphic IC for object
+ // originating from the same native context.
+ CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
+ "function f(o) { return o.x; } f(obj); f(obj);");
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+ Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_before->ic_state() == MONOMORPHIC);
+
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_after->ic_state() == MONOMORPHIC);
+}
+
+
+TEST(IncrementalMarkingClearsMonomorphicIC) {
+ if (i::FLAG_always_opt) return;
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::Local<v8::Value> obj1;
+
+ {
+ LocalContext env;
+ CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
+ obj1 = env->Global()->Get(v8_str("obj"));
+ }
+
+ // Prepare function f that contains a monomorphic IC for object
+ // originating from a different native context.
+ v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
+ CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+ Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_before->ic_state() == MONOMORPHIC);
+
+ // Fire context dispose notification.
+ v8::V8::ContextDisposedNotification();
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_after->ic_state() == UNINITIALIZED);
+}
+
+
+TEST(IncrementalMarkingClearsPolymorphicIC) {
+ if (i::FLAG_always_opt) return;
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::Local<v8::Value> obj1, obj2;
+
+ {
+ LocalContext env;
+ CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
+ obj1 = env->Global()->Get(v8_str("obj"));
+ }
+
+ {
+ LocalContext env;
+ CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
+ obj2 = env->Global()->Get(v8_str("obj"));
+ }
+
+ // Prepare function f that contains a polymorphic IC for objects
+ // originating from two different native contexts.
+ v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
+ v8::Context::GetCurrent()->Global()->Set(v8_str("obj2"), obj2);
+ CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+ Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_before->ic_state() == MEGAMORPHIC);
+
+ // Fire context dispose notification.
+ v8::V8::ContextDisposedNotification();
+ SimulateIncrementalMarking();
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
+ CHECK(ic_after->ic_state() == UNINITIALIZED);
+}
+
+
+class SourceResource: public v8::String::ExternalAsciiStringResource {
+ public:
+ explicit SourceResource(const char* data)
+ : data_(data), length_(strlen(data)) { }
+
+ virtual void Dispose() {
+ i::DeleteArray(data_);
+ data_ = NULL;
+ }
+
+ const char* data() const { return data_; }
+
+ size_t length() const { return length_; }
+
+ bool IsDisposed() { return data_ == NULL; }
+
+ private:
+ const char* data_;
+ size_t length_;
+};
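+
+// Intended usage, mirrored by the test below: V8 takes the resource
+// and its Dispose() callback runs once the external string has been
+// collected.
+//
+//   SourceResource* resource = new SourceResource(i::StrDup(source));
+//   v8::Handle<v8::String> str = v8::String::NewExternal(resource);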
+
+
+TEST(ReleaseStackTraceData) {
+ // Test that the data retained by the Error.stack accessor is released
+ // after the first time the accessor is fired. We use external string
+ // to check whether the data is being released since the external string
+ // resource's callback is fired when the external string is GC'ed.
+ InitializeVM();
+ v8::HandleScope scope;
+ static const char* source = "var error = 1; "
+ "try { "
+ " throw new Error(); "
+ "} catch (e) { "
+ " error = e; "
+ "} ";
+ SourceResource* resource = new SourceResource(i::StrDup(source));
+ {
+ v8::HandleScope scope;
+ v8::Handle<v8::String> source_string = v8::String::NewExternal(resource);
+ v8::Script::Compile(source_string)->Run();
+ CHECK(!resource->IsDisposed());
+ }
+ HEAP->CollectAllAvailableGarbage();
+ // External source is being retained by the stack trace.
+ CHECK(!resource->IsDisposed());
+
+ CompileRun("error.stack; error.stack;");
+ HEAP->CollectAllAvailableGarbage();
+ // External source has been released.
+ CHECK(resource->IsDisposed());
+
+ delete resource;
+}
+
+
+TEST(Regression144230) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // First, make sure that the uninitialized CallIC stub is on a single page
+ // that will later be selected as an evacuation candidate.
+ {
+ v8::HandleScope inner_scope;
+ AlwaysAllocateScope always_allocate;
+ SimulateFullSpace(HEAP->code_space());
+ ISOLATE->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
+ }
+
+ // Second, compile a CallIC and execute it once so that it gets patched to
+ // the pre-monomorphic stub. These code objects are on yet another page.
+ {
+ v8::HandleScope inner_scope;
+ AlwaysAllocateScope always_allocate;
+ SimulateFullSpace(HEAP->code_space());
+ CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
+ "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
+ "call();");
+ }
+
+ // Third, we fill up the last page of the code space so that it does not get
+ // chosen as an evacuation candidate.
+ {
+ v8::HandleScope inner_scope;
+ AlwaysAllocateScope always_allocate;
+ CompileRun("for (var i = 0; i < 2000; i++) {"
+ " eval('function f' + i + '() { return ' + i +'; };' +"
+ " 'f' + i + '();');"
+ "}");
+ }
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+
+ // Fourth is the tricky part. Make sure the code containing the CallIC is
+ // visited first without clearing the IC. The shared function info is then
+ // visited later, causing the CallIC to be cleared.
+ Handle<String> name = FACTORY->LookupAsciiSymbol("call");
+ Handle<GlobalObject> global(ISOLATE->context()->global_object());
+ MaybeObject* maybe_call = global->GetProperty(*name);
+ JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
+ USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode));
+ ISOLATE->compilation_cache()->Clear();
+ call->shared()->set_ic_age(HEAP->global_ic_age() + 1);
+ Handle<Object> call_code(call->code());
+ Handle<Object> call_function(call);
+
+ // Now we are ready to mess up the heap.
+ HEAP->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
+
+ // Either heap verification caught the problem already or we go kaboom once
+ // the CallIC is executed the next time.
+ USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode));
+ CompileRun("call();");
+}