Diffstat (limited to 'deps/v8/src/stub-cache.h')
-rw-r--r--  deps/v8/src/stub-cache.h  40
1 files changed, 28 insertions, 12 deletions
diff --git a/deps/v8/src/stub-cache.h b/deps/v8/src/stub-cache.h
index 369b15da90..b79841a366 100644
--- a/deps/v8/src/stub-cache.h
+++ b/deps/v8/src/stub-cache.h
@@ -30,7 +30,8 @@
#include "macro-assembler.h"
-namespace v8 { namespace internal {
+namespace v8 {
+namespace internal {
// The stub cache is used for megamorphic calls and property accesses.
@@ -127,18 +128,23 @@ class StubCache : public AllStatic {
// ---
static Object* ComputeCallField(int argc,
+ InLoopFlag in_loop,
String* name,
Object* object,
JSObject* holder,
int index);
static Object* ComputeCallConstant(int argc,
+ InLoopFlag in_loop,
String* name,
Object* object,
JSObject* holder,
JSFunction* function);
- static Object* ComputeCallNormal(int argc, String* name, JSObject* receiver);
+ static Object* ComputeCallNormal(int argc,
+ InLoopFlag in_loop,
+ String* name,
+ JSObject* receiver);
static Object* ComputeCallInterceptor(int argc,
String* name,
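Every monomorphic call-stub lookup now states explicitly whether the call site sits inside a loop. A minimal sketch of a caller, assuming the conventional NOT_IN_LOOP/IN_LOOP enumerators for InLoopFlag (the call site itself is illustrative, not the actual IC code):

    // Hypothetical IC call site: ask the stub cache for a field-call stub,
    // tagging it with whether the IC was reached from inside a loop.
    InLoopFlag in_loop = reached_from_loop ? IN_LOOP : NOT_IN_LOOP;
    Object* code =
        StubCache::ComputeCallField(argc, in_loop, name, object, holder, index);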
@@ -147,15 +153,14 @@ class StubCache : public AllStatic {
// ---
- static Object* ComputeCallInitialize(int argc);
- static Object* ComputeCallInitializeInLoop(int argc);
- static Object* ComputeCallPreMonomorphic(int argc);
- static Object* ComputeCallNormal(int argc);
- static Object* ComputeCallMegamorphic(int argc);
+ static Object* ComputeCallInitialize(int argc, InLoopFlag in_loop);
+ static Object* ComputeCallPreMonomorphic(int argc, InLoopFlag in_loop);
+ static Object* ComputeCallNormal(int argc, InLoopFlag in_loop);
+ static Object* ComputeCallMegamorphic(int argc, InLoopFlag in_loop);
static Object* ComputeCallMiss(int argc);
// Finds the Code object stored in the Heap::non_monomorphic_cache().
- static Code* FindCallInitialize(int argc);
+ static Code* FindCallInitialize(int argc, InLoopFlag in_loop);
#ifdef ENABLE_DEBUGGER_SUPPORT
static Object* ComputeCallDebugBreak(int argc);
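The separate *InLoop entry point disappears: loop-ness is now a parameter of the single ComputeCallInitialize (and of the other non-monomorphic stub lookups above). A before/after sketch under that assumption:

    // Before: two entry points.
    //   StubCache::ComputeCallInitialize(argc);
    //   StubCache::ComputeCallInitializeInLoop(argc);
    // After: one entry point, the loop bit passed as a flag.
    Object* generic = StubCache::ComputeCallInitialize(argc, NOT_IN_LOOP);
    Object* in_loop = StubCache::ComputeCallInitialize(argc, IN_LOOP);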
@@ -208,8 +213,12 @@ class StubCache : public AllStatic {
// 4Gb (and not at all if it isn't).
uint32_t map_low32bits =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
+ // We always set the in_loop bit to zero when generating the lookup code
+ // so do it here too so the hash codes match.
+ uint32_t iflags =
+ (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
// Base the offset on a simple combination of name, flags, and map.
- uint32_t key = (map_low32bits + field) ^ flags;
+ uint32_t key = (map_low32bits + field) ^ iflags;
return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
}
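The hash tweak is the heart of the change: the generated megamorphic lookup code always probes with the in_loop bit cleared, so the C++ side must clear it as well before hashing, otherwise a stub stored with IN_LOOP could never be found again. A small illustration, with flags_in_loop and flags_not_in_loop as hypothetical flag words that differ only in bits excluded from lookup:

    // Masking makes the two variants hash identically, so both probe the
    // same primary-table slot for a given name and map.
    uint32_t a = static_cast<uint32_t>(flags_in_loop) & ~Code::kFlagsNotUsedInLookup;
    uint32_t b = static_cast<uint32_t>(flags_not_in_loop) & ~Code::kFlagsNotUsedInLookup;
    // a == b, hence (map_low32bits + field) ^ a == (map_low32bits + field) ^ b.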
@@ -217,7 +226,11 @@ class StubCache : public AllStatic {
// Use the seed from the primary cache in the secondary cache.
uint32_t string_low32bits =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
- uint32_t key = seed - string_low32bits + flags;
+ // We always set the in_loop bit to zero when generating the lookup code
+ // so do it here too so the hash codes match.
+ uint32_t iflags =
+ (static_cast<uint32_t>(flags) & ~Code::kFlagsICInLoopMask);
+ uint32_t key = seed - string_low32bits + iflags;
return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
}
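The secondary probe is seeded by the primary offset, so the same masking is applied there as well; judging by the mask name, only the in-loop bit is stripped in this table. Sketch with the same hypothetical flag words as above:

    // Both variants reduce to the same masked flags, so for a given name
    // and seed they land on the same secondary-table entry.
    uint32_t masked = static_cast<uint32_t>(flags_in_loop) & ~Code::kFlagsICInLoopMask;
    uint32_t key = seed - string_low32bits + masked;
    uint32_t slot = key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);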
@@ -338,6 +351,7 @@ class StubCompiler BASE_EMBEDDED {
static void GenerateLoadInterceptor(MacroAssembler* masm,
JSObject* object,
JSObject* holder,
+ Smi* lookup_hint,
Register receiver,
Register name,
Register scratch1,
@@ -468,11 +482,13 @@ class CallStubCompiler: public StubCompiler {
Object* CompileCallField(Object* object,
JSObject* holder,
int index,
- String* name);
+ String* name,
+ Code::Flags flags);
Object* CompileCallConstant(Object* object,
JSObject* holder,
JSFunction* function,
- CheckType check);
+ CheckType check,
+ Code::Flags flags);
Object* CompileCallInterceptor(Object* object,
JSObject* holder,
String* name);
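Finally, CallStubCompiler::CompileCallField and CompileCallConstant now receive the precomputed Code::Flags, so the Code object they emit carries the same in_loop bit the cache hashed on. A hedged sketch of the hand-off; ComputeCallFieldFlags is a made-up helper standing in for however the cache actually builds the flags word:

    // The cache builds the full flags (including in_loop) once, then both
    // hashes on them and passes them to the stub compiler.
    Code::Flags flags = ComputeCallFieldFlags(argc, in_loop);  // hypothetical helper
    CallStubCompiler compiler(argc);                           // constructor shape assumed
    Object* code = compiler.CompileCallField(object, holder, index, name, flags);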