Diffstat (limited to 'deps/v8/src/arm/stub-cache-arm.cc')
-rw-r--r--  deps/v8/src/arm/stub-cache-arm.cc  968
1 file changed, 312 insertions(+), 656 deletions(-)
diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc
index fd53b9782..38f391a33 100644
--- a/deps/v8/src/arm/stub-cache-arm.cc
+++ b/deps/v8/src/arm/stub-cache-arm.cc
@@ -2,13 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "v8.h"
+#include "src/v8.h"
#if V8_TARGET_ARCH_ARM
-#include "ic-inl.h"
-#include "codegen.h"
-#include "stub-cache.h"
+#include "src/codegen.h"
+#include "src/ic-inl.h"
+#include "src/stub-cache.h"
namespace v8 {
namespace internal {
@@ -36,12 +36,12 @@ static void ProbeTable(Isolate* isolate,
uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
// Check the relative positions of the address fields.
- ASSERT(value_off_addr > key_off_addr);
- ASSERT((value_off_addr - key_off_addr) % 4 == 0);
- ASSERT((value_off_addr - key_off_addr) < (256 * 4));
- ASSERT(map_off_addr > key_off_addr);
- ASSERT((map_off_addr - key_off_addr) % 4 == 0);
- ASSERT((map_off_addr - key_off_addr) < (256 * 4));
+ DCHECK(value_off_addr > key_off_addr);
+ DCHECK((value_off_addr - key_off_addr) % 4 == 0);
+ DCHECK((value_off_addr - key_off_addr) < (256 * 4));
+ DCHECK(map_off_addr > key_off_addr);
+ DCHECK((map_off_addr - key_off_addr) % 4 == 0);
+ DCHECK((map_off_addr - key_off_addr) < (256 * 4));
Label miss;
Register base_addr = scratch;
@@ -77,7 +77,7 @@ static void ProbeTable(Isolate* isolate,
// It's a nice optimization if this constant is encodable in the bic insn.
uint32_t mask = Code::kFlagsNotUsedInLookup;
- ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
+ DCHECK(__ ImmediateFitsAddrMode1Instruction(mask));
__ bic(flags_reg, flags_reg, Operand(mask));
__ cmp(flags_reg, Operand(flags));
__ b(ne, &miss);
@@ -98,14 +98,11 @@ static void ProbeTable(Isolate* isolate,
}
-void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
- Label* miss_label,
- Register receiver,
- Handle<Name> name,
- Register scratch0,
- Register scratch1) {
- ASSERT(name->IsUniqueName());
- ASSERT(!receiver.is(scratch0));
+void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
+ MacroAssembler* masm, Label* miss_label, Register receiver,
+ Handle<Name> name, Register scratch0, Register scratch1) {
+ DCHECK(name->IsUniqueName());
+ DCHECK(!receiver.is(scratch0));
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
__ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
@@ -166,27 +163,27 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
// Make sure that code is valid. The multiplying code relies on the
// entry size being 12.
- ASSERT(sizeof(Entry) == 12);
+ DCHECK(sizeof(Entry) == 12);
// Make sure the flags does not name a specific type.
- ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
+ DCHECK(Code::ExtractTypeFromFlags(flags) == 0);
// Make sure that there are no register conflicts.
- ASSERT(!scratch.is(receiver));
- ASSERT(!scratch.is(name));
- ASSERT(!extra.is(receiver));
- ASSERT(!extra.is(name));
- ASSERT(!extra.is(scratch));
- ASSERT(!extra2.is(receiver));
- ASSERT(!extra2.is(name));
- ASSERT(!extra2.is(scratch));
- ASSERT(!extra2.is(extra));
+ DCHECK(!scratch.is(receiver));
+ DCHECK(!scratch.is(name));
+ DCHECK(!extra.is(receiver));
+ DCHECK(!extra.is(name));
+ DCHECK(!extra.is(scratch));
+ DCHECK(!extra2.is(receiver));
+ DCHECK(!extra2.is(name));
+ DCHECK(!extra2.is(scratch));
+ DCHECK(!extra2.is(extra));
// Check scratch, extra and extra2 registers are valid.
- ASSERT(!scratch.is(no_reg));
- ASSERT(!extra.is(no_reg));
- ASSERT(!extra2.is(no_reg));
- ASSERT(!extra3.is(no_reg));
+ DCHECK(!scratch.is(no_reg));
+ DCHECK(!extra.is(no_reg));
+ DCHECK(!extra2.is(no_reg));
+ DCHECK(!extra3.is(no_reg));
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
@@ -202,10 +199,10 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
uint32_t mask = kPrimaryTableSize - 1;
// We shift out the last two bits because they are not part of the hash and
// they are always 01 for maps.
- __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
+ __ mov(scratch, Operand(scratch, LSR, kCacheIndexShift));
// Mask down the eor argument to the minimum to keep the immediate
// ARM-encodable.
- __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
+ __ eor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
// Prefer and_ to ubfx here because ubfx takes 2 cycles.
__ and_(scratch, scratch, Operand(mask));
@@ -222,9 +219,9 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
extra3);
// Primary miss: Compute hash for secondary probe.
- __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
+ __ sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift));
uint32_t mask2 = kSecondaryTableSize - 1;
- __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
+ __ add(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
__ and_(scratch, scratch, Operand(mask2));
// Probe the secondary table.
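
As a rough C++ illustration of what the two hash sequences above compute (not part of the commit; kCacheIndexShift and the table sizes are passed in as parameters here, and the scratch register is assumed to already hold the combined name/map value loaded before these hunks):

#include <cstdint>

uint32_t PrimaryOffset(uint32_t name_map_hash, uint32_t flags,
                       uint32_t table_size, int cache_index_shift) {
  uint32_t mask = table_size - 1;
  uint32_t h = name_map_hash >> cache_index_shift;  // mov scratch, Operand(scratch, LSR, shift)
  h ^= (flags >> cache_index_shift) & mask;         // eor with the ARM-encodable masked flags
  return h & mask;                                  // and_ with the primary table mask
}

uint32_t SecondaryOffset(uint32_t primary, uint32_t name_addr, uint32_t flags,
                         uint32_t table_size, int cache_index_shift) {
  uint32_t mask2 = table_size - 1;
  uint32_t h = primary - (name_addr >> cache_index_shift);  // sub scratch, scratch, name >> shift
  h += (flags >> cache_index_shift) & mask2;                // add the masked flags
  return h & mask2;                                         // and_ with the secondary table mask
}
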
@@ -247,30 +244,8 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
}
-void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
- int index,
- Register prototype) {
- // Load the global or builtins object from the current context.
- __ ldr(prototype,
- MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- // Load the native context from the global or builtins object.
- __ ldr(prototype,
- FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
- // Load the function from the native context.
- __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
- // Load the initial map. The global functions all have initial maps.
- __ ldr(prototype,
- FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
- // Load the prototype from the initial map.
- __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
-}
-
-
-void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
- MacroAssembler* masm,
- int index,
- Register prototype,
- Label* miss) {
+void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
+ MacroAssembler* masm, int index, Register prototype, Label* miss) {
Isolate* isolate = masm->isolate();
// Get the global function with the given index.
Handle<JSFunction> function(
@@ -293,46 +268,9 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
}
-void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
- Register dst,
- Register src,
- bool inobject,
- int index,
- Representation representation) {
- ASSERT(!representation.IsDouble());
- int offset = index * kPointerSize;
- if (!inobject) {
- // Calculate the offset into the properties array.
- offset = offset + FixedArray::kHeaderSize;
- __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
- src = dst;
- }
- __ ldr(dst, FieldMemOperand(src, offset));
-}
-
-
-void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
- Register receiver,
- Register scratch,
- Label* miss_label) {
- // Check that the receiver isn't a smi.
- __ JumpIfSmi(receiver, miss_label);
-
- // Check that the object is a JS array.
- __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
- __ b(ne, miss_label);
-
- // Load length directly from the JS array.
- __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- __ Ret();
-}
-
-
-void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
- Register receiver,
- Register scratch1,
- Register scratch2,
- Label* miss_label) {
+void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
+ MacroAssembler* masm, Register receiver, Register scratch1,
+ Register scratch2, Label* miss_label) {
__ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
__ mov(r0, scratch1);
__ Ret();
@@ -342,13 +280,11 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
-void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
- Handle<JSGlobalObject> global,
- Handle<Name> name,
- Register scratch,
- Label* miss) {
+void PropertyHandlerCompiler::GenerateCheckPropertyCell(
+ MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
+ Register scratch, Label* miss) {
Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
- ASSERT(cell->value()->IsTheHole());
+ DCHECK(cell->value()->IsTheHole());
__ mov(scratch, Operand(cell));
__ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
@@ -357,18 +293,120 @@ void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
}
-void StoreStubCompiler::GenerateNegativeHolderLookup(
- MacroAssembler* masm,
- Handle<JSObject> holder,
- Register holder_reg,
- Handle<Name> name,
- Label* miss) {
- if (holder->IsJSGlobalObject()) {
- GenerateCheckPropertyCell(
- masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
- } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
- GenerateDictionaryNegativeLookup(
- masm, miss, holder_reg, name, scratch1(), scratch2());
+static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
+ Register holder, Register name,
+ Handle<JSObject> holder_obj) {
+ STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
+ STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
+ STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
+ STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
+ STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
+ __ push(name);
+ Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
+ DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
+ Register scratch = name;
+ __ mov(scratch, Operand(interceptor));
+ __ push(scratch);
+ __ push(receiver);
+ __ push(holder);
+}
+
+
+static void CompileCallLoadPropertyWithInterceptor(
+ MacroAssembler* masm, Register receiver, Register holder, Register name,
+ Handle<JSObject> holder_obj, IC::UtilityId id) {
+ PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
+ __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
+ NamedLoadHandlerCompiler::kInterceptorArgsLength);
+}
+
+
+// Generate call to api function.
+void PropertyHandlerCompiler::GenerateFastApiCall(
+ MacroAssembler* masm, const CallOptimization& optimization,
+ Handle<Map> receiver_map, Register receiver, Register scratch_in,
+ bool is_store, int argc, Register* values) {
+ DCHECK(!receiver.is(scratch_in));
+ __ push(receiver);
+ // Write the arguments to stack frame.
+ for (int i = 0; i < argc; i++) {
+ Register arg = values[argc - 1 - i];
+ DCHECK(!receiver.is(arg));
+ DCHECK(!scratch_in.is(arg));
+ __ push(arg);
+ }
+ DCHECK(optimization.is_simple_api_call());
+
+ // Abi for CallApiFunctionStub.
+ Register callee = r0;
+ Register call_data = r4;
+ Register holder = r2;
+ Register api_function_address = r1;
+
+ // Put holder in place.
+ CallOptimization::HolderLookup holder_lookup;
+ Handle<JSObject> api_holder =
+ optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
+ switch (holder_lookup) {
+ case CallOptimization::kHolderIsReceiver:
+ __ Move(holder, receiver);
+ break;
+ case CallOptimization::kHolderFound:
+ __ Move(holder, api_holder);
+ break;
+ case CallOptimization::kHolderNotFound:
+ UNREACHABLE();
+ break;
+ }
+
+ Isolate* isolate = masm->isolate();
+ Handle<JSFunction> function = optimization.constant_function();
+ Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
+ Handle<Object> call_data_obj(api_call_info->data(), isolate);
+
+ // Put callee in place.
+ __ Move(callee, function);
+
+ bool call_data_undefined = false;
+ // Put call_data in place.
+ if (isolate->heap()->InNewSpace(*call_data_obj)) {
+ __ Move(call_data, api_call_info);
+ __ ldr(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
+ } else if (call_data_obj->IsUndefined()) {
+ call_data_undefined = true;
+ __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
+ } else {
+ __ Move(call_data, call_data_obj);
+ }
+
+ // Put api_function_address in place.
+ Address function_address = v8::ToCData<Address>(api_call_info->callback());
+ ApiFunction fun(function_address);
+ ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
+ ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
+ __ mov(api_function_address, Operand(ref));
+
+ // Jump to stub.
+ CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
+ __ TailCallStub(&stub);
+}
+
+
+void PropertyAccessCompiler::GenerateTailCall(MacroAssembler* masm,
+ Handle<Code> code) {
+ __ Jump(code, RelocInfo::CODE_TARGET);
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm())
+
+
+void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
+ Handle<Name> name) {
+ if (!label->is_unused()) {
+ __ bind(label);
+ __ mov(this->name(), Operand(name));
}
}
@@ -377,19 +415,10 @@ void StoreStubCompiler::GenerateNegativeHolderLookup(
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered. Upon branch to miss_label, the receiver and name
// registers have their original values.
-void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
- Handle<JSObject> object,
- LookupResult* lookup,
- Handle<Map> transition,
- Handle<Name> name,
- Register receiver_reg,
- Register storage_reg,
- Register value_reg,
- Register scratch1,
- Register scratch2,
- Register scratch3,
- Label* miss_label,
- Label* slow) {
+void NamedStoreHandlerCompiler::GenerateStoreTransition(
+ Handle<Map> transition, Handle<Name> name, Register receiver_reg,
+ Register storage_reg, Register value_reg, Register scratch1,
+ Register scratch2, Register scratch3, Label* miss_label, Label* slow) {
// r0 : value
Label exit;
@@ -397,10 +426,10 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
DescriptorArray* descriptors = transition->instance_descriptors();
PropertyDetails details = descriptors->GetDetails(descriptor);
Representation representation = details.representation();
- ASSERT(!representation.IsNone());
+ DCHECK(!representation.IsNone());
if (details.type() == CONSTANT) {
- Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
+ Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
__ Move(scratch1, constant);
__ cmp(value_reg, scratch1);
__ b(ne, miss_label);
@@ -426,8 +455,9 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
}
} else if (representation.IsDouble()) {
Label do_store, heap_number;
- __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
+ __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex);
+ __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow,
+ TAG_RESULT, MUTABLE);
__ JumpIfNotSmi(value_reg, &heap_number);
__ SmiUntag(scratch1, value_reg);
@@ -444,13 +474,12 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
__ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
}
- // Stub never generated for non-global objects that require access
- // checks.
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+ // Stub never generated for objects that require access checks.
+ DCHECK(!transition->is_access_check_needed());
// Perform map transition for the receiver if necessary.
if (details.type() == FIELD &&
- object->map()->unused_property_fields() == 0) {
+ Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
// The properties must be extended before we can store the value.
// We jump to a runtime call that extends the properties array.
__ push(receiver_reg);
@@ -458,9 +487,8 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
__ Push(r2, r0);
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
- masm->isolate()),
- 3,
- 1);
+ isolate()),
+ 3, 1);
return;
}
@@ -479,7 +507,7 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
OMIT_SMI_CHECK);
if (details.type() == CONSTANT) {
- ASSERT(value_reg.is(r0));
+ DCHECK(value_reg.is(r0));
__ Ret();
return;
}
@@ -490,14 +518,14 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
// Adjust for the number of properties stored in the object. Even in the
// face of a transition we can use the old map here because the size of the
// object and the number of in-object properties is not going to change.
- index -= object->map()->inobject_properties();
+ index -= transition->inobject_properties();
// TODO(verwaest): Share this code as a code stub.
SmiCheck smi_check = representation.IsTagged()
? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
if (index < 0) {
// Set the property straight into the object.
- int offset = object->map()->instance_size() + (index * kPointerSize);
+ int offset = transition->instance_size() + (index * kPointerSize);
if (representation.IsDouble()) {
__ str(storage_reg, FieldMemOperand(receiver_reg, offset));
} else {
@@ -547,297 +575,46 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
}
// Return the value (register r0).
- ASSERT(value_reg.is(r0));
+ DCHECK(value_reg.is(r0));
__ bind(&exit);
__ Ret();
}
-// Generate StoreField code, value is passed in r0 register.
-// When leaving generated code after success, the receiver_reg and name_reg
-// may be clobbered. Upon branch to miss_label, the receiver and name
-// registers have their original values.
-void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
- Handle<JSObject> object,
- LookupResult* lookup,
- Register receiver_reg,
- Register name_reg,
- Register value_reg,
- Register scratch1,
- Register scratch2,
- Label* miss_label) {
- // r0 : value
- Label exit;
-
- // Stub never generated for non-global objects that require access
- // checks.
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
-
- int index = lookup->GetFieldIndex().field_index();
-
- // Adjust for the number of properties stored in the object. Even in the
- // face of a transition we can use the old map here because the size of the
- // object and the number of in-object properties is not going to change.
- index -= object->map()->inobject_properties();
-
- Representation representation = lookup->representation();
- ASSERT(!representation.IsNone());
- if (representation.IsSmi()) {
- __ JumpIfNotSmi(value_reg, miss_label);
- } else if (representation.IsHeapObject()) {
- __ JumpIfSmi(value_reg, miss_label);
- HeapType* field_type = lookup->GetFieldType();
- HeapType::Iterator<Map> it = field_type->Classes();
- if (!it.Done()) {
- __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
- Label do_store;
- while (true) {
- __ CompareMap(scratch1, it.Current(), &do_store);
- it.Advance();
- if (it.Done()) {
- __ b(ne, miss_label);
- break;
- }
- __ b(eq, &do_store);
- }
- __ bind(&do_store);
- }
- } else if (representation.IsDouble()) {
- // Load the double storage.
- if (index < 0) {
- int offset = object->map()->instance_size() + (index * kPointerSize);
- __ ldr(scratch1, FieldMemOperand(receiver_reg, offset));
- } else {
- __ ldr(scratch1,
- FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
- int offset = index * kPointerSize + FixedArray::kHeaderSize;
- __ ldr(scratch1, FieldMemOperand(scratch1, offset));
- }
-
- // Store the value into the storage.
- Label do_store, heap_number;
- __ JumpIfNotSmi(value_reg, &heap_number);
- __ SmiUntag(scratch2, value_reg);
- __ vmov(s0, scratch2);
- __ vcvt_f64_s32(d0, s0);
- __ jmp(&do_store);
-
- __ bind(&heap_number);
- __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
- miss_label, DONT_DO_SMI_CHECK);
- __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
-
- __ bind(&do_store);
- __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
- // Return the value (register r0).
- ASSERT(value_reg.is(r0));
- __ Ret();
- return;
- }
-
- // TODO(verwaest): Share this code as a code stub.
- SmiCheck smi_check = representation.IsTagged()
- ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
- if (index < 0) {
- // Set the property straight into the object.
- int offset = object->map()->instance_size() + (index * kPointerSize);
- __ str(value_reg, FieldMemOperand(receiver_reg, offset));
-
- if (!representation.IsSmi()) {
- // Skip updating write barrier if storing a smi.
- __ JumpIfSmi(value_reg, &exit);
-
- // Update the write barrier for the array address.
- // Pass the now unused name_reg as a scratch register.
- __ mov(name_reg, value_reg);
- __ RecordWriteField(receiver_reg,
- offset,
- name_reg,
- scratch1,
- kLRHasNotBeenSaved,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- smi_check);
- }
- } else {
- // Write to the properties array.
- int offset = index * kPointerSize + FixedArray::kHeaderSize;
- // Get the properties array
- __ ldr(scratch1,
- FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ str(value_reg, FieldMemOperand(scratch1, offset));
-
- if (!representation.IsSmi()) {
- // Skip updating write barrier if storing a smi.
- __ JumpIfSmi(value_reg, &exit);
-
- // Update the write barrier for the array address.
- // Ok to clobber receiver_reg and name_reg, since we return.
- __ mov(name_reg, value_reg);
- __ RecordWriteField(scratch1,
- offset,
- name_reg,
- receiver_reg,
- kLRHasNotBeenSaved,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- smi_check);
- }
- }
-
- // Return the value (register r0).
- ASSERT(value_reg.is(r0));
- __ bind(&exit);
- __ Ret();
-}
-
-
-void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
- Label* label,
- Handle<Name> name) {
- if (!label->is_unused()) {
- __ bind(label);
- __ mov(this->name(), Operand(name));
- }
-}
-
-
-static void PushInterceptorArguments(MacroAssembler* masm,
- Register receiver,
- Register holder,
- Register name,
- Handle<JSObject> holder_obj) {
- STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
- STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
- STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
- STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
- STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
- __ push(name);
- Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
- ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
- Register scratch = name;
- __ mov(scratch, Operand(interceptor));
- __ push(scratch);
- __ push(receiver);
- __ push(holder);
-}
-
-
-static void CompileCallLoadPropertyWithInterceptor(
- MacroAssembler* masm,
- Register receiver,
- Register holder,
- Register name,
- Handle<JSObject> holder_obj,
- IC::UtilityId id) {
- PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
- __ CallExternalReference(
- ExternalReference(IC_Utility(id), masm->isolate()),
- StubCache::kInterceptorArgsLength);
-}
-
-
-// Generate call to api function.
-void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
- const CallOptimization& optimization,
- Handle<Map> receiver_map,
- Register receiver,
- Register scratch_in,
- bool is_store,
- int argc,
- Register* values) {
- ASSERT(!receiver.is(scratch_in));
- __ push(receiver);
- // Write the arguments to stack frame.
- for (int i = 0; i < argc; i++) {
- Register arg = values[argc-1-i];
- ASSERT(!receiver.is(arg));
- ASSERT(!scratch_in.is(arg));
- __ push(arg);
- }
- ASSERT(optimization.is_simple_api_call());
-
- // Abi for CallApiFunctionStub.
- Register callee = r0;
- Register call_data = r4;
- Register holder = r2;
- Register api_function_address = r1;
-
- // Put holder in place.
- CallOptimization::HolderLookup holder_lookup;
- Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
- receiver_map,
- &holder_lookup);
- switch (holder_lookup) {
- case CallOptimization::kHolderIsReceiver:
- __ Move(holder, receiver);
- break;
- case CallOptimization::kHolderFound:
- __ Move(holder, api_holder);
- break;
- case CallOptimization::kHolderNotFound:
- UNREACHABLE();
+void NamedStoreHandlerCompiler::GenerateStoreField(LookupResult* lookup,
+ Register value_reg,
+ Label* miss_label) {
+ DCHECK(lookup->representation().IsHeapObject());
+ __ JumpIfSmi(value_reg, miss_label);
+ HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes();
+ __ ldr(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset));
+ Label do_store;
+ while (true) {
+ __ CompareMap(scratch1(), it.Current(), &do_store);
+ it.Advance();
+ if (it.Done()) {
+ __ b(ne, miss_label);
break;
+ }
+ __ b(eq, &do_store);
}
+ __ bind(&do_store);
- Isolate* isolate = masm->isolate();
- Handle<JSFunction> function = optimization.constant_function();
- Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
- Handle<Object> call_data_obj(api_call_info->data(), isolate);
-
- // Put callee in place.
- __ Move(callee, function);
-
- bool call_data_undefined = false;
- // Put call_data in place.
- if (isolate->heap()->InNewSpace(*call_data_obj)) {
- __ Move(call_data, api_call_info);
- __ ldr(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
- } else if (call_data_obj->IsUndefined()) {
- call_data_undefined = true;
- __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
- } else {
- __ Move(call_data, call_data_obj);
- }
-
- // Put api_function_address in place.
- Address function_address = v8::ToCData<Address>(api_call_info->callback());
- ApiFunction fun(function_address);
- ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
- ExternalReference ref = ExternalReference(&fun,
- type,
- masm->isolate());
- __ mov(api_function_address, Operand(ref));
-
- // Jump to stub.
- CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
- __ TailCallStub(&stub);
-}
-
-
-void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
- __ Jump(code, RelocInfo::CODE_TARGET);
+ StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
+ lookup->representation());
+ GenerateTailCall(masm(), stub.GetCode());
}
-#undef __
-#define __ ACCESS_MASM(masm())
-
-
-Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
- Register object_reg,
- Handle<JSObject> holder,
- Register holder_reg,
- Register scratch1,
- Register scratch2,
- Handle<Name> name,
- Label* miss,
- PrototypeCheckType check) {
- Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
+Register PropertyHandlerCompiler::CheckPrototypes(
+ Register object_reg, Register holder_reg, Register scratch1,
+ Register scratch2, Handle<Name> name, Label* miss,
+ PrototypeCheckType check) {
+ Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
// Make sure there's no overlap between holder and object registers.
- ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
- ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
+ DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
+ DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
&& !scratch2.is(scratch1));
// Keep track of the current object in register reg.
@@ -845,12 +622,12 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
int depth = 0;
Handle<JSObject> current = Handle<JSObject>::null();
- if (type->IsConstant()) {
- current = Handle<JSObject>::cast(type->AsConstant()->Value());
+ if (type()->IsConstant()) {
+ current = Handle<JSObject>::cast(type()->AsConstant()->Value());
}
Handle<JSObject> prototype = Handle<JSObject>::null();
Handle<Map> current_map = receiver_map;
- Handle<Map> holder_map(holder->map());
+ Handle<Map> holder_map(holder()->map());
// Traverse the prototype chain and check the maps in the prototype chain for
// fast and global objects or do negative lookup for normal objects.
while (!current_map.is_identical_to(holder_map)) {
@@ -858,18 +635,18 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
// Only global objects and objects that do not require access
// checks are allowed in stubs.
- ASSERT(current_map->IsJSGlobalProxyMap() ||
+ DCHECK(current_map->IsJSGlobalProxyMap() ||
!current_map->is_access_check_needed());
prototype = handle(JSObject::cast(current_map->prototype()));
if (current_map->is_dictionary_map() &&
- !current_map->IsJSGlobalObjectMap() &&
- !current_map->IsJSGlobalProxyMap()) {
+ !current_map->IsJSGlobalObjectMap()) {
+ DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast.
if (!name->IsUniqueName()) {
- ASSERT(name->IsString());
+ DCHECK(name->IsString());
name = factory()->InternalizeString(Handle<String>::cast(name));
}
- ASSERT(current.is_null() ||
+ DCHECK(current.is_null() ||
current->property_dictionary()->FindEntry(name) ==
NameDictionary::kNotFound);
@@ -891,6 +668,9 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
// Check access rights to the global object. This has to happen after
// the map check so that we know that the object is actually a global
// object.
+ // This allows us to install generated handlers for accesses to the
+ // global proxy (as opposed to using slow ICs). See corresponding code
+ // in LookupForRead().
if (current_map->IsJSGlobalProxyMap()) {
__ CheckAccessGlobalProxy(reg, scratch2, miss);
} else if (current_map->IsJSGlobalObjectMap()) {
@@ -901,12 +681,15 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
reg = holder_reg; // From now on the object will be in holder_reg.
- if (heap()->InNewSpace(*prototype)) {
- // The prototype is in new space; we cannot store a reference to it
- // in the code. Load it from the map.
+ // Two possible reasons for loading the prototype from the map:
+ // (1) Can't store references to new space in code.
+ // (2) Handler is shared for all receivers with the same prototype
+ // map (but not necessarily the same prototype instance).
+ bool load_prototype_from_map =
+ heap()->InNewSpace(*prototype) || depth == 1;
+ if (load_prototype_from_map) {
__ ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
} else {
- // The prototype is in old space; load it directly.
__ mov(reg, Operand(prototype));
}
}
@@ -925,7 +708,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
}
// Perform security check for access to the global object.
- ASSERT(current_map->IsJSGlobalProxyMap() ||
+ DCHECK(current_map->IsJSGlobalProxyMap() ||
!current_map->is_access_check_needed());
if (current_map->IsJSGlobalProxyMap()) {
__ CheckAccessGlobalProxy(reg, scratch1, miss);
@@ -936,7 +719,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
}
-void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
+void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
if (!miss->is_unused()) {
Label success;
__ b(&success);
@@ -947,94 +730,26 @@ void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
}
-void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
+void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
if (!miss->is_unused()) {
Label success;
__ b(&success);
- GenerateRestoreName(masm(), miss, name);
+ GenerateRestoreName(miss, name);
TailCallBuiltin(masm(), MissBuiltin(kind()));
__ bind(&success);
}
}
-Register LoadStubCompiler::CallbackHandlerFrontend(
- Handle<HeapType> type,
- Register object_reg,
- Handle<JSObject> holder,
- Handle<Name> name,
- Handle<Object> callback) {
- Label miss;
-
- Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
-
- if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
- ASSERT(!reg.is(scratch2()));
- ASSERT(!reg.is(scratch3()));
- ASSERT(!reg.is(scratch4()));
-
- // Load the properties dictionary.
- Register dictionary = scratch4();
- __ ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
-
- // Probe the dictionary.
- Label probe_done;
- NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
- &miss,
- &probe_done,
- dictionary,
- this->name(),
- scratch2(),
- scratch3());
- __ bind(&probe_done);
-
- // If probing finds an entry in the dictionary, scratch3 contains the
- // pointer into the dictionary. Check that the value is the callback.
- Register pointer = scratch3();
- const int kElementsStartOffset = NameDictionary::kHeaderSize +
- NameDictionary::kElementsStartIndex * kPointerSize;
- const int kValueOffset = kElementsStartOffset + kPointerSize;
- __ ldr(scratch2(), FieldMemOperand(pointer, kValueOffset));
- __ cmp(scratch2(), Operand(callback));
- __ b(ne, &miss);
- }
-
- HandlerFrontendFooter(name, &miss);
- return reg;
-}
-
-
-void LoadStubCompiler::GenerateLoadField(Register reg,
- Handle<JSObject> holder,
- PropertyIndex field,
- Representation representation) {
- if (!reg.is(receiver())) __ mov(receiver(), reg);
- if (kind() == Code::LOAD_IC) {
- LoadFieldStub stub(isolate(),
- field.is_inobject(holder),
- field.translate(holder),
- representation);
- GenerateTailCall(masm(), stub.GetCode());
- } else {
- KeyedLoadFieldStub stub(isolate(),
- field.is_inobject(holder),
- field.translate(holder),
- representation);
- GenerateTailCall(masm(), stub.GetCode());
- }
-}
-
-
-void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
+void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
// Return the constant value.
__ Move(r0, value);
__ Ret();
}
-void LoadStubCompiler::GenerateLoadCallback(
- Register reg,
- Handle<ExecutableAccessorInfo> callback) {
+void NamedLoadHandlerCompiler::GenerateLoadCallback(
+ Register reg, Handle<ExecutableAccessorInfo> callback) {
// Build AccessorInfo::args_ list on the stack and push property name below
// the exit frame to make GC aware of them and store pointers to them.
STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
@@ -1044,9 +759,9 @@ void LoadStubCompiler::GenerateLoadCallback(
STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
- ASSERT(!scratch2().is(reg));
- ASSERT(!scratch3().is(reg));
- ASSERT(!scratch4().is(reg));
+ DCHECK(!scratch2().is(reg));
+ DCHECK(!scratch3().is(reg));
+ DCHECK(!scratch4().is(reg));
__ push(receiver());
if (heap()->InNewSpace(callback->data())) {
__ Move(scratch3(), callback);
@@ -1079,14 +794,11 @@ void LoadStubCompiler::GenerateLoadCallback(
}
-void LoadStubCompiler::GenerateLoadInterceptor(
- Register holder_reg,
- Handle<Object> object,
- Handle<JSObject> interceptor_holder,
- LookupResult* lookup,
- Handle<Name> name) {
- ASSERT(interceptor_holder->HasNamedInterceptor());
- ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
+void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg,
+ LookupResult* lookup,
+ Handle<Name> name) {
+ DCHECK(holder()->HasNamedInterceptor());
+ DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
// So far the most popular follow ups for interceptor loads are FIELD
// and CALLBACKS, so inline only them, other cases may be added
@@ -1097,10 +809,12 @@ void LoadStubCompiler::GenerateLoadInterceptor(
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
- ExecutableAccessorInfo* callback =
- ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
- compile_followup_inline = callback->getter() != NULL &&
- callback->IsCompatibleReceiver(*object);
+ Handle<ExecutableAccessorInfo> callback(
+ ExecutableAccessorInfo::cast(lookup->GetCallbackObject()));
+ compile_followup_inline =
+ callback->getter() != NULL &&
+ ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback,
+ type());
}
}
@@ -1108,13 +822,13 @@ void LoadStubCompiler::GenerateLoadInterceptor(
// Compile the interceptor call, followed by inline code to load the
// property from further up the prototype chain if the call fails.
// Check that the maps haven't changed.
- ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
+ DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
// Preserve the receiver register explicitly whenever it is different from
// the holder and it is needed should the interceptor return without any
// result. The CALLBACKS case needs the receiver to be passed into C++ code,
// the FIELD case might cause a miss during the prototype check.
- bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
+ bool must_perfrom_prototype_check = *holder() != lookup->holder();
bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
(lookup->type() == CALLBACKS || must_perfrom_prototype_check);
@@ -1131,7 +845,7 @@ void LoadStubCompiler::GenerateLoadInterceptor(
// interceptor's holder has been compiled before (see a caller
// of this method.)
CompileCallLoadPropertyWithInterceptor(
- masm(), receiver(), holder_reg, this->name(), interceptor_holder,
+ masm(), receiver(), holder_reg, this->name(), holder(),
IC::kLoadPropertyWithInterceptorOnly);
// Check if interceptor provided a value for property. If it's
@@ -1152,31 +866,26 @@ void LoadStubCompiler::GenerateLoadInterceptor(
// Leave the internal frame.
}
- GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
+ GenerateLoadPostInterceptor(holder_reg, name, lookup);
} else { // !compile_followup_inline
// Call the runtime system to load the interceptor.
// Check that the maps haven't changed.
- PushInterceptorArguments(masm(), receiver(), holder_reg,
- this->name(), interceptor_holder);
+ PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
+ holder());
ExternalReference ref =
- ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptor),
isolate());
- __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
+ __ TailCallExternalReference(
+ ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1);
}
}
-Handle<Code> StoreStubCompiler::CompileStoreCallback(
- Handle<JSObject> object,
- Handle<JSObject> holder,
- Handle<Name> name,
+Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
+ Handle<JSObject> object, Handle<Name> name,
Handle<ExecutableAccessorInfo> callback) {
- Register holder_reg = HandlerFrontend(
- IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);
-
- // Stub never generated for non-global objects that require access checks.
- ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
+ Register holder_reg = Frontend(receiver(), name);
__ push(receiver()); // receiver
__ push(holder_reg);
@@ -1199,10 +908,8 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
#define __ ACCESS_MASM(masm)
-void StoreStubCompiler::GenerateStoreViaSetter(
- MacroAssembler* masm,
- Handle<HeapType> type,
- Register receiver,
+void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
+ MacroAssembler* masm, Handle<HeapType> type, Register receiver,
Handle<JSFunction> setter) {
// ----------- S t a t e -------------
// -- lr : return address
@@ -1218,8 +925,7 @@ void StoreStubCompiler::GenerateStoreViaSetter(
if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
// Swap in the global receiver.
__ ldr(receiver,
- FieldMemOperand(
- receiver, JSGlobalObject::kGlobalReceiverOffset));
+ FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
}
__ Push(receiver, value());
ParameterCount actual(1);
@@ -1246,14 +952,13 @@ void StoreStubCompiler::GenerateStoreViaSetter(
#define __ ACCESS_MASM(masm())
-Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
- Handle<JSObject> object,
+Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
Handle<Name> name) {
__ Push(receiver(), this->name(), value());
// Do tail-call to the runtime system.
- ExternalReference store_ic_property =
- ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
+ ExternalReference store_ic_property = ExternalReference(
+ IC_Utility(IC::kStorePropertyWithInterceptor), isolate());
__ TailCallExternalReference(store_ic_property, 3, 1);
// Return the generated code.
@@ -1261,62 +966,35 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
}
-Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
- Handle<JSObject> last,
- Handle<Name> name) {
- NonexistentHandlerFrontend(type, last, name);
-
- // Return undefined if maps of the full prototype chain are still the
- // same and no global property with this name contains a value.
- __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
- __ Ret();
-
- // Return the generated code.
- return GetCode(kind(), Code::FAST, name);
-}
-
-
-Register* LoadStubCompiler::registers() {
- // receiver, name, scratch1, scratch2, scratch3, scratch4.
- static Register registers[] = { r0, r2, r3, r1, r4, r5 };
- return registers;
-}
-
-
-Register* KeyedLoadStubCompiler::registers() {
+Register* PropertyAccessCompiler::load_calling_convention() {
// receiver, name, scratch1, scratch2, scratch3, scratch4.
- static Register registers[] = { r1, r0, r2, r3, r4, r5 };
+ Register receiver = LoadIC::ReceiverRegister();
+ Register name = LoadIC::NameRegister();
+ static Register registers[] = { receiver, name, r3, r0, r4, r5 };
return registers;
}
-Register StoreStubCompiler::value() {
- return r0;
-}
-
-
-Register* StoreStubCompiler::registers() {
+Register* PropertyAccessCompiler::store_calling_convention() {
// receiver, name, scratch1, scratch2, scratch3.
- static Register registers[] = { r1, r2, r3, r4, r5 };
+ Register receiver = StoreIC::ReceiverRegister();
+ Register name = StoreIC::NameRegister();
+ DCHECK(r3.is(KeyedStoreIC::MapRegister()));
+ static Register registers[] = { receiver, name, r3, r4, r5 };
return registers;
}
-Register* KeyedStoreStubCompiler::registers() {
- // receiver, name, scratch1, scratch2, scratch3.
- static Register registers[] = { r2, r1, r3, r4, r5 };
- return registers;
-}
+Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); }
#undef __
#define __ ACCESS_MASM(masm)
-void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
- Handle<HeapType> type,
- Register receiver,
- Handle<JSFunction> getter) {
+void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
+ MacroAssembler* masm, Handle<HeapType> type, Register receiver,
+ Handle<JSFunction> getter) {
// ----------- S t a t e -------------
// -- r0 : receiver
// -- r2 : name
@@ -1330,8 +1008,7 @@ void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
// Swap in the global receiver.
__ ldr(receiver,
- FieldMemOperand(
- receiver, JSGlobalObject::kGlobalReceiverOffset));
+ FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
}
__ push(receiver);
ParameterCount actual(0);
@@ -1355,57 +1032,61 @@ void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
#define __ ACCESS_MASM(masm())
-Handle<Code> LoadStubCompiler::CompileLoadGlobal(
- Handle<HeapType> type,
- Handle<GlobalObject> global,
- Handle<PropertyCell> cell,
- Handle<Name> name,
- bool is_dont_delete) {
+Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
+ Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
Label miss;
- HandlerFrontendHeader(type, receiver(), global, name, &miss);
+ FrontendHeader(receiver(), name, &miss);
// Get the value from the cell.
- __ mov(r3, Operand(cell));
- __ ldr(r4, FieldMemOperand(r3, Cell::kValueOffset));
+ Register result = StoreIC::ValueRegister();
+ __ mov(result, Operand(cell));
+ __ ldr(result, FieldMemOperand(result, Cell::kValueOffset));
// Check for deleted property if property can actually be deleted.
- if (!is_dont_delete) {
+ if (is_configurable) {
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r4, ip);
+ __ cmp(result, ip);
__ b(eq, &miss);
}
Counters* counters = isolate()->counters();
__ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
- __ mov(r0, r4);
__ Ret();
- HandlerFrontendFooter(name, &miss);
+ FrontendFooter(name, &miss);
// Return the generated code.
return GetCode(kind(), Code::NORMAL, name);
}
-Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
- TypeHandleList* types,
- CodeHandleList* handlers,
- Handle<Name> name,
- Code::StubType type,
- IcCheckType check) {
+Handle<Code> PropertyICCompiler::CompilePolymorphic(TypeHandleList* types,
+ CodeHandleList* handlers,
+ Handle<Name> name,
+ Code::StubType type,
+ IcCheckType check) {
Label miss;
if (check == PROPERTY &&
(kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
- __ cmp(this->name(), Operand(name));
- __ b(ne, &miss);
+ // In case we are compiling an IC for dictionary loads and stores, just
+ // check whether the name is unique.
+ if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) {
+ __ JumpIfNotUniqueName(this->name(), &miss);
+ } else {
+ __ cmp(this->name(), Operand(name));
+ __ b(ne, &miss);
+ }
}
Label number_case;
Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
__ JumpIfSmi(receiver(), smi_target);
+ // Polymorphic keyed stores may use the map register
Register map_reg = scratch1();
+ DCHECK(kind() != Code::KEYED_STORE_IC ||
+ map_reg.is(KeyedStoreIC::MapRegister()));
int receiver_count = types->length();
int number_of_handled_maps = 0;
@@ -1418,13 +1099,13 @@ Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
__ mov(ip, Operand(map));
__ cmp(map_reg, ip);
if (type->Is(HeapType::Number())) {
- ASSERT(!number_case.is_unused());
+ DCHECK(!number_case.is_unused());
__ bind(&number_case);
}
__ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
}
}
- ASSERT(number_of_handled_maps != 0);
+ DCHECK(number_of_handled_maps != 0);
__ bind(&miss);
TailCallBuiltin(masm(), MissBuiltin(kind()));
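
The dispatch shape of the generated polymorphic handler — compare the receiver map against each handled map and tail-call the matching handler, otherwise fall through to the miss path — can be modelled by a small standalone helper (illustration only, not V8 code):

#include <cstddef>

// Returns the index of the matching map, or -1 to signal a miss; the generated
// stub instead tail-calls the handler at that index or the MissBuiltin.
int SelectHandler(const void* receiver_map, const void* const known_maps[],
                  size_t count) {
  for (size_t i = 0; i < count; ++i) {
    if (known_maps[i] == receiver_map) return static_cast<int>(i);
  }
  return -1;
}
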
@@ -1432,24 +1113,12 @@ Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
// Return the generated code.
InlineCacheState state =
number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
- return GetICCode(kind(), type, name, state);
-}
-
-
-void StoreStubCompiler::GenerateStoreArrayLength() {
- // Prepare tail call to StoreIC_ArrayLength.
- __ Push(receiver(), value());
-
- ExternalReference ref =
- ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
- masm()->isolate());
- __ TailCallExternalReference(ref, 2, 1);
+ return GetCode(kind(), type, name, state);
}
-Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
- MapHandleList* receiver_maps,
- CodeHandleList* handler_stubs,
+Handle<Code> PropertyICCompiler::CompileKeyedStorePolymorphic(
+ MapHandleList* receiver_maps, CodeHandleList* handler_stubs,
MapHandleList* transitioned_maps) {
Label miss;
__ JumpIfSmi(receiver(), &miss);
@@ -1474,8 +1143,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
TailCallBuiltin(masm(), MissBuiltin(kind()));
// Return the generated code.
- return GetICCode(
- kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
+ return GetCode(kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}
@@ -1483,21 +1151,19 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
#define __ ACCESS_MASM(masm)
-void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
+void ElementHandlerCompiler::GenerateLoadDictionaryElement(
MacroAssembler* masm) {
- // ---------- S t a t e --------------
- // -- lr : return address
- // -- r0 : key
- // -- r1 : receiver
- // -----------------------------------
+ // The return address is in lr.
Label slow, miss;
- Register key = r0;
- Register receiver = r1;
+ Register key = LoadIC::NameRegister();
+ Register receiver = LoadIC::ReceiverRegister();
+ DCHECK(receiver.is(r1));
+ DCHECK(key.is(r2));
- __ UntagAndJumpIfNotSmi(r2, key, &miss);
+ __ UntagAndJumpIfNotSmi(r6, key, &miss);
__ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
- __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
+ __ LoadFromNumberDictionary(&slow, r4, key, r0, r6, r3, r5);
__ Ret();
__ bind(&slow);
@@ -1505,21 +1171,11 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
masm->isolate()->counters()->keyed_load_external_array_slow(),
1, r2, r3);
- // ---------- S t a t e --------------
- // -- lr : return address
- // -- r0 : key
- // -- r1 : receiver
- // -----------------------------------
TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
// Miss case, call the runtime.
__ bind(&miss);
- // ---------- S t a t e --------------
- // -- lr : return address
- // -- r0 : key
- // -- r1 : receiver
- // -----------------------------------
TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}