author    isaacs <i@izs.me> 2012-03-28 19:51:38 -0700
committer isaacs <i@izs.me> 2012-03-28 19:51:38 -0700
commit    4b64542fe09477fc5c70e974eb1a78cdce755eb7 (patch)
tree      b4d4cdfd5b07efbdae51098b422fde7844ff4715 /deps/v8/src/mips/full-codegen-mips.cc
parent    8a15147bc53849417f8737dd873877d497867c9f (diff)
download  node-new-4b64542fe09477fc5c70e974eb1a78cdce755eb7.tar.gz
Upgrade V8 to 3.9.24.6
Diffstat (limited to 'deps/v8/src/mips/full-codegen-mips.cc')
-rw-r--r--  deps/v8/src/mips/full-codegen-mips.cc | 264
1 file changed, 193 insertions(+), 71 deletions(-)
diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc
index c5ef2ccbf7..657bee8657 100644
--- a/deps/v8/src/mips/full-codegen-mips.cc
+++ b/deps/v8/src/mips/full-codegen-mips.cc
@@ -42,6 +42,7 @@
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
+#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"
@@ -119,8 +120,10 @@ class JumpPatchSite BASE_EMBEDDED {
};
+// TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
int FullCodeGenerator::self_optimization_header_size() {
- return 11 * Instruction::kInstrSize;
+ UNREACHABLE();
+ return 10 * Instruction::kInstrSize;
}
@@ -142,32 +145,11 @@ void FullCodeGenerator::Generate() {
CompilationInfo* info = info_;
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+ profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
- // We can optionally optimize based on counters rather than statistical
- // sampling.
- if (info->ShouldSelfOptimize()) {
- if (FLAG_trace_opt_verbose) {
- PrintF("[adding self-optimization header to %s]\n",
- *info->function()->debug_name()->ToCString());
- }
- has_self_optimization_header_ = true;
- MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
- Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
- JSGlobalPropertyCell* cell;
- if (maybe_cell->To(&cell)) {
- __ li(a2, Handle<JSGlobalPropertyCell>(cell));
- __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
- __ Subu(a3, a3, Operand(Smi::FromInt(1)));
- __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
- Handle<Code> compile_stub(
- isolate()->builtins()->builtin(Builtins::kLazyRecompile));
- __ Jump(compile_stub, RelocInfo::CODE_TARGET, eq, a3, Operand(zero_reg));
- ASSERT_EQ(masm_->pc_offset(), self_optimization_header_size());
- }
- }
-
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -341,6 +323,34 @@ void FullCodeGenerator::ClearAccumulator() {
}
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+ __ li(a2, Operand(profiling_counter_));
+ __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+ __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
+ __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+ int reset_value = FLAG_interrupt_budget;
+ if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+ // Self-optimization is a one-off thing: if it fails, don't try again.
+ reset_value = Smi::kMaxValue;
+ }
+ if (isolate()->IsDebuggerActive()) {
+ // Detect debug break requests as soon as possible.
+ reset_value = 10;
+ }
+ __ li(a2, Operand(profiling_counter_));
+ __ li(a3, Operand(Smi::FromInt(reset_value)));
+ __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+}
+
+
+static const int kMaxBackEdgeWeight = 127;
+static const int kBackEdgeDistanceDivisor = 142;
+
+
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
// The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
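[The two helpers added above replace the old self-optimization header: each function now owns a JSGlobalPropertyCell holding a Smi budget that back edges and returns decrement. A minimal standalone C++ model of that lifecycle — plain ints stand in for Smis and cells, and the flag values are illustrative assumptions, not V8's real defaults:]

    #include <cstdio>
    #include <limits>

    // Illustrative stand-ins for V8 flags/constants (not the real defaults).
    static const int kInterruptBudget = 5000;  // assumed FLAG_interrupt_budget
    static const int kSmiMaxValue = std::numeric_limits<int>::max() / 2;

    struct ProfilingCounter {
      int value = kInterruptBudget;  // NewJSGlobalPropertyCell(FLAG_interrupt_budget)

      // EmitProfilingCounterDecrement: subtract the back-edge weight.
      void Decrement(int weight) { value -= weight; }

      // EmitProfilingCounterReset: choose the next budget.
      void Reset(bool should_self_optimize, bool retry_self_opt,
                 bool debugger_active) {
        int reset_value = kInterruptBudget;
        if (should_self_optimize && !retry_self_opt) {
          // Self-optimization is one-shot: park the counter near Smi::kMaxValue.
          reset_value = kSmiMaxValue;
        }
        if (debugger_active) {
          reset_value = 10;  // detect debug-break requests quickly
        }
        value = reset_value;
      }
    };

    int main() {
      ProfilingCounter counter;
      int back_edges = 0;
      while (true) {
        counter.Decrement(17);    // some per-back-edge weight
        ++back_edges;
        if (counter.value < 0) {  // the "slt at, a3, zero_reg" test below
          std::printf("InterruptStub after %d back edges\n", back_edges);
          counter.Reset(false, false, false);
          break;
        }
      }
      return 0;
    }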
@@ -351,16 +361,35 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
Comment cmnt(masm_, "[ Stack check");
Label ok;
- __ LoadRoot(t0, Heap::kStackLimitRootIndex);
- __ sltu(at, sp, t0);
- __ beq(at, zero_reg, &ok);
- // CallStub will emit a li t9, ... first, so it is safe to use the delay slot.
- StackCheckStub stub;
- __ CallStub(&stub);
+ if (FLAG_count_based_interrupts) {
+ int weight = 1;
+ if (FLAG_weighted_back_edges) {
+ ASSERT(back_edge_target->is_bound());
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ __ slt(at, a3, zero_reg);
+ __ beq(at, zero_reg, &ok);
+ // CallStub will emit a li t9 first, so it is safe to use the delay slot.
+ InterruptStub stub;
+ __ CallStub(&stub);
+ } else {
+ __ LoadRoot(t0, Heap::kStackLimitRootIndex);
+ __ sltu(at, sp, t0);
+ __ beq(at, zero_reg, &ok);
+ // CallStub will emit a li t9 first, so it is safe to use the delay slot.
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ }
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
RecordStackCheck(stmt->OsrEntryId());
+ if (FLAG_count_based_interrupts) {
+ EmitProfilingCounterReset();
+ }
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
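[The weight formula above scales interrupt frequency with loop size: weight = Min(127, Max(1, distance / 142)), so small loop bodies decrement the budget slowly while large ones burn it faster per iteration. A standalone check of the formula; the sample distances are made-up:]

    #include <algorithm>
    #include <cstdio>

    static const int kMaxBackEdgeWeight = 127;
    static const int kBackEdgeDistanceDivisor = 142;

    int BackEdgeWeight(int distance) {
      return std::min(kMaxBackEdgeWeight,
                      std::max(1, distance / kBackEdgeDistanceDivisor));
    }

    int main() {
      // distance = bytes of code generated since the back-edge target.
      std::printf("%d\n", BackEdgeWeight(40));     // tiny loop   -> 1
      std::printf("%d\n", BackEdgeWeight(1000));   // medium loop -> 7
      std::printf("%d\n", BackEdgeWeight(50000));  // huge loop   -> clamped to 127
      return 0;
    }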
@@ -383,6 +412,32 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(v0);
__ CallRuntime(Runtime::kTraceExit, 1);
}
+ if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
+ // Pretend that the exit is a backwards jump to the entry.
+ int weight = 1;
+ if (info_->ShouldSelfOptimize()) {
+ weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+ } else if (FLAG_weighted_back_edges) {
+ int distance = masm_->pc_offset();
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ Label ok;
+ __ Branch(&ok, ge, a3, Operand(zero_reg));
+ __ push(v0);
+ if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+ __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(a2);
+ __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
+ } else {
+ InterruptStub stub;
+ __ CallStub(&stub);
+ }
+ __ pop(v0);
+ EmitProfilingCounterReset();
+ __ bind(&ok);
+ }
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
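[Treating the exit as a backwards jump to the entry lets straight-line functions feed the profiler too: a self-optimizing function spreads its budget over a fixed number of returns, otherwise the weight is distance-based exactly as at real back edges. A sketch of the weight choice — the flag values here are assumptions for illustration, not V8's defaults:]

    #include <algorithm>

    static const int kMaxBackEdgeWeight = 127;
    static const int kBackEdgeDistanceDivisor = 142;
    static const int kInterruptBudget = 5000;  // assumed FLAG_interrupt_budget
    static const int kSelfOptCount = 130;      // assumed FLAG_self_opt_count

    int ReturnWeight(bool should_self_optimize, bool weighted_back_edges,
                     int pc_offset) {
      int weight = 1;
      if (should_self_optimize) {
        // Roughly: trigger self-optimization after kSelfOptCount returns.
        weight = kInterruptBudget / kSelfOptCount;
      } else if (weighted_back_edges) {
        // Same scaling as EmitStackCheck, with the whole function as "distance".
        weight = std::min(kMaxBackEdgeWeight,
                          std::max(1, pc_offset / kBackEdgeDistanceDivisor));
      }
      return weight;
    }

    int main() { return ReturnWeight(true, false, 0) > 0 ? 0 : 1; }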
@@ -902,7 +957,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ Call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();
__ Branch(&next_test, ne, v0, Operand(zero_reg));
@@ -1195,7 +1250,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, mode);
+ CallIC(ic, mode);
}
@@ -1251,7 +1306,7 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
__ subu(at, v0, at); // Sub as compare: at == 0 on eq.
if (local->mode() == CONST) {
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
- __ movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
+ __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
} else { // LET || CONST_HARMONY
__ Branch(done, ne, at, Operand(zero_reg));
__ li(a0, Operand(var->name()));
@@ -1279,7 +1334,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
__ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(var->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(v0);
break;
}
@@ -1343,7 +1398,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
// Uninitialized const bindings outside of harmony mode are unholed.
ASSERT(var->mode() == CONST);
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
- __ movz(v0, a0, at); // Conditional move: Undefined if TheHole.
+ __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
}
context()->Plug(v0);
break;
@@ -1421,6 +1476,16 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
}
+void FullCodeGenerator::EmitAccessor(Expression* expression) {
+ if (expression == NULL) {
+ __ LoadRoot(a1, Heap::kNullValueRootIndex);
+ __ push(a1);
+ } else {
+ VisitForStackValue(expression);
+ }
+}
+
+
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
Handle<FixedArray> constant_properties = expr->constant_properties();
@@ -1456,6 +1521,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// marked expressions, no store code is emitted.
expr->CalculateEmitStore();
+ AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1482,7 +1548,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ Call(ic, RelocInfo::CODE_TARGET, key->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1505,27 +1571,29 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
}
break;
case ObjectLiteral::Property::GETTER:
+ accessor_table.lookup(key)->second->getter = value;
+ break;
case ObjectLiteral::Property::SETTER:
- // Duplicate receiver on stack.
- __ lw(a0, MemOperand(sp));
- __ push(a0);
- VisitForStackValue(key);
- if (property->kind() == ObjectLiteral::Property::GETTER) {
- VisitForStackValue(value);
- __ LoadRoot(a1, Heap::kNullValueRootIndex);
- __ push(a1);
- } else {
- __ LoadRoot(a1, Heap::kNullValueRootIndex);
- __ push(a1);
- VisitForStackValue(value);
- }
- __ li(a0, Operand(Smi::FromInt(NONE)));
- __ push(a0);
- __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
+ accessor_table.lookup(key)->second->setter = value;
break;
}
}
+ // Emit code to define accessors, using only a single call to the runtime for
+ // each pair of corresponding getters and setters.
+ for (AccessorTable::Iterator it = accessor_table.begin();
+ it != accessor_table.end();
+ ++it) {
+ __ lw(a0, MemOperand(sp)); // Duplicate receiver.
+ __ push(a0);
+ VisitForStackValue(it->first);
+ EmitAccessor(it->second->getter);
+ EmitAccessor(it->second->setter);
+ __ li(a0, Operand(Smi::FromInt(NONE)));
+ __ push(a0);
+ __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
+ }
+
if (expr->has_function()) {
ASSERT(result_saved);
__ lw(a0, MemOperand(sp));
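[The AccessorTable introduced above batches a literal's getters and setters by property name, so each get/set pair costs one kDefineOrRedefineAccessorProperty call instead of two. A standalone model of the pairing, with std::map standing in for the zone-allocated table and strings standing in for expressions:]

    #include <cstdio>
    #include <map>
    #include <string>

    struct AccessorPair {
      const char* getter = nullptr;  // nullptr plays the role of the null value
      const char* setter = nullptr;
    };

    int main() {
      std::map<std::string, AccessorPair> accessor_table;
      // Models: { get x() {...}, set x(v) {...}, get y() {...} }
      accessor_table["x"].getter = "get x";
      accessor_table["x"].setter = "set x";
      accessor_table["y"].getter = "get y";

      for (const auto& entry : accessor_table) {
        // One runtime call per name, with null for a missing half of the pair.
        std::printf("DefineOrRedefineAccessorProperty(%s, %s, %s)\n",
                    entry.first.c_str(),
                    entry.second.getter ? entry.second.getter : "null",
                    entry.second.setter ? entry.second.setter : "null");
      }
      return 0;
    }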
@@ -1753,7 +1821,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
__ li(a2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name in a0 and a2.
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1762,7 +1830,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
__ mov(a0, result_register());
// Call keyed load IC. It has arguments key and receiver in a0 and a1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1790,7 +1858,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
BinaryOpStub stub(op, mode);
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -1873,7 +1941,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(a1);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
context()->Plug(v0);
}
@@ -1914,7 +1982,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ Call(ic);
+ CallIC(ic);
break;
}
case KEYED_PROPERTY: {
@@ -1927,7 +1995,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- __ Call(ic);
+ CallIC(ic);
break;
}
}
@@ -1945,7 +2013,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Const initializers need a write barrier.
@@ -2064,7 +2132,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2116,7 +2184,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2151,6 +2219,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
}
+void FullCodeGenerator::CallIC(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ unsigned ast_id) {
+ ic_total_count_++;
+ __ Call(code, rmode, ast_id);
+}
+
+
void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Object> name,
RelocInfo::Mode mode) {
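[CallIC is a thin wrapper over __ Call that counts every inline-cache call site emitted for the function, which is why the direct __ Call sites throughout this file are rerouted through it. Presumably the count feeds type-feedback statistics (what fraction of ICs have recorded type info); a hedged standalone sketch of that bookkeeping, where the coverage threshold is an assumption:]

    // Minimal model, not V8 code: count IC sites at compile time, then
    // judge type-feedback coverage at optimization time.
    struct TypeFeedbackStats {
      int ic_total_count = 0;           // incremented by every CallIC()
      int ic_with_type_info_count = 0;  // filled in later by the runtime

      // Hypothetical coverage check (the 75% threshold is an assumption).
      bool EnoughTypeInfoToOptimize() const {
        if (ic_total_count == 0) return true;
        return 100 * ic_with_type_info_count / ic_total_count >= 75;
      }
    };

    int main() {
      TypeFeedbackStats stats;
      for (int i = 0; i < 8; ++i) ++stats.ic_total_count;  // eight CallIC sites
      stats.ic_with_type_info_count = 6;
      return stats.EnoughTypeInfoToOptimize() ? 0 : 1;     // 75% -> optimize
    }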
@@ -2168,7 +2244,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ Call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2201,7 +2277,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2600,7 +2676,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
Label entry, loop;
// The use of t2 to store the valueOf symbol assumes that it is not otherwise
// used in the loop below.
- __ li(t2, Operand(FACTORY->value_of_symbol()));
+ __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
__ jmp(&entry);
__ bind(&loop);
__ lw(a3, MemOperand(t0, 0));
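[The LoadRoot change above swaps an embedded factory handle for an indexed load off the roots array: the symbol comes from a fixed slot relative to the dedicated root-pointer register rather than a pointer baked into the generated code. A rough standalone model of that access pattern (slot names and values are illustrative):]

    #include <cstdint>

    // Model: the isolate's root array holds canonical heap objects at fixed slots.
    enum RootIndex { kStackLimitRootIndex, kvalue_of_symbolRootIndex, kRootCount };
    static intptr_t roots[kRootCount];

    // LoadRoot: a single indexed load off a register already pointing at roots.
    intptr_t LoadRoot(RootIndex index) { return roots[index]; }

    int main() {
      roots[kvalue_of_symbolRootIndex] = 0x1234;  // stand-in for the symbol pointer
      return LoadRoot(kvalue_of_symbolRootIndex) == 0x1234 ? 0 : 1;
    }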
@@ -2970,6 +3046,52 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ ASSERT_NE(NULL, args->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label runtime, done;
+ Register object = v0;
+ Register result = v0;
+ Register scratch0 = t5;
+ Register scratch1 = a1;
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ GetObjectType(object, scratch1, scratch1);
+ __ Assert(eq, "Trying to get date field from non-date.",
+ scratch1, Operand(JS_DATE_TYPE));
+#endif
+
+ if (index->value() == 0) {
+ __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ li(scratch1, Operand(stamp));
+ __ lw(scratch1, MemOperand(scratch1));
+ __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
+ __ Branch(&runtime, ne, scratch1, Operand(scratch0));
+ __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch1);
+ __ li(a1, Operand(index));
+ __ Move(a0, object);
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+
+ context()->Plug(v0);
+}
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
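[EmitDateField above inlines the fast path for cached date components: if the stamp stored in the JSDate object still matches the isolate's current date-cache stamp, the precomputed field is read directly; otherwise a C function recomputes it. A standalone model of that check — the field layout, names, and the uncached-field cutoff are illustrative, not the real JSDate layout:]

    #include <cstdio>

    struct DateCache { int stamp = 1; };
    struct JSDate {
      double value = 0;       // index 0: the time value itself, never cached
      int cache_stamp = 0;    // compared against DateCache::stamp
      double fields[8] = {};  // cached components (year, month, day, ...)
    };

    double GetDateField(JSDate* date, DateCache* cache, int index) {
      if (index == 0) return date->value;  // always valid
      if (date->cache_stamp == cache->stamp) {
        return date->fields[index - 1];    // fast path: cache still valid
      }
      // Slow path: stand-in for the get_date_field_function C call, which
      // recomputes the component and refreshes the per-object stamp.
      date->fields[index - 1] = 42;
      date->cache_stamp = cache->stamp;
      return date->fields[index - 1];
    }

    int main() {
      DateCache cache;
      JSDate date;
      std::printf("%g\n", GetDateField(&date, &cache, 3));  // slow path, fills cache
      std::printf("%g\n", GetDateField(&date, &cache, 3));  // fast path
      return 0;
    }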
@@ -3769,7 +3891,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ Call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
@@ -3925,7 +4047,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
__ mov(a0, result_register());
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
context()->Plug(v0);
}
@@ -4036,7 +4158,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());
BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4069,7 +4191,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4087,7 +4209,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4113,7 +4235,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- __ Call(ic);
+ CallIC(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(v0);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -4291,7 +4413,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);