Diffstat (limited to 'deps/v8/src/mips/full-codegen-mips.cc')
-rw-r--r--  deps/v8/src/mips/full-codegen-mips.cc  169
1 file changed, 86 insertions(+), 83 deletions(-)
diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc
index 41bc68e6d0..18ee02dc5c 100644
--- a/deps/v8/src/mips/full-codegen-mips.cc
+++ b/deps/v8/src/mips/full-codegen-mips.cc
@@ -138,9 +138,6 @@ void FullCodeGenerator::Generate() {
CompilationInfo* info = info_;
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
-
- InitializeFeedbackVector();
-
profiling_counter_ = isolate()->factory()->NewCell(
Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
SetFunctionPosition(function());
@@ -682,7 +679,7 @@ void FullCodeGenerator::DoTest(Expression* condition,
Label* fall_through) {
__ mov(a0, result_register());
Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
- CallIC(ic, condition->test_id());
+ CallIC(ic, NOT_CONTEXTUAL, condition->test_id());
__ mov(at, zero_reg);
Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}
@@ -1047,7 +1044,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
- CallIC(ic, clause->CompareId());
+ CallIC(ic, NOT_CONTEXTUAL, clause->CompareId());
patch_site.EmitPatchInfo();
Label skip;
@@ -1090,7 +1087,6 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Comment cmnt(masm_, "[ ForInStatement");
- int slot = stmt->ForInFeedbackSlot();
SetStatementPosition(stmt);
Label loop, exit;
@@ -1176,13 +1172,13 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label non_proxy;
__ bind(&fixed_array);
- Handle<Object> feedback = Handle<Object>(
- Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
- isolate());
- StoreFeedbackVectorSlot(slot, feedback);
- __ li(a1, FeedbackVector());
- __ li(a2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
- __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));
+ Handle<Cell> cell = isolate()->factory()->NewCell(
+ Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
+ isolate()));
+ RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
+ __ li(a1, cell);
+ __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
+ __ sw(a2, FieldMemOperand(a1, Cell::kValueOffset));
__ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
__ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
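The hunk above swaps the feedback-vector slot for a heap-allocated Cell: the marker Smi is now written straight into the cell's value field at runtime instead of into a FixedArray element. A minimal standalone sketch of that pattern follows; FeedbackCell, RecordCell, and the marker names are illustrative stand-ins, not V8 APIs.

// Illustrative sketch only: models a per-site feedback cell replacing a
// feedback-vector slot, as in the for-in hunk above. The names are
// hypothetical stand-ins, not V8 types.
#include <cstdio>
#include <map>
#include <memory>

enum Marker { kForInFastCase = 1, kForInSlowCase = 2 };

struct FeedbackCell {            // stands in for a heap-allocated Cell
  Marker value;
};

// "Compile time": allocate one cell per for-in site, keyed by an AST id.
std::map<int, std::shared_ptr<FeedbackCell>> type_feedback_cells;

std::shared_ptr<FeedbackCell> RecordCell(int ast_id, Marker initial) {
  auto cell = std::make_shared<FeedbackCell>(FeedbackCell{initial});
  type_feedback_cells[ast_id] = cell;  // analogous to RecordTypeFeedbackCell
  return cell;
}

int main() {
  // Codegen marks the cell fast-case up front...
  auto cell = RecordCell(/*ForInFeedbackId=*/42, kForInFastCase);
  // ...and the generated code stores the slow-case marker directly into the
  // cell, like the sw into FieldMemOperand(a1, Cell::kValueOffset) above.
  cell->value = kForInSlowCase;
  std::printf("cell for id 42 now holds marker %d\n",
              type_feedback_cells[42]->value);
  return 0;
}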
@@ -1490,7 +1486,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
// variables.
switch (var->location()) {
case Variable::UNALLOCATED: {
- Comment cmnt(masm_, "[ Global variable");
+ Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in a2 and the global
// object (receiver) in a0.
__ lw(a0, GlobalObjectOperand());
@@ -1503,8 +1499,9 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
case Variable::PARAMETER:
case Variable::LOCAL:
case Variable::CONTEXT: {
- Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
- : "[ Stack variable");
+ Comment cmnt(masm_, var->IsContextSlot()
+ ? "Context variable"
+ : "Stack variable");
if (var->binding_needs_init()) {
// var->scope() may be NULL when the proxy is located in eval code and
// refers to a potential outside binding. Currently those bindings are
@@ -1569,12 +1566,12 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
}
case Variable::LOOKUP: {
- Comment cmnt(masm_, "[ Lookup variable");
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
+ Comment cmnt(masm_, "Lookup variable");
__ li(a1, Operand(var->name()));
__ Push(cp, a1); // Context and name.
__ CallRuntime(Runtime::kLoadContextSlot, 2);
@@ -1706,7 +1703,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(a0, result_register());
__ li(a2, Operand(key->value()));
__ lw(a1, MemOperand(sp));
- CallStoreIC(key->LiteralFeedbackId());
+ CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -2114,7 +2111,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
__ lw(a1, MemOperand(sp, kPointerSize));
__ lw(a0, MemOperand(sp, 2 * kPointerSize));
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallIC(ic, TypeFeedbackId::None());
+ CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None());
__ mov(a0, v0);
__ mov(a1, a0);
__ sw(a1, MemOperand(sp, 2 * kPointerSize));
@@ -2312,7 +2309,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
__ mov(a0, result_register());
// Call keyed load IC. It has arguments key and receiver in a0 and a1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- CallIC(ic, prop->PropertyFeedbackId());
+ CallIC(ic, NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}
@@ -2340,7 +2337,8 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
BinaryOpICStub stub(op, mode);
- CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
+ CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -2419,7 +2417,8 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(a1);
BinaryOpICStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
+ CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
+ expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(v0);
}
@@ -2457,7 +2456,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
__ mov(a1, result_register());
__ pop(a0); // Restore value.
__ li(a2, Operand(prop->key()->AsLiteral()->value()));
- CallStoreIC();
+ CallStoreIC(NOT_CONTEXTUAL);
break;
}
case KEYED_PROPERTY: {
@@ -2477,28 +2476,6 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
}
-void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
- Variable* var, MemOperand location) {
- __ sw(result_register(), location);
- if (var->IsContextSlot()) {
- // RecordWrite may destroy all its register arguments.
- __ Move(a3, result_register());
- int offset = Context::SlotOffset(var->index());
- __ RecordWriteContextSlot(
- a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
- }
-}
-
-
-void FullCodeGenerator::EmitCallStoreContextSlot(
- Handle<String> name, LanguageMode mode) {
- __ li(a1, Operand(name));
- __ li(a0, Operand(Smi::FromInt(mode)));
- __ Push(v0, cp, a1, a0); // Value, context, name, strict mode.
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
-}
-
-
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
if (var->IsUnallocated()) {
@@ -2506,30 +2483,36 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ mov(a0, result_register());
__ li(a2, Operand(var->name()));
__ lw(a1, GlobalObjectOperand());
- CallStoreIC();
-
+ CallStoreIC(CONTEXTUAL);
} else if (op == Token::INIT_CONST) {
// Const initializers need a write barrier.
ASSERT(!var->IsParameter()); // No const parameters.
- if (var->IsLookupSlot()) {
+ if (var->IsStackLocal()) {
+ Label skip;
+ __ lw(a1, StackOperand(var));
+ __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
+ __ Branch(&skip, ne, a1, Operand(t0));
+ __ sw(result_register(), StackOperand(var));
+ __ bind(&skip);
+ } else {
+ ASSERT(var->IsContextSlot() || var->IsLookupSlot());
+ // Like var declarations, const declarations are hoisted to function
+ // scope. However, unlike var initializers, const initializers are
+ // able to drill a hole to that function context, even from inside a
+ // 'with' context. We thus bypass the normal static scope lookup for
+ // var->IsContextSlot().
__ li(a0, Operand(var->name()));
__ Push(v0, cp, a0); // Context and name.
__ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
- } else {
- ASSERT(var->IsStackAllocated() || var->IsContextSlot());
- Label skip;
- MemOperand location = VarOperand(var, a1);
- __ lw(a2, location);
- __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- __ Branch(&skip, ne, a2, Operand(at));
- EmitStoreToStackLocalOrContextSlot(var, location);
- __ bind(&skip);
}
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
if (var->IsLookupSlot()) {
- EmitCallStoreContextSlot(var->name(), language_mode());
+ __ li(a1, Operand(var->name()));
+ __ li(a0, Operand(Smi::FromInt(language_mode())));
+ __ Push(v0, cp, a1, a0); // Value, context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
} else {
ASSERT(var->IsStackAllocated() || var->IsContextSlot());
Label assign;
@@ -2542,16 +2525,20 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ CallRuntime(Runtime::kThrowReferenceError, 1);
// Perform the assignment.
__ bind(&assign);
- EmitStoreToStackLocalOrContextSlot(var, location);
+ __ sw(result_register(), location);
+ if (var->IsContextSlot()) {
+ // RecordWrite may destroy all its register arguments.
+ __ mov(a3, result_register());
+ int offset = Context::SlotOffset(var->index());
+ __ RecordWriteContextSlot(
+ a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
+ }
}
} else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
// Assignment to var or initializing assignment to let/const
// in harmony mode.
- if (var->IsLookupSlot()) {
- EmitCallStoreContextSlot(var->name(), language_mode());
- } else {
- ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
+ if (var->IsStackAllocated() || var->IsContextSlot()) {
MemOperand location = VarOperand(var, a1);
if (generate_debug_code_ && op == Token::INIT_LET) {
// Check for an uninitialized let binding.
@@ -2559,10 +2546,23 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
}
- EmitStoreToStackLocalOrContextSlot(var, location);
+ // Perform the assignment.
+ __ sw(v0, location);
+ if (var->IsContextSlot()) {
+ __ mov(a3, v0);
+ int offset = Context::SlotOffset(var->index());
+ __ RecordWriteContextSlot(
+ a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
+ }
+ } else {
+ ASSERT(var->IsLookupSlot());
+ __ li(a1, Operand(var->name()));
+ __ li(a0, Operand(Smi::FromInt(language_mode())));
+ __ Push(v0, cp, a1, a0); // Value, context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
}
}
- // Non-initializing assignments to consts are ignored.
+ // Non-initializing assignments to consts are ignored.
}
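For INIT_CONST on a stack local, the rewritten code above hole-checks the slot inline and stores only while it is still uninitialized; context and lookup slots fall through to Runtime::kInitializeConstContextSlot. A minimal sketch of that hole-check-then-store idea, with a hypothetical sentinel standing in for V8's hole value:

// Illustrative sketch only: "store only if the slot still holds the hole",
// the pattern the INIT_CONST stack-local branch implements above.
#include <cstdio>

static const int kTheHole = -1;   // hypothetical "not yet initialized" sentinel

void InitConstStackLocal(int* slot, int value) {
  if (*slot != kTheHole) return;  // later initializers are silently ignored
  *slot = value;                  // the first initializer wins
}

int main() {
  int slot = kTheHole;
  InitConstStackLocal(&slot, 7);  // initializes the const slot
  InitConstStackLocal(&slot, 9);  // skipped, like the branch over the sw above
  std::printf("const slot = %d\n", slot);
  return 0;
}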
@@ -2578,7 +2578,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
__ li(a2, Operand(prop->key()->AsLiteral()->value()));
__ pop(a1);
- CallStoreIC(expr->AssignmentFeedbackId());
+ CallStoreIC(NOT_CONTEXTUAL, expr->AssignmentFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(v0);
@@ -2601,7 +2601,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, expr->AssignmentFeedbackId());
+ CallIC(ic, NOT_CONTEXTUAL, expr->AssignmentFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(v0);
@@ -2628,8 +2628,10 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
void FullCodeGenerator::CallIC(Handle<Code> code,
+ ContextualMode mode,
TypeFeedbackId id) {
ic_total_count_++;
+ ASSERT(mode != CONTEXTUAL || id.IsNone());
__ Call(code, RelocInfo::CODE_TARGET, id);
}
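CallIC now threads a ContextualMode through every call site and asserts that contextual calls carry no type-feedback id. A simplified stand-in for that signature (TypeFeedbackId and CallIC here are sketches, not the real declarations):

// Illustrative sketch only: a CallIC-like helper taking a ContextualMode,
// mirroring the ASSERT added in the hunk above.
#include <cassert>
#include <cstdio>

enum ContextualMode { NOT_CONTEXTUAL, CONTEXTUAL };

struct TypeFeedbackId {
  int value;
  static TypeFeedbackId None() { return TypeFeedbackId{-1}; }
  bool IsNone() const { return value == -1; }
};

void CallIC(const char* code, ContextualMode mode,
            TypeFeedbackId id = TypeFeedbackId::None()) {
  // Contextual calls must not record a type-feedback id.
  assert(mode != CONTEXTUAL || id.IsNone());
  std::printf("call %s (feedback id %d)\n", code, id.value);
}

int main() {
  CallIC("CompareIC", NOT_CONTEXTUAL, TypeFeedbackId{7});  // ok
  CallIC("StoreIC", CONTEXTUAL);                           // ok: id defaults to None
  return 0;
}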
@@ -2739,15 +2741,15 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
SetSourcePosition(expr->position());
Handle<Object> uninitialized =
- TypeFeedbackInfo::UninitializedSentinel(isolate());
- StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
- __ li(a2, FeedbackVector());
- __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
+ __ li(a2, Operand(cell));
// Record call targets in unoptimized code.
CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
- __ CallStub(&stub);
+ __ CallStub(&stub, expr->CallFeedbackId());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
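Call-target feedback likewise moves from a vector slot to a freshly allocated Cell holding the uninitialized sentinel, loaded into a2 for the CallFunctionStub. Assuming the stub keeps the usual uninitialized -> monomorphic -> megamorphic discipline for that cell, a hypothetical sketch of the state machine:

// Illustrative sketch only: a plausible state machine for a call-target
// feedback cell. The sentinels and CallFeedbackCell are stand-ins, not the
// actual contents the stub maintains.
#include <cstdio>

using Target = void (*)();
static void f() {}
static void g() {}
static void MegamorphicSentinelFn() {}  // stands in for the megamorphic sentinel

struct CallFeedbackCell {
  static Target Uninitialized() { return nullptr; }
  static Target Megamorphic() { return &MegamorphicSentinelFn; }
  Target value = Uninitialized();

  void Record(Target callee) {
    if (value == Uninitialized()) {
      value = callee;                       // first call: remember the target
    } else if (value != callee && value != Megamorphic()) {
      value = Megamorphic();                // a second target was seen
    }
  }
};

int main() {
  CallFeedbackCell cell;
  cell.Record(f);   // cell becomes monomorphic on f
  cell.Record(f);   // still monomorphic
  cell.Record(g);   // different target: go megamorphic
  std::printf("megamorphic: %d\n",
              cell.value == CallFeedbackCell::Megamorphic());
  return 0;
}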
@@ -2926,10 +2928,10 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
// Record call targets in unoptimized code.
Handle<Object> uninitialized =
- TypeFeedbackInfo::UninitializedSentinel(isolate());
- StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
- __ li(a2, FeedbackVector());
- __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
+ RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
+ __ li(a2, Operand(cell));
CallConstructStub stub(RECORD_CALL_TARGET);
__ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
@@ -4469,7 +4471,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());
BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
- CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
+ CallIC(stub.GetCode(isolate()),
+ NOT_CONTEXTUAL,
+ expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4499,7 +4503,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(a0, result_register()); // Value.
__ li(a2, Operand(prop->key()->AsLiteral()->value())); // Name.
__ pop(a1); // Receiver.
- CallStoreIC(expr->CountStoreFeedbackId());
+ CallStoreIC(NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4516,7 +4520,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- CallIC(ic, expr->CountStoreFeedbackId());
+ CallIC(ic, NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4536,7 +4540,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
ASSERT(!context()->IsTest());
VariableProxy* proxy = expr->AsVariableProxy();
if (proxy != NULL && proxy->var()->IsUnallocated()) {
- Comment cmnt(masm_, "[ Global variable");
+ Comment cmnt(masm_, "Global variable");
__ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(proxy->name()));
// Use a regular load, not a contextual load, to avoid a reference
@@ -4545,7 +4549,6 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
PrepareForBailout(expr, TOS_REG);
context()->Plug(v0);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
- Comment cmnt(masm_, "[ Lookup slot");
Label done, slow;
// Generate code for loading from variables potentially shadowed
@@ -4702,7 +4705,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
- CallIC(ic, expr->CompareOperationFeedbackId());
+ CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
@@ -4736,7 +4739,7 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
} else {
Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
- CallIC(ic, expr->CompareOperationFeedbackId());
+ CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);