author     Michaël Zasso <targos@protonmail.com>  2017-02-14 11:27:26 +0100
committer  Michaël Zasso <targos@protonmail.com>  2017-02-22 15:55:42 +0100
commit     7a77daf24344db7942e34c962b0f1ee729ab7af5 (patch)
tree       e7cbe7bf4e2f4b802a8f5bc18336c546cd6a0d7f /deps/v8/src/compiler/arm64
parent     5f08871ee93ea739148cc49e0f7679e33c70295a (diff)
deps: update V8 to 5.6.326.55
PR-URL: https://github.com/nodejs/node/pull/10992
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Diffstat (limited to 'deps/v8/src/compiler/arm64')
-rw-r--r--  deps/v8/src/compiler/arm64/code-generator-arm64.cc        | 114
-rw-r--r--  deps/v8/src/compiler/arm64/instruction-selector-arm64.cc  |  61
-rw-r--r--  deps/v8/src/compiler/arm64/unwinding-info-writer-arm64.cc |   5
3 files changed, 113 insertions, 67 deletions
diff --git a/deps/v8/src/compiler/arm64/code-generator-arm64.cc b/deps/v8/src/compiler/arm64/code-generator-arm64.cc
index f543b18682..8b1cb578e0 100644
--- a/deps/v8/src/compiler/arm64/code-generator-arm64.cc
+++ b/deps/v8/src/compiler/arm64/code-generator-arm64.cc
@@ -709,8 +709,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
frame_access_state()->ClearSPDelta();
break;
}
- case kArchTailCallJSFunctionFromJSFunction:
- case kArchTailCallJSFunction: {
+ case kArchTailCallJSFunctionFromJSFunction: {
Register func = i.InputRegister(0);
if (FLAG_debug_code) {
// Check the function's context matches the context argument.
@@ -720,11 +719,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ cmp(cp, temp);
__ Assert(eq, kWrongFunctionContext);
}
- if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
- AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
- i.TempRegister(0), i.TempRegister(1),
- i.TempRegister(2));
- }
+ AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
+ i.TempRegister(0), i.TempRegister(1),
+ i.TempRegister(2));
__ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
__ Jump(x10);
frame_access_state()->ClearSPDelta();
@@ -786,7 +783,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
- AssembleReturn();
+ AssembleReturn(instr->InputAt(0));
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), masm()->StackPointer());
@@ -1759,7 +1756,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
DeoptimizeReason deoptimization_reason =
GetDeoptimizationReason(deoptimization_id);
- __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
+ __ RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
__ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
return kSuccess;
}
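
The RecordDeoptReason change above passes the SourcePosition value itself rather than its raw() encoding. A minimal sketch of the design choice, using an illustrative wrapper type (not V8's actual class):

    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-in for a packed source-position value. Keeping the
    // encoding behind a type means call sites cannot pass an arbitrary int
    // where a position is expected.
    class SourcePosition {
     public:
      explicit SourcePosition(uint32_t raw) : raw_(raw) {}
      uint32_t raw() const { return raw_; }
     private:
      uint32_t raw_;
    };

    // Taking the typed value (instead of uint32_t) pushes unpacking into the
    // callee and lets the compiler reject mixed-up arguments.
    void RecordDeoptReason(int reason, SourcePosition pos, int deopt_id) {
      std::printf("reason=%d pos=%u id=%d\n", reason, pos.raw(), deopt_id);
    }

    int main() {
      RecordDeoptReason(1, SourcePosition(42), 7);
      // RecordDeoptReason(1, 42, 7);  // would not compile: int is not a position
    }
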
@@ -1798,43 +1795,57 @@ void CodeGenerator::AssembleConstructFrame() {
__ AssertCspAligned();
}
+ int fixed_frame_size = descriptor->CalculateFixedFrameSize();
+ int shrink_slots = frame()->GetTotalFrameSlotCount() - fixed_frame_size;
+
if (frame_access_state()->has_frame()) {
+ // Link the frame
if (descriptor->IsJSFunctionCall()) {
DCHECK(!descriptor->UseNativeStack());
__ Prologue(this->info()->GeneratePreagedPrologue());
} else {
- if (descriptor->IsCFunctionCall()) {
- __ Push(lr, fp);
- __ Mov(fp, masm_.StackPointer());
- __ Claim(frame()->GetSpillSlotCount());
- } else {
- __ StubPrologue(info()->GetOutputStackFrameType(),
- frame()->GetTotalFrameSlotCount());
- }
+ __ Push(lr, fp);
+ __ Mov(fp, masm_.StackPointer());
}
-
if (!info()->GeneratePreagedPrologue()) {
unwinding_info_writer_.MarkFrameConstructed(__ pc_offset());
}
- }
- int shrink_slots = frame()->GetSpillSlotCount();
-
- if (info()->is_osr()) {
- // TurboFan OSR-compiled functions cannot be entered directly.
- __ Abort(kShouldNotDirectlyEnterOsrFunction);
-
- // Unoptimized code jumps directly to this entrypoint while the unoptimized
- // frame is still on the stack. Optimized code uses OSR values directly from
- // the unoptimized frame. Thus, all that needs to be done is to allocate the
- // remaining stack slots.
- if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
- osr_pc_offset_ = __ pc_offset();
- shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
- }
+ // Create OSR entry if applicable
+ if (info()->is_osr()) {
+ // TurboFan OSR-compiled functions cannot be entered directly.
+ __ Abort(kShouldNotDirectlyEnterOsrFunction);
+
+ // Unoptimized code jumps directly to this entrypoint while the
+ // unoptimized frame is still on the stack. Optimized code uses OSR
+ // values directly from the unoptimized frame. Thus, all that needs to
+ // be done is to allocate the remaining stack slots.
+ if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
+ osr_pc_offset_ = __ pc_offset();
+ shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
+ }
- if (descriptor->IsJSFunctionCall()) {
- __ Claim(shrink_slots);
+ // Build remainder of frame, including accounting for and filling in
+ // frame-specific header information, e.g. claiming the extra slot that
+ // other platforms explicitly push for STUB frames and frames recording
+ // their argument count.
+ __ Claim(shrink_slots + (fixed_frame_size & 1));
+ if (descriptor->PushArgumentCount()) {
+ __ Str(kJavaScriptCallArgCountRegister,
+ MemOperand(fp, OptimizedBuiltinFrameConstants::kArgCOffset));
+ }
+ bool is_stub_frame =
+ !descriptor->IsJSFunctionCall() && !descriptor->IsCFunctionCall();
+ if (is_stub_frame) {
+ UseScratchRegisterScope temps(masm());
+ Register temp = temps.AcquireX();
+ __ Mov(temp, Smi::FromInt(info()->GetOutputStackFrameType()));
+ __ Str(temp, MemOperand(fp, TypedFrameConstants::kFrameTypeOffset));
+ }
}
// Save FP registers.
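
The `(fixed_frame_size & 1)` term in the Claim above exists because arm64 requires the stack pointer to stay 16-byte aligned while frame slots are 8 bytes wide, so the combined fixed-plus-claimed slot count must come out even. A standalone sketch of that padding arithmetic (not V8 code):

    #include <cassert>

    // Pad the claimed (variable) part of the frame so that the fixed part
    // plus the claimed part is an even number of 8-byte slots, keeping the
    // stack pointer 16-byte aligned.
    int ClaimedSlots(int fixed_frame_size, int shrink_slots) {
      return shrink_slots + (fixed_frame_size & 1);
    }

    int main() {
      assert((5 + ClaimedSlots(5, 10)) % 2 == 0);  // odd fixed part: 5 + 11 = 16
      assert((4 + ClaimedSlots(4, 10)) % 2 == 0);  // even fixed part: 4 + 10 = 14
    }
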
@@ -1857,8 +1868,7 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
-
-void CodeGenerator::AssembleReturn() {
+void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
// Restore registers.
@@ -1877,16 +1887,25 @@ void CodeGenerator::AssembleReturn() {
unwinding_info_writer_.MarkBlockWillExit();
+ Arm64OperandConverter g(this, nullptr);
int pop_count = static_cast<int>(descriptor->StackParameterCount());
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
- // Canonicalize JSFunction return sites for now.
- if (return_label_.is_bound()) {
- __ B(&return_label_);
- return;
+ // Canonicalize JSFunction return sites for now unless they have a variable
+ // number of stack slot pops.
+ if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
+ if (return_label_.is_bound()) {
+ __ B(&return_label_);
+ return;
+ } else {
+ __ Bind(&return_label_);
+ AssembleDeconstructFrame();
+ if (descriptor->UseNativeStack()) {
+ pop_count += (pop_count & 1); // align
+ }
+ }
} else {
- __ Bind(&return_label_);
AssembleDeconstructFrame();
if (descriptor->UseNativeStack()) {
pop_count += (pop_count & 1); // align
@@ -1895,7 +1914,16 @@ void CodeGenerator::AssembleReturn() {
} else if (descriptor->UseNativeStack()) {
pop_count += (pop_count & 1); // align
}
- __ Drop(pop_count);
+
+ if (pop->IsImmediate()) {
+ DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
+ pop_count += g.ToConstant(pop).ToInt32();
+ __ Drop(pop_count);
+ } else {
+ Register pop_reg = g.ToRegister(pop);
+ __ Add(pop_reg, pop_reg, pop_count);
+ __ Drop(pop_reg);
+ }
if (descriptor->UseNativeStack()) {
__ AssertCspAligned();
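
To summarize the AssembleReturn rewrite in this file: the pop operand is either an immediate (folded into a single Drop at code-generation time) or a register (adjusted at run time and used as a variable drop amount). A toy model of that split, with made-up types standing in for V8's operands:

    #include <cstdint>
    #include <cstdio>
    #include <variant>

    // Either a compile-time extra pop count or a register holding one.
    using PopOperand = std::variant<int32_t, unsigned /* register number */>;

    void AssembleDrop(PopOperand pop, int static_pop_count) {
      if (std::holds_alternative<int32_t>(pop)) {
        // Immediate case: both counts are known now, emit one Drop.
        std::printf("Drop #%d\n", static_pop_count + std::get<int32_t>(pop));
      } else {
        // Register case: add the static count at run time, then drop.
        unsigned reg = std::get<unsigned>(pop);
        std::printf("Add x%u, x%u, #%d\n", reg, reg, static_pop_count);
        std::printf("Drop x%u\n", reg);
      }
    }

    int main() {
      AssembleDrop(int32_t{2}, 3);   // prints: Drop #5
      AssembleDrop(unsigned{4}, 3);  // prints: Add x4, x4, #3 / Drop x4
    }
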
diff --git a/deps/v8/src/compiler/arm64/instruction-selector-arm64.cc b/deps/v8/src/compiler/arm64/instruction-selector-arm64.cc
index da27be8626..0eef53c6d5 100644
--- a/deps/v8/src/compiler/arm64/instruction-selector-arm64.cc
+++ b/deps/v8/src/compiler/arm64/instruction-selector-arm64.cc
@@ -2152,6 +2152,20 @@ FlagsCondition MapForCbz(FlagsCondition cond) {
}
}
+void EmitBranchOrDeoptimize(InstructionSelector* selector,
+ InstructionCode opcode, InstructionOperand value,
+ FlagsContinuation* cont) {
+ Arm64OperandGenerator g(selector);
+ if (cont->IsBranch()) {
+ selector->Emit(cont->Encode(opcode), g.NoOutput(), value,
+ g.Label(cont->true_block()), g.Label(cont->false_block()));
+ } else {
+ DCHECK(cont->IsDeoptimize());
+ selector->EmitDeoptimize(cont->Encode(opcode), g.NoOutput(), value,
+ cont->reason(), cont->frame_state());
+ }
+}
+
// Try to emit TBZ, TBNZ, CBZ or CBNZ for certain comparisons of {node}
// against zero, depending on the condition.
bool TryEmitCbzOrTbz(InstructionSelector* selector, Node* node, Node* user,
@@ -2160,12 +2174,16 @@ bool TryEmitCbzOrTbz(InstructionSelector* selector, Node* node, Node* user,
USE(m_user);
DCHECK(m_user.right().Is(0) || m_user.left().Is(0));
- // Only handle branches.
- if (!cont->IsBranch()) return false;
+ // Only handle branches and deoptimisations.
+ if (!cont->IsBranch() && !cont->IsDeoptimize()) return false;
switch (cond) {
case kSignedLessThan:
case kSignedGreaterThanOrEqual: {
+ // We don't generate TBZ/TBNZ for deoptimisations, as they have a
+ // shorter range than conditional branches and generating them for
+ // deoptimisations results in more veneers.
+ if (cont->IsDeoptimize()) return false;
Arm64OperandGenerator g(selector);
cont->Overwrite(MapForTbz(cond));
Int32Matcher m(node);
@@ -2192,9 +2210,8 @@ bool TryEmitCbzOrTbz(InstructionSelector* selector, Node* node, Node* user,
case kUnsignedGreaterThan: {
Arm64OperandGenerator g(selector);
cont->Overwrite(MapForCbz(cond));
- selector->Emit(cont->Encode(kArm64CompareAndBranch32), g.NoOutput(),
- g.UseRegister(node), g.Label(cont->true_block()),
- g.Label(cont->false_block()));
+ EmitBranchOrDeoptimize(selector, kArm64CompareAndBranch32,
+ g.UseRegister(node), cont);
return true;
}
default:
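
Background for the veneer comment in this hunk: on arm64, TBZ/TBNZ encode a 14-bit signed instruction offset while CBZ/CBNZ encode 19 bits, so the former reach only about +/-32 KB versus +/-1 MB. Deoptimization exits can sit far from the test, which would force the assembler to insert veneers. A quick sketch of the range arithmetic:

    #include <cstdio>

    // arm64 branch offsets are signed, counted in 4-byte instructions.
    constexpr long RangeBytes(int offset_bits) {
      return (1L << (offset_bits - 1)) * 4;
    }

    int main() {
      std::printf("TBZ/TBNZ reach: +/-%ld bytes\n", RangeBytes(14));  // 32768
      std::printf("CBZ/CBNZ reach: +/-%ld bytes\n", RangeBytes(19));  // 1048576
    }
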
@@ -2336,21 +2353,22 @@ void VisitFloat64Compare(InstructionSelector* selector, Node* node,
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
Node* value, FlagsContinuation* cont) {
Arm64OperandGenerator g(selector);
- while (selector->CanCover(user, value)) {
+ // Try to combine with comparisons against 0 by simply inverting the branch.
+ while (value->opcode() == IrOpcode::kWord32Equal &&
+ selector->CanCover(user, value)) {
+ Int32BinopMatcher m(value);
+ if (!m.right().Is(0)) break;
+
+ user = value;
+ value = m.left().node();
+ cont->Negate();
+ }
+
+ if (selector->CanCover(user, value)) {
switch (value->opcode()) {
- case IrOpcode::kWord32Equal: {
- // Combine with comparisons against 0 by simply inverting the
- // continuation.
- Int32BinopMatcher m(value);
- if (m.right().Is(0)) {
- user = value;
- value = m.left().node();
- cont->Negate();
- continue;
- }
+ case IrOpcode::kWord32Equal:
cont->OverwriteAndNegateIfEqual(kEqual);
return VisitWord32Compare(selector, value, cont);
- }
case IrOpcode::kInt32LessThan:
cont->OverwriteAndNegateIfEqual(kSignedLessThan);
return VisitWord32Compare(selector, value, cont);
@@ -2380,10 +2398,10 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
kLogical64Imm);
}
// Merge the Word64Equal(x, 0) comparison into a cbz instruction.
- if (cont->IsBranch()) {
- selector->Emit(cont->Encode(kArm64CompareAndBranch), g.NoOutput(),
- g.UseRegister(left), g.Label(cont->true_block()),
- g.Label(cont->false_block()));
+ if (cont->IsBranch() || cont->IsDeoptimize()) {
+ EmitBranchOrDeoptimize(selector,
+ cont->Encode(kArm64CompareAndBranch),
+ g.UseRegister(left), cont);
return;
}
}
@@ -2488,7 +2506,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
default:
break;
}
- break;
}
// Branch could not be combined with a compare, compare against 0 and branch.
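
The restructuring above turns the old combine loop into two phases: first peel off any chain of Word32Equal(x, 0) wrappers, negating the continuation once per layer, then dispatch on the innermost value exactly once. A self-contained sketch of the peeling idea (toy node type, CanCover checks omitted):

    #include <cassert>

    // Toy IR node: either an equal-to-zero wrapper around another node, or a
    // leaf value we would hand to the real comparison visitor.
    struct Node {
      bool is_equal_zero;
      Node* input;  // valid only when is_equal_zero is true
    };

    // Peel Word32Equal(x, 0) layers, flipping the branch sense each time.
    Node* StripEqualZero(Node* value, bool* negated) {
      while (value->is_equal_zero) {
        *negated = !*negated;
        value = value->input;
      }
      return value;
    }

    int main() {
      Node leaf{false, nullptr};
      Node eq1{true, &leaf};
      Node eq2{true, &eq1};  // models Word32Equal(Word32Equal(x, 0), 0)
      bool negated = false;
      Node* v = StripEqualZero(&eq2, &negated);
      assert(v == &leaf && !negated);  // two inversions cancel
    }
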
diff --git a/deps/v8/src/compiler/arm64/unwinding-info-writer-arm64.cc b/deps/v8/src/compiler/arm64/unwinding-info-writer-arm64.cc
index f4b732bf77..3095423854 100644
--- a/deps/v8/src/compiler/arm64/unwinding-info-writer-arm64.cc
+++ b/deps/v8/src/compiler/arm64/unwinding-info-writer-arm64.cc
@@ -15,7 +15,8 @@ void UnwindingInfoWriter::BeginInstructionBlock(int pc_offset,
block_will_exit_ = false;
- DCHECK_LT(block->rpo_number().ToInt(), block_initial_states_.size());
+ DCHECK_LT(block->rpo_number().ToInt(),
+ static_cast<int>(block_initial_states_.size()));
const BlockInitialState* initial_state =
block_initial_states_[block->rpo_number().ToInt()];
if (initial_state) {
@@ -42,7 +43,7 @@ void UnwindingInfoWriter::EndInstructionBlock(const InstructionBlock* block) {
for (const RpoNumber& successor : block->successors()) {
int successor_index = successor.ToInt();
- DCHECK_LT(successor_index, block_initial_states_.size());
+ DCHECK_LT(successor_index, static_cast<int>(block_initial_states_.size()));
const BlockInitialState* existing_state =
block_initial_states_[successor_index];
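
The static_cast<int> additions in this file exist because DCHECK_LT expands to a comparison, and block_initial_states_.size() returns size_t while the other operand is int; comparing signed with unsigned trips -Wsign-compare under -Werror. A minimal reproduction of the pattern:

    #include <cassert>
    #include <vector>

    int main() {
      std::vector<int> states(8);
      int rpo = 3;
      // rpo < states.size() mixes signed and unsigned and warns under
      // -Wall -Werror; casting the size keeps the comparison signed.
      assert(rpo < static_cast<int>(states.size()));
    }
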