author     Michaël Zasso <targos@protonmail.com>   2017-05-02 10:50:00 +0200
committer  Michaël Zasso <targos@protonmail.com>   2017-05-06 20:02:35 +0200
commit     60d1aac8d225e844e68ae48e8f3d58802e635fbe (patch)
tree       922f347dd054db18d88666fad7181e5a777f4022 /deps/v8/src/crankshaft
parent     73d9c0f903ae371cd5011af64c3a6f69a1bda978 (diff)
download   node-new-60d1aac8d225e844e68ae48e8f3d58802e635fbe.tar.gz
deps: update V8 to 5.8.283.38
PR-URL: https://github.com/nodejs/node/pull/12784
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Gibson Fahnestock <gibfahn@gmail.com>
Diffstat (limited to 'deps/v8/src/crankshaft')
40 files changed, 421 insertions, 393 deletions
diff --git a/deps/v8/src/crankshaft/arm/lithium-codegen-arm.cc b/deps/v8/src/crankshaft/arm/lithium-codegen-arm.cc index 2fb746dcbd..4783808ba7 100644 --- a/deps/v8/src/crankshaft/arm/lithium-codegen-arm.cc +++ b/deps/v8/src/crankshaft/arm/lithium-codegen-arm.cc @@ -270,7 +270,7 @@ bool LCodeGen::GenerateDeferredCode() { DCHECK(!frame_is_built_); DCHECK(info()->IsStub()); frame_is_built_ = true; - __ Move(scratch0(), Smi::FromInt(StackFrame::STUB)); + __ mov(scratch0(), Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ PushCommonFrame(scratch0()); Comment(";;; Deferred code"); } @@ -344,7 +344,7 @@ bool LCodeGen::GenerateJumpTable() { // This variant of deopt can only be used with stubs. Since we don't // have a function pointer to install in the stack frame that we're // building, install a special marker there instead. - __ mov(ip, Operand(Smi::FromInt(StackFrame::STUB))); + __ mov(ip, Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ push(ip); DCHECK(info()->IsStub()); } @@ -688,7 +688,7 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code, // Block literal pool emission to ensure nop indicating no inlined smi code // is in the correct position. Assembler::BlockConstPoolScope block_const_pool(masm()); - __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode); + __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode, false); RecordSafepointWithLazyDeopt(instr, safepoint_mode); // Signal that we don't inline smi code before these stubs in the @@ -2130,12 +2130,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ b(eq, instr->TrueLabel(chunk_)); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. - __ CompareInstanceType(map, ip, SIMD128_VALUE_TYPE); - __ b(eq, instr->TrueLabel(chunk_)); - } - if (expected & ToBooleanHint::kHeapNumber) { // heap number -> false iff +0, -0, or NaN. DwVfpRegister dbl_scratch = double_scratch0(); @@ -2941,7 +2935,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ ldr(result, MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset)); - __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + __ cmp(result, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); // Result is the frame pointer for the frame if not adapted and for the real // frame below the adaptor frame if adapted. @@ -3505,7 +3500,8 @@ void LCodeGen::PrepareForTailCall(const ParameterCount& actual, __ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ ldr(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset)); - __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + __ cmp(scratch3, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); __ b(ne, &no_arguments_adaptor); // Drop current frame and load arguments count from arguments adaptor frame. @@ -4763,6 +4759,13 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. 
+ __ ldr(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); + __ ldr(scratch0(), FieldMemOperand(scratch0(), Map::kBitField3Offset)); + __ tst(scratch0(), Operand(Map::Deprecated::kMask)); + __ b(eq, &deopt); + { PushSafepointRegistersScope scope(this); __ push(object); @@ -4773,7 +4776,12 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { __ StoreToSafepointRegisterSlot(r0, scratch0()); } __ tst(scratch0(), Operand(kSmiTagMask)); - DeoptimizeIf(eq, instr, DeoptimizeReason::kInstanceMigrationFailed); + __ b(ne, &done); + + __ bind(&deopt); + DeoptimizeIf(al, instr, DeoptimizeReason::kInstanceMigrationFailed); + + __ bind(&done); } @@ -5124,17 +5132,6 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); final_branch_condition = eq; -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory->type##_string())) { \ - __ JumpIfSmi(input, false_label); \ - __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); \ - __ CompareRoot(scratch, Heap::k##Type##MapRootIndex); \ - final_branch_condition = eq; - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - } else { __ b(false_label); } @@ -5231,6 +5228,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) { __ cmp(sp, Operand(ip)); __ b(hs, &done); Handle<Code> stack_check = isolate()->builtins()->StackCheck(); + masm()->MaybeCheckConstPool(); PredictableCodeSizeScope predictable(masm()); predictable.ExpectSize(CallCodeSize(stack_check, RelocInfo::CODE_TARGET)); DCHECK(instr->context()->IsRegister()); diff --git a/deps/v8/src/crankshaft/arm64/lithium-codegen-arm64.cc b/deps/v8/src/crankshaft/arm64/lithium-codegen-arm64.cc index 141ac3f610..8152924420 100644 --- a/deps/v8/src/crankshaft/arm64/lithium-codegen-arm64.cc +++ b/deps/v8/src/crankshaft/arm64/lithium-codegen-arm64.cc @@ -724,7 +724,7 @@ bool LCodeGen::GenerateDeferredCode() { DCHECK(info()->IsStub()); frame_is_built_ = true; __ Push(lr, fp); - __ Mov(fp, Smi::FromInt(StackFrame::STUB)); + __ Mov(fp, StackFrame::TypeToMarker(StackFrame::STUB)); __ Push(fp); __ Add(fp, __ StackPointer(), TypedFrameConstants::kFixedFrameSizeFromFp); @@ -803,7 +803,7 @@ bool LCodeGen::GenerateJumpTable() { UseScratchRegisterScope temps(masm()); Register stub_marker = temps.AcquireX(); __ Bind(&needs_frame); - __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB)); + __ Mov(stub_marker, StackFrame::TypeToMarker(StackFrame::STUB)); __ Push(cp, stub_marker); __ Add(fp, __ StackPointer(), 2 * kPointerSize); } @@ -1618,7 +1618,7 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ Ldr(result, MemOperand(previous_fp, CommonFrameConstants::kContextOrFrameTypeOffset)); - __ Cmp(result, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); + __ Cmp(result, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)); __ Csel(result, fp, previous_fp, ne); } else { __ Mov(result, fp); @@ -1865,12 +1865,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ B(eq, true_label); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. 
- __ CompareInstanceType(map, scratch, SIMD128_VALUE_TYPE); - __ B(eq, true_label); - } - if (expected & ToBooleanHint::kHeapNumber) { Label not_heap_number; __ JumpIfNotRoot(map, Heap::kHeapNumberMapRootIndex, ¬_heap_number); @@ -2024,6 +2018,13 @@ void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { Register temp = ToRegister(instr->temp()); + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. + __ Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset)); + __ Ldr(temp, FieldMemOperand(temp, Map::kBitField3Offset)); + __ Tst(temp, Operand(Map::Deprecated::kMask)); + __ B(eq, &deopt); + { PushSafepointRegistersScope scope(this); __ Push(object); @@ -2033,7 +2034,13 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); __ StoreToSafepointRegisterSlot(x0, temp); } - DeoptimizeIfSmi(temp, instr, DeoptimizeReason::kInstanceMigrationFailed); + __ Tst(temp, Operand(kSmiTagMask)); + __ B(ne, &done); + + __ bind(&deopt); + Deoptimize(instr, DeoptimizeReason::kInstanceMigrationFailed); + + __ bind(&done); } @@ -2833,7 +2840,8 @@ void LCodeGen::PrepareForTailCall(const ParameterCount& actual, __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ Ldr(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset)); - __ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + __ Cmp(scratch3, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); __ B(ne, &no_arguments_adaptor); // Drop current frame and load arguments count from arguments adaptor frame. @@ -5439,20 +5447,6 @@ void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { EmitTestAndBranch(instr, eq, scratch, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)); -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory->type##_string())) { \ - DCHECK((instr->temp1() != NULL) && (instr->temp2() != NULL)); \ - Register map = ToRegister(instr->temp1()); \ - \ - __ JumpIfSmi(value, false_label); \ - __ Ldr(map, FieldMemOperand(value, HeapObject::kMapOffset)); \ - __ CompareRoot(map, Heap::k##Type##MapRootIndex); \ - EmitBranch(instr, eq); - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - } else { __ B(false_label); } diff --git a/deps/v8/src/crankshaft/compilation-phase.cc b/deps/v8/src/crankshaft/compilation-phase.cc index 4be0b1a488..11300701b0 100644 --- a/deps/v8/src/crankshaft/compilation-phase.cc +++ b/deps/v8/src/crankshaft/compilation-phase.cc @@ -6,6 +6,7 @@ #include "src/crankshaft/hydrogen.h" #include "src/isolate.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-bce.cc b/deps/v8/src/crankshaft/hydrogen-bce.cc index 7910c5bdae..333fafbf13 100644 --- a/deps/v8/src/crankshaft/hydrogen-bce.cc +++ b/deps/v8/src/crankshaft/hydrogen-bce.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. 
#include "src/crankshaft/hydrogen-bce.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-canonicalize.cc b/deps/v8/src/crankshaft/hydrogen-canonicalize.cc index 4a07357d58..20e771763f 100644 --- a/deps/v8/src/crankshaft/hydrogen-canonicalize.cc +++ b/deps/v8/src/crankshaft/hydrogen-canonicalize.cc @@ -4,7 +4,9 @@ #include "src/crankshaft/hydrogen-canonicalize.h" +#include "src/counters.h" #include "src/crankshaft/hydrogen-redundant-phi.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-check-elimination.cc b/deps/v8/src/crankshaft/hydrogen-check-elimination.cc index 548e4cd8bd..951628e3bb 100644 --- a/deps/v8/src/crankshaft/hydrogen-check-elimination.cc +++ b/deps/v8/src/crankshaft/hydrogen-check-elimination.cc @@ -6,6 +6,7 @@ #include "src/crankshaft/hydrogen-alias-analysis.h" #include "src/crankshaft/hydrogen-flow-engine.h" +#include "src/objects-inl.h" #define GLOBAL 1 diff --git a/deps/v8/src/crankshaft/hydrogen-dce.cc b/deps/v8/src/crankshaft/hydrogen-dce.cc index 3cb9cf4a07..60b41cda76 100644 --- a/deps/v8/src/crankshaft/hydrogen-dce.cc +++ b/deps/v8/src/crankshaft/hydrogen-dce.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. #include "src/crankshaft/hydrogen-dce.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-dehoist.cc b/deps/v8/src/crankshaft/hydrogen-dehoist.cc index 34de94afc5..0fccecc4d3 100644 --- a/deps/v8/src/crankshaft/hydrogen-dehoist.cc +++ b/deps/v8/src/crankshaft/hydrogen-dehoist.cc @@ -5,6 +5,7 @@ #include "src/crankshaft/hydrogen-dehoist.h" #include "src/base/safe_math.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-environment-liveness.cc b/deps/v8/src/crankshaft/hydrogen-environment-liveness.cc index 7965a9432a..e1eb11692f 100644 --- a/deps/v8/src/crankshaft/hydrogen-environment-liveness.cc +++ b/deps/v8/src/crankshaft/hydrogen-environment-liveness.cc @@ -4,7 +4,7 @@ #include "src/crankshaft/hydrogen-environment-liveness.h" - +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-escape-analysis.cc b/deps/v8/src/crankshaft/hydrogen-escape-analysis.cc index ab3bff2edc..91b4ff2b67 100644 --- a/deps/v8/src/crankshaft/hydrogen-escape-analysis.cc +++ b/deps/v8/src/crankshaft/hydrogen-escape-analysis.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. #include "src/crankshaft/hydrogen-escape-analysis.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { @@ -142,7 +143,6 @@ HValue* HEscapeAnalysisPhase::NewMapCheckAndInsert(HCapturedObject* state, // necessary. 
HValue* HEscapeAnalysisPhase::NewLoadReplacement( HLoadNamedField* load, HValue* load_value) { - isolate()->counters()->crankshaft_escape_loads_replaced()->Increment(); HValue* replacement = load_value; Representation representation = load->representation(); if (representation.IsSmiOrInteger32() || representation.IsDouble()) { @@ -320,8 +320,6 @@ void HEscapeAnalysisPhase::Run() { for (int i = 0; i < max_fixpoint_iteration_count; i++) { CollectCapturedValues(); if (captured_.is_empty()) break; - isolate()->counters()->crankshaft_escape_allocs_replaced()->Increment( - captured_.length()); PerformScalarReplacement(); captured_.Rewind(0); } diff --git a/deps/v8/src/crankshaft/hydrogen-gvn.cc b/deps/v8/src/crankshaft/hydrogen-gvn.cc index bf51bad4bb..70320052b0 100644 --- a/deps/v8/src/crankshaft/hydrogen-gvn.cc +++ b/deps/v8/src/crankshaft/hydrogen-gvn.cc @@ -5,8 +5,7 @@ #include "src/crankshaft/hydrogen-gvn.h" #include "src/crankshaft/hydrogen.h" -#include "src/list.h" -#include "src/list-inl.h" +#include "src/objects-inl.h" #include "src/v8.h" namespace v8 { @@ -652,23 +651,19 @@ SideEffects HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock( HBasicBlock* dominator, HBasicBlock* dominated) { SideEffects side_effects; - List<HBasicBlock*> blocks; - for (;;) { - for (int i = 0; i < dominated->predecessors()->length(); ++i) { - HBasicBlock* block = dominated->predecessors()->at(i); - if (dominator->block_id() < block->block_id() && - block->block_id() < dominated->block_id() && - !visited_on_paths_.Contains(block->block_id())) { - visited_on_paths_.Add(block->block_id()); - side_effects.Add(block_side_effects_[block->block_id()]); - if (block->IsLoopHeader()) { - side_effects.Add(loop_side_effects_[block->block_id()]); - } - blocks.Add(block); + for (int i = 0; i < dominated->predecessors()->length(); ++i) { + HBasicBlock* block = dominated->predecessors()->at(i); + if (dominator->block_id() < block->block_id() && + block->block_id() < dominated->block_id() && + !visited_on_paths_.Contains(block->block_id())) { + visited_on_paths_.Add(block->block_id()); + side_effects.Add(block_side_effects_[block->block_id()]); + if (block->IsLoopHeader()) { + side_effects.Add(loop_side_effects_[block->block_id()]); } + side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock( + dominator, block)); } - if (blocks.is_empty()) break; - dominated = blocks.RemoveLast(); } return side_effects; } diff --git a/deps/v8/src/crankshaft/hydrogen-infer-representation.cc b/deps/v8/src/crankshaft/hydrogen-infer-representation.cc index 74f264e17a..bbff24e5d1 100644 --- a/deps/v8/src/crankshaft/hydrogen-infer-representation.cc +++ b/deps/v8/src/crankshaft/hydrogen-infer-representation.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. #include "src/crankshaft/hydrogen-infer-representation.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-infer-types.cc b/deps/v8/src/crankshaft/hydrogen-infer-types.cc index bfd3dd2281..a2fd72e443 100644 --- a/deps/v8/src/crankshaft/hydrogen-infer-types.cc +++ b/deps/v8/src/crankshaft/hydrogen-infer-types.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. 
#include "src/crankshaft/hydrogen-infer-types.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-instructions.cc b/deps/v8/src/crankshaft/hydrogen-instructions.cc index be1ac9a18c..8cf49201d0 100644 --- a/deps/v8/src/crankshaft/hydrogen-instructions.cc +++ b/deps/v8/src/crankshaft/hydrogen-instructions.cc @@ -12,6 +12,7 @@ #include "src/double.h" #include "src/elements.h" #include "src/factory.h" +#include "src/objects-inl.h" #if V8_TARGET_ARCH_IA32 #include "src/crankshaft/ia32/lithium-ia32.h" // NOLINT @@ -1072,9 +1073,9 @@ std::ostream& HReturn::PrintDataTo(std::ostream& os) const { // NOLINT Representation HBranch::observed_input_representation(int index) { - if (expected_input_types_ & (ToBooleanHint::kNull | ToBooleanHint::kReceiver | - ToBooleanHint::kString | ToBooleanHint::kSymbol | - ToBooleanHint::kSimdValue)) { + if (expected_input_types_ & + (ToBooleanHint::kNull | ToBooleanHint::kReceiver | + ToBooleanHint::kString | ToBooleanHint::kSymbol)) { return Representation::Tagged(); } if (expected_input_types_ & ToBooleanHint::kUndefined) { @@ -1244,17 +1245,6 @@ String* TypeOfString(HConstant* constant, Isolate* isolate) { } case SYMBOL_TYPE: return heap->symbol_string(); - case SIMD128_VALUE_TYPE: { - Unique<Map> map = constant->ObjectMap(); -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - if (map.IsKnownGlobal(heap->type##_map())) { \ - return heap->type##_string(); \ - } - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - UNREACHABLE(); - return nullptr; - } default: if (constant->IsUndetectable()) return heap->undefined_string(); if (constant->IsCallable()) return heap->function_string(); @@ -2177,6 +2167,11 @@ HConstant::HConstant(Special special) InstanceTypeField::encode(kUnknownInstanceType)), int32_value_(0) { DCHECK_EQ(kHoleNaN, special); + // Manipulating the signaling NaN used for the hole in C++, e.g. with bit_cast + // will change its value on ia32 (the x87 stack is used to return values + // and stores to the stack silently clear the signalling bit). + // Therefore we have to use memcpy for initializing |double_value_| with + // kHoleNanInt64 here. 
std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_)); Initialize(Representation::Double()); } diff --git a/deps/v8/src/crankshaft/hydrogen-instructions.h b/deps/v8/src/crankshaft/hydrogen-instructions.h index b20bc9b53f..7059425cb0 100644 --- a/deps/v8/src/crankshaft/hydrogen-instructions.h +++ b/deps/v8/src/crankshaft/hydrogen-instructions.h @@ -3086,11 +3086,8 @@ class HConstant final : public HTemplateInstruction<0> { return double_value_; } uint64_t DoubleValueAsBits() const { - uint64_t bits; DCHECK(HasDoubleValue()); - STATIC_ASSERT(sizeof(bits) == sizeof(double_value_)); - std::memcpy(&bits, &double_value_, sizeof(bits)); - return bits; + return bit_cast<uint64_t>(double_value_); } bool IsTheHole() const { if (HasDoubleValue() && DoubleValueAsBits() == kHoleNanInt64) { @@ -5125,10 +5122,6 @@ class HObjectAccess final { return HObjectAccess(kElementsPointer, JSObject::kElementsOffset); } - static HObjectAccess ForLiteralsPointer() { - return HObjectAccess(kInobject, JSFunction::kLiteralsOffset); - } - static HObjectAccess ForNextFunctionLinkPointer() { return HObjectAccess(kInobject, JSFunction::kNextFunctionLinkOffset); } diff --git a/deps/v8/src/crankshaft/hydrogen-load-elimination.cc b/deps/v8/src/crankshaft/hydrogen-load-elimination.cc index 88963fc18b..99f4947a84 100644 --- a/deps/v8/src/crankshaft/hydrogen-load-elimination.cc +++ b/deps/v8/src/crankshaft/hydrogen-load-elimination.cc @@ -7,6 +7,7 @@ #include "src/crankshaft/hydrogen-alias-analysis.h" #include "src/crankshaft/hydrogen-flow-engine.h" #include "src/crankshaft/hydrogen-instructions.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-mark-unreachable.cc b/deps/v8/src/crankshaft/hydrogen-mark-unreachable.cc index 4e1dd689ee..2393b5a8a4 100644 --- a/deps/v8/src/crankshaft/hydrogen-mark-unreachable.cc +++ b/deps/v8/src/crankshaft/hydrogen-mark-unreachable.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. #include "src/crankshaft/hydrogen-mark-unreachable.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-osr.cc b/deps/v8/src/crankshaft/hydrogen-osr.cc index 607bfbd85d..093f94b83f 100644 --- a/deps/v8/src/crankshaft/hydrogen-osr.cc +++ b/deps/v8/src/crankshaft/hydrogen-osr.cc @@ -5,6 +5,7 @@ #include "src/crankshaft/hydrogen-osr.h" #include "src/crankshaft/hydrogen.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-range-analysis.cc b/deps/v8/src/crankshaft/hydrogen-range-analysis.cc index a489e014eb..50592d32ca 100644 --- a/deps/v8/src/crankshaft/hydrogen-range-analysis.cc +++ b/deps/v8/src/crankshaft/hydrogen-range-analysis.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. #include "src/crankshaft/hydrogen-range-analysis.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-redundant-phi.cc b/deps/v8/src/crankshaft/hydrogen-redundant-phi.cc index ef8b29159d..08644c874c 100644 --- a/deps/v8/src/crankshaft/hydrogen-redundant-phi.cc +++ b/deps/v8/src/crankshaft/hydrogen-redundant-phi.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. 
#include "src/crankshaft/hydrogen-redundant-phi.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-removable-simulates.cc b/deps/v8/src/crankshaft/hydrogen-removable-simulates.cc index ceef7430eb..e68168cf9c 100644 --- a/deps/v8/src/crankshaft/hydrogen-removable-simulates.cc +++ b/deps/v8/src/crankshaft/hydrogen-removable-simulates.cc @@ -6,6 +6,7 @@ #include "src/crankshaft/hydrogen-flow-engine.h" #include "src/crankshaft/hydrogen-instructions.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-representation-changes.cc b/deps/v8/src/crankshaft/hydrogen-representation-changes.cc index 4d74df4952..5fd72618fa 100644 --- a/deps/v8/src/crankshaft/hydrogen-representation-changes.cc +++ b/deps/v8/src/crankshaft/hydrogen-representation-changes.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. #include "src/crankshaft/hydrogen-representation-changes.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-sce.cc b/deps/v8/src/crankshaft/hydrogen-sce.cc index 91e91d2033..a08190de3e 100644 --- a/deps/v8/src/crankshaft/hydrogen-sce.cc +++ b/deps/v8/src/crankshaft/hydrogen-sce.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. #include "src/crankshaft/hydrogen-sce.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-store-elimination.cc b/deps/v8/src/crankshaft/hydrogen-store-elimination.cc index 57c7880aa7..b081c21984 100644 --- a/deps/v8/src/crankshaft/hydrogen-store-elimination.cc +++ b/deps/v8/src/crankshaft/hydrogen-store-elimination.cc @@ -5,6 +5,7 @@ #include "src/crankshaft/hydrogen-store-elimination.h" #include "src/crankshaft/hydrogen-instructions.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen-uint32-analysis.cc b/deps/v8/src/crankshaft/hydrogen-uint32-analysis.cc index ac4a63f8f2..de31a616c1 100644 --- a/deps/v8/src/crankshaft/hydrogen-uint32-analysis.cc +++ b/deps/v8/src/crankshaft/hydrogen-uint32-analysis.cc @@ -3,6 +3,7 @@ // found in the LICENSE file. 
#include "src/crankshaft/hydrogen-uint32-analysis.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/hydrogen.cc b/deps/v8/src/crankshaft/hydrogen.cc index ea3401c90c..d55bb37c39 100644 --- a/deps/v8/src/crankshaft/hydrogen.cc +++ b/deps/v8/src/crankshaft/hydrogen.cc @@ -5287,10 +5287,11 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) { return; } else { Handle<FeedbackVector> vector(current_feedback_vector(), isolate()); + FeedbackSlot slot = expr->VariableFeedbackSlot(); + DCHECK(vector->IsLoadGlobalIC(slot)); HValue* vector_value = Add<HConstant>(vector); - HValue* slot_value = - Add<HConstant>(vector->GetIndex(expr->VariableFeedbackSlot())); + HValue* slot_value = Add<HConstant>(vector->GetIndex(slot)); Callable callable = CodeFactory::LoadGlobalICInOptimizedCode( isolate(), ast_context()->typeof_mode()); HValue* stub = Add<HConstant>(callable.code()); @@ -5354,7 +5355,8 @@ void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) { DCHECK(current_block() != NULL); DCHECK(current_block()->HasPredecessor()); Callable callable = CodeFactory::FastCloneRegExp(isolate()); - HValue* values[] = {AddThisFunction(), Add<HConstant>(expr->literal_index()), + int index = FeedbackVector::GetIndex(expr->literal_slot()); + HValue* values[] = {AddThisFunction(), Add<HConstant>(index), Add<HConstant>(expr->pattern()), Add<HConstant>(expr->flags())}; HConstant* stub_value = Add<HConstant>(callable.code()); @@ -5455,7 +5457,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { // Check whether to use fast or slow deep-copying for boilerplate. int max_properties = kMaxFastLiteralProperties; Handle<Object> literals_cell( - closure->literals()->literal(expr->literal_index()), isolate()); + closure->feedback_vector()->Get(expr->literal_slot()), isolate()); Handle<AllocationSite> site; Handle<JSObject> boilerplate; if (!literals_cell->IsUndefined(isolate())) { @@ -5473,9 +5475,9 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { site_context.ExitScope(site, boilerplate); } else { NoObservableSideEffectsScope no_effects(this); - Handle<FixedArray> constant_properties = + Handle<BoilerplateDescription> constant_properties = expr->GetOrBuildConstantProperties(isolate()); - int literal_index = expr->literal_index(); + int literal_index = FeedbackVector::GetIndex(expr->literal_slot()); int flags = expr->ComputeFlags(true); Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index), @@ -5513,7 +5515,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { Handle<Map> map = property->GetReceiverType(); Handle<String> name = key->AsPropertyName(); HValue* store; - FeedbackVectorSlot slot = property->GetSlot(); + FeedbackSlot slot = property->GetSlot(); if (map.is_null()) { // If we don't know the monomorphic type, do a generic store. 
CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal, @@ -5527,6 +5529,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { store = BuildMonomorphicAccess( &info, literal, checked_literal, value, BailoutId::None(), BailoutId::None()); + DCHECK_NOT_NULL(store); } else { CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal, name, value)); @@ -5574,10 +5577,9 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { HInstruction* literal; Handle<AllocationSite> site; - Handle<LiteralsArray> literals(environment()->closure()->literals(), - isolate()); - Handle<Object> literals_cell(literals->literal(expr->literal_index()), - isolate()); + Handle<FeedbackVector> vector(environment()->closure()->feedback_vector(), + isolate()); + Handle<Object> literals_cell(vector->Get(expr->literal_slot()), isolate()); Handle<JSObject> boilerplate_object; if (!literals_cell->IsUndefined(isolate())) { DCHECK(literals_cell->IsAllocationSite()); @@ -5600,7 +5602,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { NoObservableSideEffectsScope no_effects(this); Handle<ConstantElementsPair> constants = expr->GetOrBuildConstantElements(isolate()); - int literal_index = expr->literal_index(); + int literal_index = FeedbackVector::GetIndex(expr->literal_slot()); int flags = expr->ComputeFlags(true); Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index), @@ -5678,7 +5680,7 @@ HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object, HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField( PropertyAccessInfo* info, HValue* checked_object) { - // See if this is a load for an immutable property + // Check if this is a load of an immutable or constant property. if (checked_object->ActualValue()->IsConstant()) { Handle<Object> object( HConstant::cast(checked_object->ActualValue())->handle(isolate())); @@ -5686,9 +5688,20 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField( if (object->IsJSObject()) { LookupIterator it(object, info->name(), LookupIterator::OWN_SKIP_INTERCEPTOR); - Handle<Object> value = JSReceiver::GetDataProperty(&it); - if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) { - return New<HConstant>(value); + if (it.IsFound()) { + bool is_reaonly_non_configurable = + it.IsReadOnly() && !it.IsConfigurable(); + if (is_reaonly_non_configurable || + (FLAG_track_constant_fields && info->IsDataConstantField())) { + Handle<Object> value = JSReceiver::GetDataProperty(&it); + if (!is_reaonly_non_configurable) { + DCHECK(!it.is_dictionary_holder()); + // Add dependency on the map that introduced the field. + Handle<Map> field_owner_map = it.GetFieldOwnerMap(); + top_info()->dependencies()->AssumeFieldOwner(field_owner_map); + } + return New<HConstant>(value); + } } } } @@ -5717,15 +5730,17 @@ HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField( checked_object, checked_object, access, maps, info->field_type()); } - -HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField( - PropertyAccessInfo* info, - HValue* checked_object, - HValue* value) { +HValue* HOptimizedGraphBuilder::BuildStoreNamedField(PropertyAccessInfo* info, + HValue* checked_object, + HValue* value) { bool transition_to_field = info->IsTransition(); // TODO(verwaest): Move this logic into PropertyAccessInfo. 
HObjectAccess field_access = info->access(); + bool store_to_constant_field = FLAG_track_constant_fields && + info->StoreMode() != INITIALIZING_STORE && + info->IsDataConstantField(); + HStoreNamedField *instr; if (field_access.representation().IsDouble() && (!FLAG_unbox_double_fields || !field_access.IsInobject())) { @@ -5751,23 +5766,57 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField( // Already holds a HeapNumber; load the box and write its value field. HInstruction* heap_number = Add<HLoadNamedField>(checked_object, nullptr, heap_number_access); - instr = New<HStoreNamedField>(heap_number, - HObjectAccess::ForHeapNumberValue(), - value, STORE_TO_INITIALIZED_ENTRY); + + if (store_to_constant_field) { + // If the field is constant check that the value we are going to store + // matches current value. + HInstruction* current_value = Add<HLoadNamedField>( + heap_number, nullptr, HObjectAccess::ForHeapNumberValue()); + IfBuilder value_checker(this); + value_checker.IfNot<HCompareNumericAndBranch>(current_value, value, + Token::EQ); + value_checker.ThenDeopt(DeoptimizeReason::kValueMismatch); + value_checker.End(); + return nullptr; + + } else { + instr = New<HStoreNamedField>(heap_number, + HObjectAccess::ForHeapNumberValue(), + value, STORE_TO_INITIALIZED_ENTRY); + } } } else { - if (field_access.representation().IsHeapObject()) { - BuildCheckHeapObject(value); - } + if (store_to_constant_field) { + // If the field is constant check that the value we are going to store + // matches current value. + HInstruction* current_value = Add<HLoadNamedField>( + checked_object->ActualValue(), checked_object, field_access); + + IfBuilder value_checker(this); + if (field_access.representation().IsDouble()) { + value_checker.IfNot<HCompareNumericAndBranch>(current_value, value, + Token::EQ); + } else { + value_checker.IfNot<HCompareObjectEqAndBranch>(current_value, value); + } + value_checker.ThenDeopt(DeoptimizeReason::kValueMismatch); + value_checker.End(); + return nullptr; - if (!info->field_maps()->is_empty()) { - DCHECK(field_access.representation().IsHeapObject()); - value = Add<HCheckMaps>(value, info->field_maps()); - } + } else { + if (field_access.representation().IsHeapObject()) { + BuildCheckHeapObject(value); + } - // This is a normal store. - instr = New<HStoreNamedField>(checked_object->ActualValue(), field_access, - value, info->StoreMode()); + if (!info->field_maps()->is_empty()) { + DCHECK(field_access.representation().IsHeapObject()); + value = Add<HCheckMaps>(value, info->field_maps()); + } + + // This is a normal store. + instr = New<HStoreNamedField>(checked_object->ActualValue(), field_access, + value, info->StoreMode()); + } } if (transition_to_field) { @@ -6151,9 +6200,8 @@ HValue* HOptimizedGraphBuilder::BuildMonomorphicAccess( } } - void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess( - PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot, + PropertyAccessType access_type, Expression* expr, FeedbackSlot slot, BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value, SmallMapList* maps, Handle<Name> name) { // Something did not match; must use a polymorphic load. @@ -6351,8 +6399,8 @@ static bool AreStringTypes(SmallMapList* maps) { } void HOptimizedGraphBuilder::BuildStore(Expression* expr, Property* prop, - FeedbackVectorSlot slot, - BailoutId ast_id, BailoutId return_id, + FeedbackSlot slot, BailoutId ast_id, + BailoutId return_id, bool is_uninitialized) { if (!prop->key()->IsPropertyName()) { // Keyed store. 
@@ -6471,8 +6519,10 @@ HInstruction* HOptimizedGraphBuilder::InlineGlobalPropertyStore( // Because not every expression has a position and there is not common // superclass of Assignment and CountOperation, we cannot just pass the // owning expression instead of position and ast_id separately. -void HOptimizedGraphBuilder::HandleGlobalVariableAssignment( - Variable* var, HValue* value, FeedbackVectorSlot slot, BailoutId ast_id) { +void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(Variable* var, + HValue* value, + FeedbackSlot slot, + BailoutId ast_id) { Handle<JSGlobalObject> global(current_info()->global_object()); // Lookup in script contexts. @@ -6523,6 +6573,7 @@ void HOptimizedGraphBuilder::HandleGlobalVariableAssignment( HValue* name = Add<HConstant>(var->name()); HValue* vector_value = Add<HConstant>(vector); HValue* slot_value = Add<HConstant>(vector->GetIndex(slot)); + DCHECK_EQ(vector->GetLanguageMode(slot), function_language_mode()); Callable callable = CodeFactory::StoreICInOptimizedCode( isolate(), function_language_mode()); HValue* stub = Add<HConstant>(callable.code()); @@ -6818,9 +6869,8 @@ HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* string) { HObjectAccess::ForStringLength()); } - HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric( - PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot, + PropertyAccessType access_type, Expression* expr, FeedbackSlot slot, HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized) { if (is_uninitialized) { Add<HDeoptimize>( @@ -6836,6 +6886,7 @@ HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric( if (access_type == LOAD) { HValue* values[] = {object, key, slot_value, vector_value}; if (!expr->AsProperty()->key()->IsPropertyName()) { + DCHECK(vector->IsKeyedLoadIC(slot)); // It's possible that a keyed load of a constant string was converted // to a named load. Here, at the last minute, we need to make sure to // use a generic Keyed Load if we are using the type vector, because @@ -6847,6 +6898,7 @@ HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric( callable.descriptor(), ArrayVector(values)); return result; } + DCHECK(vector->IsLoadIC(slot)); Callable callable = CodeFactory::LoadICInOptimizedCode(isolate()); HValue* stub = Add<HConstant>(callable.code()); HCallWithDescriptor* result = New<HCallWithDescriptor>( @@ -6855,11 +6907,12 @@ HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric( } else { HValue* values[] = {object, key, value, slot_value, vector_value}; - if (vector->GetKind(slot) == FeedbackVectorSlotKind::KEYED_STORE_IC) { + if (vector->IsKeyedStoreIC(slot)) { // It's possible that a keyed store of a constant string was converted // to a named store. Here, at the last minute, we need to make sure to // use a generic Keyed Store if we are using the type vector, because // it has to share information with full code. 
+ DCHECK_EQ(vector->GetLanguageMode(slot), function_language_mode()); Callable callable = CodeFactory::KeyedStoreICInOptimizedCode( isolate(), function_language_mode()); HValue* stub = Add<HConstant>(callable.code()); @@ -6868,18 +6921,27 @@ HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric( callable.descriptor(), ArrayVector(values)); return result; } - Callable callable = CodeFactory::StoreICInOptimizedCode( - isolate(), function_language_mode()); - HValue* stub = Add<HConstant>(callable.code()); - HCallWithDescriptor* result = New<HCallWithDescriptor>( - Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values)); + HCallWithDescriptor* result; + if (vector->IsStoreOwnIC(slot)) { + Callable callable = CodeFactory::StoreOwnICInOptimizedCode(isolate()); + HValue* stub = Add<HConstant>(callable.code()); + result = New<HCallWithDescriptor>( + Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values)); + } else { + DCHECK(vector->IsStoreIC(slot)); + DCHECK_EQ(vector->GetLanguageMode(slot), function_language_mode()); + Callable callable = CodeFactory::StoreICInOptimizedCode( + isolate(), function_language_mode()); + HValue* stub = Add<HConstant>(callable.code()); + result = New<HCallWithDescriptor>( + Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values)); + } return result; } } - HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric( - PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot, + PropertyAccessType access_type, Expression* expr, FeedbackSlot slot, HValue* object, HValue* key, HValue* value) { Handle<FeedbackVector> vector(current_feedback_vector(), isolate()); HValue* vector_value = Add<HConstant>(vector); @@ -7074,9 +7136,8 @@ HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad( return instr; } - HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess( - Expression* expr, FeedbackVectorSlot slot, HValue* object, HValue* key, + Expression* expr, FeedbackSlot slot, HValue* object, HValue* key, HValue* val, SmallMapList* maps, PropertyAccessType access_type, KeyedAccessStoreMode store_mode, bool* has_side_effects) { *has_side_effects = false; @@ -7211,9 +7272,9 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess( } HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess( - HValue* obj, HValue* key, HValue* val, Expression* expr, - FeedbackVectorSlot slot, BailoutId ast_id, BailoutId return_id, - PropertyAccessType access_type, bool* has_side_effects) { + HValue* obj, HValue* key, HValue* val, Expression* expr, FeedbackSlot slot, + BailoutId ast_id, BailoutId return_id, PropertyAccessType access_type, + bool* has_side_effects) { // A keyed name access with type feedback may contain the name. 
Handle<FeedbackVector> vector = handle(current_feedback_vector(), isolate()); HValue* expected_key = key; @@ -7439,8 +7500,8 @@ bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) { HValue* HOptimizedGraphBuilder::BuildNamedAccess( PropertyAccessType access, BailoutId ast_id, BailoutId return_id, - Expression* expr, FeedbackVectorSlot slot, HValue* object, - Handle<Name> name, HValue* value, bool is_uninitialized) { + Expression* expr, FeedbackSlot slot, HValue* object, Handle<Name> name, + HValue* value, bool is_uninitialized) { SmallMapList* maps; ComputeReceiverTypes(expr, object, &maps, this); DCHECK(maps != NULL); @@ -7667,7 +7728,7 @@ HInstruction* HOptimizedGraphBuilder::NewCallFunction( HInstruction* HOptimizedGraphBuilder::NewCallFunctionViaIC( HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode, ConvertReceiverMode convert_mode, TailCallMode tail_call_mode, - FeedbackVectorSlot slot) { + FeedbackSlot slot) { if (syntactic_tail_call_mode == TailCallMode::kAllow) { BuildEnsureCallable(function); } else { @@ -7680,8 +7741,8 @@ HInstruction* HOptimizedGraphBuilder::NewCallFunctionViaIC( HValue* vector_val = Add<HConstant>(vector); HValue* op_vals[] = {function, arity_val, index_val, vector_val}; - Callable callable = CodeFactory::CallICInOptimizedCode( - isolate(), convert_mode, tail_call_mode); + Callable callable = + CodeFactory::CallIC(isolate(), convert_mode, tail_call_mode); HConstant* stub = Add<HConstant>(callable.code()); return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(), @@ -8034,12 +8095,12 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target, // Use the same AstValueFactory for creating strings in the sub-compilation // step, but don't transfer ownership to target_info. 
Handle<SharedFunctionInfo> target_shared(target->shared()); - ParseInfo parse_info(zone(), target_shared); + ParseInfo parse_info(target_shared, top_info()->parse_info()->zone_shared()); parse_info.set_ast_value_factory( top_info()->parse_info()->ast_value_factory()); parse_info.set_ast_value_factory_owned(false); - CompilationInfo target_info(&parse_info, target); + CompilationInfo target_info(parse_info.zone(), &parse_info, target); if (inlining_kind != CONSTRUCT_CALL_RETURN && IsClassConstructor(target_shared->kind())) { @@ -10400,7 +10461,7 @@ HInstruction* HOptimizedGraphBuilder::BuildIncrement(CountOperation* expr) { } void HOptimizedGraphBuilder::BuildStoreForEffect( - Expression* expr, Property* prop, FeedbackVectorSlot slot, BailoutId ast_id, + Expression* expr, Property* prop, FeedbackSlot slot, BailoutId ast_id, BailoutId return_id, HValue* object, HValue* key, HValue* value) { EffectContext for_effect(this); Push(object); @@ -11133,11 +11194,9 @@ bool IsLiteralCompareStrict(Isolate* isolate, HValue* left, Token::Value op, return op == Token::EQ_STRICT && ((left->IsConstant() && !HConstant::cast(left)->handle(isolate)->IsNumber() && - !HConstant::cast(left)->handle(isolate)->IsSimd128Value() && !HConstant::cast(left)->handle(isolate)->IsString()) || (right->IsConstant() && !HConstant::cast(right)->handle(isolate)->IsNumber() && - !HConstant::cast(right)->handle(isolate)->IsSimd128Value() && !HConstant::cast(right)->handle(isolate)->IsString())); } @@ -11846,10 +11905,11 @@ void HOptimizedGraphBuilder::VisitVariableDeclaration( case VariableLocation::UNALLOCATED: { DCHECK(!variable->binding_needs_init()); globals_.Add(variable->name(), zone()); - FeedbackVectorSlot slot = proxy->VariableFeedbackSlot(); + FeedbackSlot slot = proxy->VariableFeedbackSlot(); DCHECK(!slot.IsInvalid()); globals_.Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone()); globals_.Add(isolate()->factory()->undefined_value(), zone()); + globals_.Add(isolate()->factory()->undefined_value(), zone()); return; } case VariableLocation::PARAMETER: @@ -11885,9 +11945,15 @@ void HOptimizedGraphBuilder::VisitFunctionDeclaration( switch (variable->location()) { case VariableLocation::UNALLOCATED: { globals_.Add(variable->name(), zone()); - FeedbackVectorSlot slot = proxy->VariableFeedbackSlot(); + FeedbackSlot slot = proxy->VariableFeedbackSlot(); DCHECK(!slot.IsInvalid()); globals_.Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone()); + + // We need the slot where the literals array lives, too. + slot = declaration->fun()->LiteralFeedbackSlot(); + DCHECK(!slot.IsInvalid()); + globals_.Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone()); + Handle<SharedFunctionInfo> function = Compiler::GetSharedFunctionInfo( declaration->fun(), current_info()->script(), top_info()); // Check for stack-overflow exception. @@ -12117,32 +12183,6 @@ void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) { return ast_context()->ReturnInstruction(result, call->id()); } -// Support for direct calls from JavaScript to native RegExp code. 
-void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) { - DCHECK_EQ(4, call->arguments()->length()); - CHECK_ALIVE(VisitExpressions(call->arguments())); - Callable callable = CodeFactory::RegExpExec(isolate()); - HValue* last_match_info = Pop(); - HValue* index = Pop(); - HValue* subject = Pop(); - HValue* regexp_object = Pop(); - HValue* stub = Add<HConstant>(callable.code()); - HValue* values[] = {regexp_object, subject, index, last_match_info}; - HInstruction* result = New<HCallWithDescriptor>( - stub, 0, callable.descriptor(), ArrayVector(values)); - return ast_context()->ReturnInstruction(result, call->id()); -} - - -// Fast support for number to string. -void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) { - DCHECK_EQ(1, call->arguments()->length()); - CHECK_ALIVE(VisitForValue(call->arguments()->at(0))); - HValue* number = Pop(); - HValue* result = BuildNumberToString(number, AstType::Any()); - return ast_context()->ReturnValue(result); -} - // Fast support for calls. void HOptimizedGraphBuilder::GenerateCall(CallRuntime* call) { diff --git a/deps/v8/src/crankshaft/hydrogen.h b/deps/v8/src/crankshaft/hydrogen.h index 97cb9f2d80..2ce6454f13 100644 --- a/deps/v8/src/crankshaft/hydrogen.h +++ b/deps/v8/src/crankshaft/hydrogen.h @@ -12,10 +12,13 @@ #include "src/bailout-reason.h" #include "src/compilation-info.h" #include "src/compiler.h" +#include "src/counters.h" #include "src/crankshaft/compilation-phase.h" #include "src/crankshaft/hydrogen-instructions.h" #include "src/globals.h" #include "src/parsing/parse-info.h" +#include "src/string-stream.h" +#include "src/transitions.h" #include "src/zone/zone.h" namespace v8 { @@ -37,9 +40,8 @@ class HCompilationJob final : public CompilationJob { public: explicit HCompilationJob(Handle<JSFunction> function) : CompilationJob(function->GetIsolate(), &info_, "Crankshaft"), - zone_(function->GetIsolate()->allocator(), ZONE_NAME), - parse_info_(&zone_, handle(function->shared())), - info_(&parse_info_, function), + parse_info_(handle(function->shared())), + info_(parse_info_.zone(), &parse_info_, function), graph_(nullptr), chunk_(nullptr) {} @@ -49,7 +51,6 @@ class HCompilationJob final : public CompilationJob { virtual Status FinalizeJobImpl(); private: - Zone zone_; ParseInfo parse_info_; CompilationInfo info_; HGraph* graph_; @@ -2167,8 +2168,6 @@ class HOptimizedGraphBuilder : public HGraphBuilder, F(DebugBreakInOptimizedCode) \ F(StringCharCodeAt) \ F(SubString) \ - F(RegExpExec) \ - F(NumberToString) \ F(DebugIsActive) \ /* Typed Arrays */ \ F(TypedArrayInitialize) \ @@ -2387,15 +2386,16 @@ class HOptimizedGraphBuilder : public HGraphBuilder, TailCallMode tail_call_mode = TailCallMode::kDisallow); void HandleGlobalVariableAssignment(Variable* var, HValue* value, - FeedbackVectorSlot slot, - BailoutId ast_id); + FeedbackSlot slot, BailoutId ast_id); void HandlePropertyAssignment(Assignment* expr); void HandleCompoundAssignment(Assignment* expr); - void HandlePolymorphicNamedFieldAccess( - PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot, - BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value, - SmallMapList* types, Handle<Name> name); + void HandlePolymorphicNamedFieldAccess(PropertyAccessType access_type, + Expression* expr, FeedbackSlot slot, + BailoutId ast_id, BailoutId return_id, + HValue* object, HValue* value, + SmallMapList* types, + Handle<Name> name); HValue* BuildAllocateExternalElements( ExternalArrayType array_type, @@ -2525,6 +2525,12 @@ class 
HOptimizedGraphBuilder : public HGraphBuilder, bool IsFound() const { return lookup_type_ != NOT_FOUND; } bool IsProperty() const { return IsFound() && !IsTransition(); } bool IsTransition() const { return lookup_type_ == TRANSITION_TYPE; } + // TODO(ishell): rename to IsDataConstant() once constant field tracking + // is done. + bool IsDataConstantField() const { + return lookup_type_ == DESCRIPTOR_TYPE && details_.kind() == kData && + details_.location() == kField && details_.constness() == kConst; + } bool IsData() const { return lookup_type_ == DESCRIPTOR_TYPE && details_.kind() == kData && details_.location() == kField; @@ -2641,9 +2647,8 @@ class HOptimizedGraphBuilder : public HGraphBuilder, HValue* BuildNamedAccess(PropertyAccessType access, BailoutId ast_id, BailoutId reutrn_id, Expression* expr, - FeedbackVectorSlot slot, HValue* object, - Handle<Name> name, HValue* value, - bool is_uninitialized = false); + FeedbackSlot slot, HValue* object, Handle<Name> name, + HValue* value, bool is_uninitialized = false); void HandlePolymorphicCallNamed(Call* expr, HValue* receiver, @@ -2677,7 +2682,7 @@ class HOptimizedGraphBuilder : public HGraphBuilder, PushBeforeSimulateBehavior push_sim_result); HInstruction* BuildIncrement(CountOperation* expr); HInstruction* BuildKeyedGeneric(PropertyAccessType access_type, - Expression* expr, FeedbackVectorSlot slot, + Expression* expr, FeedbackSlot slot, HValue* object, HValue* key, HValue* value); HInstruction* TryBuildConsolidatedElementLoad(HValue* object, @@ -2695,19 +2700,21 @@ class HOptimizedGraphBuilder : public HGraphBuilder, PropertyAccessType access_type, KeyedAccessStoreMode store_mode); - HValue* HandlePolymorphicElementAccess( - Expression* expr, FeedbackVectorSlot slot, HValue* object, HValue* key, - HValue* val, SmallMapList* maps, PropertyAccessType access_type, - KeyedAccessStoreMode store_mode, bool* has_side_effects); + HValue* HandlePolymorphicElementAccess(Expression* expr, FeedbackSlot slot, + HValue* object, HValue* key, + HValue* val, SmallMapList* maps, + PropertyAccessType access_type, + KeyedAccessStoreMode store_mode, + bool* has_side_effects); HValue* HandleKeyedElementAccess(HValue* obj, HValue* key, HValue* val, - Expression* expr, FeedbackVectorSlot slot, + Expression* expr, FeedbackSlot slot, BailoutId ast_id, BailoutId return_id, PropertyAccessType access_type, bool* has_side_effects); HInstruction* BuildNamedGeneric(PropertyAccessType access, Expression* expr, - FeedbackVectorSlot slot, HValue* object, + FeedbackSlot slot, HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized = false); @@ -2720,19 +2727,18 @@ class HOptimizedGraphBuilder : public HGraphBuilder, HValue* key); void BuildStoreForEffect(Expression* expression, Property* prop, - FeedbackVectorSlot slot, BailoutId ast_id, + FeedbackSlot slot, BailoutId ast_id, BailoutId return_id, HValue* object, HValue* key, HValue* value); - void BuildStore(Expression* expression, Property* prop, - FeedbackVectorSlot slot, BailoutId ast_id, - BailoutId return_id, bool is_uninitialized = false); + void BuildStore(Expression* expression, Property* prop, FeedbackSlot slot, + BailoutId ast_id, BailoutId return_id, + bool is_uninitialized = false); HInstruction* BuildLoadNamedField(PropertyAccessInfo* info, HValue* checked_object); - HInstruction* BuildStoreNamedField(PropertyAccessInfo* info, - HValue* checked_object, - HValue* value); + HValue* BuildStoreNamedField(PropertyAccessInfo* info, HValue* checked_object, + HValue* value); HValue* 
BuildContextChainWalk(Variable* var); @@ -2778,7 +2784,7 @@ class HOptimizedGraphBuilder : public HGraphBuilder, TailCallMode syntactic_tail_call_mode, ConvertReceiverMode convert_mode, TailCallMode tail_call_mode, - FeedbackVectorSlot slot); + FeedbackSlot slot); HInstruction* NewCallConstantFunction(Handle<JSFunction> target, int argument_count, diff --git a/deps/v8/src/crankshaft/ia32/lithium-codegen-ia32.cc b/deps/v8/src/crankshaft/ia32/lithium-codegen-ia32.cc index 978ae2f1c2..d5b87492c5 100644 --- a/deps/v8/src/crankshaft/ia32/lithium-codegen-ia32.cc +++ b/deps/v8/src/crankshaft/ia32/lithium-codegen-ia32.cc @@ -309,7 +309,7 @@ bool LCodeGen::GenerateJumpTable() { // building, install a special marker there instead. DCHECK(info()->IsStub()); __ mov(MemOperand(esp, 2 * kPointerSize), - Immediate(Smi::FromInt(StackFrame::STUB))); + Immediate(StackFrame::TypeToMarker(StackFrame::STUB))); /* stack layout 3: old ebp @@ -346,7 +346,7 @@ bool LCodeGen::GenerateDeferredCode() { frame_is_built_ = true; // Build the frame in such a way that esi isn't trashed. __ push(ebp); // Caller's frame pointer. - __ push(Immediate(Smi::FromInt(StackFrame::STUB))); + __ push(Immediate(StackFrame::TypeToMarker(StackFrame::STUB))); __ lea(ebp, Operand(esp, TypedFrameConstants::kFixedFrameSizeFromFp)); Comment(";;; Deferred code"); } @@ -1927,12 +1927,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ j(equal, instr->TrueLabel(chunk_)); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. - __ CmpInstanceType(map, SIMD128_VALUE_TYPE); - __ j(equal, instr->TrueLabel(chunk_)); - } - if (expected & ToBooleanHint::kHeapNumber) { // heap number -> false iff +0, -0, or NaN. Label not_heap_number; @@ -2696,7 +2690,7 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { __ mov(result, Operand(result, CommonFrameConstants::kContextOrFrameTypeOffset)); __ cmp(Operand(result), - Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); __ j(equal, &adapted, Label::kNear); // No arguments adaptor frame. @@ -3398,7 +3392,7 @@ void LCodeGen::PrepareForTailCall(const ParameterCount& actual, Label no_arguments_adaptor, formal_parameter_count_loaded; __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); __ cmp(Operand(scratch2, StandardFrameConstants::kContextOffset), - Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); __ j(not_equal, &no_arguments_adaptor, Label::kNear); // Drop current frame and load arguments count from arguments adaptor frame. @@ -4548,6 +4542,15 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. 
+ __ push(object); + __ mov(object, FieldOperand(object, HeapObject::kMapOffset)); + __ test(FieldOperand(object, Map::kBitField3Offset), + Immediate(Map::Deprecated::kMask)); + __ pop(object); + __ j(zero, &deopt); + { PushSafepointRegistersScope scope(this); __ push(object); @@ -4558,7 +4561,12 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { __ test(eax, Immediate(kSmiTagMask)); } - DeoptimizeIf(zero, instr, DeoptimizeReason::kInstanceMigrationFailed); + __ j(not_zero, &done); + + __ bind(&deopt); + DeoptimizeIf(no_condition, instr, DeoptimizeReason::kInstanceMigrationFailed); + + __ bind(&done); } @@ -4899,18 +4907,6 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { __ test_b(FieldOperand(input, Map::kBitFieldOffset), Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); final_branch_condition = zero; - -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory()->type##_string())) { \ - __ JumpIfSmi(input, false_label, false_distance); \ - __ cmp(FieldOperand(input, HeapObject::kMapOffset), \ - factory()->type##_map()); \ - final_branch_condition = equal; - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - } else { __ jmp(false_label, false_distance); } diff --git a/deps/v8/src/crankshaft/lithium-allocator.cc b/deps/v8/src/crankshaft/lithium-allocator.cc index aa4459b23a..201c6062a8 100644 --- a/deps/v8/src/crankshaft/lithium-allocator.cc +++ b/deps/v8/src/crankshaft/lithium-allocator.cc @@ -5,8 +5,9 @@ #include "src/crankshaft/lithium-allocator.h" #include "src/crankshaft/hydrogen.h" -#include "src/crankshaft/lithium-inl.h" #include "src/crankshaft/lithium-allocator-inl.h" +#include "src/crankshaft/lithium-inl.h" +#include "src/objects-inl.h" #include "src/register-configuration.h" #include "src/string-stream.h" diff --git a/deps/v8/src/crankshaft/lithium-codegen.cc b/deps/v8/src/crankshaft/lithium-codegen.cc index 2d165601d7..9569660357 100644 --- a/deps/v8/src/crankshaft/lithium-codegen.cc +++ b/deps/v8/src/crankshaft/lithium-codegen.cc @@ -6,6 +6,8 @@ #include <sstream> +#include "src/objects-inl.h" + #if V8_TARGET_ARCH_IA32 #include "src/crankshaft/ia32/lithium-ia32.h" // NOLINT #include "src/crankshaft/ia32/lithium-codegen-ia32.h" // NOLINT @@ -237,7 +239,8 @@ void LCodeGenBase::WriteTranslationFrame(LEnvironment* environment, int shared_id = DefineDeoptimizationLiteral( environment->entry() ? 
environment->entry()->shared() : info()->shared_info()); - translation->BeginConstructStubFrame(shared_id, translation_size); + translation->BeginConstructStubFrame(BailoutId::ConstructStubInvoke(), + shared_id, translation_size); if (info()->closure().is_identical_to(environment->closure())) { translation->StoreJSFrameFunction(); } else { diff --git a/deps/v8/src/crankshaft/lithium.cc b/deps/v8/src/crankshaft/lithium.cc index 94d60418fd..5f0e9e386d 100644 --- a/deps/v8/src/crankshaft/lithium.cc +++ b/deps/v8/src/crankshaft/lithium.cc @@ -6,6 +6,7 @@ #include "src/ast/scopes.h" #include "src/codegen.h" +#include "src/objects-inl.h" #if V8_TARGET_ARCH_IA32 #include "src/crankshaft/ia32/lithium-ia32.h" // NOLINT diff --git a/deps/v8/src/crankshaft/mips/lithium-codegen-mips.cc b/deps/v8/src/crankshaft/mips/lithium-codegen-mips.cc index 36019cc94d..cd6e45af85 100644 --- a/deps/v8/src/crankshaft/mips/lithium-codegen-mips.cc +++ b/deps/v8/src/crankshaft/mips/lithium-codegen-mips.cc @@ -303,7 +303,7 @@ bool LCodeGen::GenerateDeferredCode() { DCHECK(!frame_is_built_); DCHECK(info()->IsStub()); frame_is_built_ = true; - __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); + __ li(scratch0(), Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ PushCommonFrame(scratch0()); Comment(";;; Deferred code"); } @@ -362,7 +362,7 @@ bool LCodeGen::GenerateJumpTable() { // This variant of deopt can only be used with stubs. Since we don't // have a function pointer to install in the stack frame that we're // building, install a special marker there instead. - __ li(at, Operand(Smi::FromInt(StackFrame::STUB))); + __ li(at, Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ push(at); DCHECK(info()->IsStub()); } @@ -2025,14 +2025,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ Branch(instr->TrueLabel(chunk_), eq, scratch, Operand(SYMBOL_TYPE)); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. - const Register scratch = scratch1(); - __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); - __ Branch(instr->TrueLabel(chunk_), eq, scratch, - Operand(SIMD128_VALUE_TYPE)); - } - if (expected & ToBooleanHint::kHeapNumber) { // heap number -> false iff +0, -0, or NaN. DoubleRegister dbl_scratch = double_scratch0(); @@ -2874,7 +2866,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { __ lw(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ lw(result, MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset)); - __ Xor(temp, result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + __ Xor(temp, result, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); // Result is the frame pointer for the frame if not adapted and for the real // frame below the adaptor frame if adapted. @@ -3486,7 +3479,7 @@ void LCodeGen::PrepareForTailCall(const ParameterCount& actual, __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ lw(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset)); __ Branch(&no_arguments_adaptor, ne, scratch3, - Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); // Drop current frame and load arguments count from arguments adaptor frame. 
__ mov(fp, scratch2); @@ -4768,6 +4761,13 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. + __ lw(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); + __ lw(scratch0(), FieldMemOperand(scratch0(), Map::kBitField3Offset)); + __ And(at, scratch0(), Operand(Map::Deprecated::kMask)); + __ Branch(&deopt, eq, at, Operand(zero_reg)); + { PushSafepointRegistersScope scope(this); __ push(object); @@ -4778,8 +4778,15 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { __ StoreToSafepointRegisterSlot(v0, scratch0()); } __ SmiTst(scratch0(), at); - DeoptimizeIf(eq, instr, DeoptimizeReason::kInstanceMigrationFailed, at, + __ Branch(&done, ne, at, Operand(zero_reg)); + + __ bind(&deopt); + // In case of "al" condition the operands are not used so just pass zero_reg + // there. + DeoptimizeIf(al, instr, DeoptimizeReason::kInstanceMigrationFailed, zero_reg, Operand(zero_reg)); + + __ bind(&done); } @@ -5156,19 +5163,6 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, *cmp2 = Operand(zero_reg); final_branch_condition = eq; -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory->type##_string())) { \ - __ JumpIfSmi(input, false_label); \ - __ lw(input, FieldMemOperand(input, HeapObject::kMapOffset)); \ - __ LoadRoot(at, Heap::k##Type##MapRootIndex); \ - *cmp1 = input; \ - *cmp2 = Operand(at); \ - final_branch_condition = eq; - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - } else { *cmp1 = at; *cmp2 = Operand(zero_reg); // Set to valid regs, to avoid caller assertion. diff --git a/deps/v8/src/crankshaft/mips64/lithium-codegen-mips64.cc b/deps/v8/src/crankshaft/mips64/lithium-codegen-mips64.cc index 350cede90b..d32052c5e7 100644 --- a/deps/v8/src/crankshaft/mips64/lithium-codegen-mips64.cc +++ b/deps/v8/src/crankshaft/mips64/lithium-codegen-mips64.cc @@ -279,7 +279,7 @@ bool LCodeGen::GenerateDeferredCode() { DCHECK(!frame_is_built_); DCHECK(info()->IsStub()); frame_is_built_ = true; - __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); + __ li(scratch0(), Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ PushCommonFrame(scratch0()); Comment(";;; Deferred code"); } @@ -347,7 +347,7 @@ bool LCodeGen::GenerateJumpTable() { // This variant of deopt can only be used with stubs. Since we don't // have a function pointer to install in the stack frame that we're // building, install a special marker there instead. - __ li(at, Operand(Smi::FromInt(StackFrame::STUB))); + __ li(at, Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ push(at); DCHECK(info()->IsStub()); } @@ -2146,14 +2146,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ Branch(instr->TrueLabel(chunk_), eq, scratch, Operand(SYMBOL_TYPE)); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. - const Register scratch = scratch1(); - __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); - __ Branch(instr->TrueLabel(chunk_), eq, scratch, - Operand(SIMD128_VALUE_TYPE)); - } - if (expected & ToBooleanHint::kHeapNumber) { // heap number -> false iff +0, -0, or NaN. 
DoubleRegister dbl_scratch = double_scratch0(); @@ -3056,7 +3048,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { __ ld(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ ld(result, MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset)); - __ Xor(temp, result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + __ Xor(temp, result, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); // Result is the frame pointer for the frame if not adapted and for the real // frame below the adaptor frame if adapted. @@ -3692,7 +3685,7 @@ void LCodeGen::PrepareForTailCall(const ParameterCount& actual, __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ ld(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset)); __ Branch(&no_arguments_adaptor, ne, scratch3, - Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); // Drop current frame and load arguments count from arguments adaptor frame. __ mov(fp, scratch2); @@ -4959,6 +4952,13 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. + __ ld(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset)); + __ lwu(scratch0(), FieldMemOperand(scratch0(), Map::kBitField3Offset)); + __ And(at, scratch0(), Operand(Map::Deprecated::kMask)); + __ Branch(&deopt, eq, at, Operand(zero_reg)); + { PushSafepointRegistersScope scope(this); __ push(object); @@ -4969,8 +4969,15 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { __ StoreToSafepointRegisterSlot(v0, scratch0()); } __ SmiTst(scratch0(), at); - DeoptimizeIf(eq, instr, DeoptimizeReason::kInstanceMigrationFailed, at, + __ Branch(&done, ne, at, Operand(zero_reg)); + + __ bind(&deopt); + // In case of "al" condition the operands are not used so just pass zero_reg + // there. + DeoptimizeIf(al, instr, DeoptimizeReason::kInstanceMigrationFailed, zero_reg, Operand(zero_reg)); + + __ bind(&done); } @@ -5349,20 +5356,6 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, *cmp2 = Operand(zero_reg); final_branch_condition = eq; -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory->type##_string())) { \ - __ JumpIfSmi(input, false_label); \ - __ ld(input, FieldMemOperand(input, HeapObject::kMapOffset)); \ - __ LoadRoot(at, Heap::k##Type##MapRootIndex); \ - *cmp1 = input; \ - *cmp2 = Operand(at); \ - final_branch_condition = eq; - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - - } else { *cmp1 = at; *cmp2 = Operand(zero_reg); // Set to valid regs, to avoid caller assertion. 
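Each Crankshaft port touched above and below gets the same restructuring of LCodeGen::DoDeferredInstanceMigration: before calling Runtime::kTryMigrateInstance, the generated code now loads the object's map, tests the Map::Deprecated bit in bit field 3, and deoptimizes straight away when the map is not deprecated, since a migration attempt can only succeed for a deprecated map. Below is a minimal, self-contained C++ model of that control flow; the types and helpers are stand-ins invented for illustration, and only the shape of the logic (deprecated-map pre-check, runtime call, failed-result test) comes from the hunks in this diff.

#include <cstdio>

// Stand-in types; the real code operates on V8 heap objects and tagged values.
struct Map    { bool deprecated; };
struct Object { Map* map; };

// Placeholder for Runtime::kTryMigrateInstance: in the real runtime a Smi
// return value signals that the migration failed.
bool TryMigrateInstance(Object* o) { return o->map->deprecated; }

// Shape of the deferred path after this diff; returning false stands in for
// DeoptimizeIf(..., DeoptimizeReason::kInstanceMigrationFailed).
bool CheckMapsDeferredPath(Object* o) {
  // New guard: if the map is not deprecated, migration cannot succeed,
  // so deoptimize without making the runtime call at all.
  if (!o->map->deprecated) return false;
  // Pre-existing path: call the runtime and treat a failed migration
  // (a Smi result in the real code) as a reason to deoptimize.
  return TryMigrateInstance(o);
}

int main() {
  Map fresh{false}, stale{true};
  Object a{&fresh}, b{&stale};
  std::printf("non-deprecated map -> deopt: %s\n",
              CheckMapsDeferredPath(&a) ? "no" : "yes");
  std::printf("deprecated map     -> deopt: %s\n",
              CheckMapsDeferredPath(&b) ? "no" : "yes");
}

The practical effect is that the old single conditional DeoptimizeIf after the runtime call becomes an explicit deopt/done label pair, which is why the MIPS, PPC, and s390 hunks also gain an unconditional ("al") DeoptimizeIf with dummy condition operands.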
diff --git a/deps/v8/src/crankshaft/ppc/lithium-codegen-ppc.cc b/deps/v8/src/crankshaft/ppc/lithium-codegen-ppc.cc index 1450a714c4..f930611b14 100644 --- a/deps/v8/src/crankshaft/ppc/lithium-codegen-ppc.cc +++ b/deps/v8/src/crankshaft/ppc/lithium-codegen-ppc.cc @@ -287,7 +287,7 @@ bool LCodeGen::GenerateDeferredCode() { DCHECK(!frame_is_built_); DCHECK(info()->IsStub()); frame_is_built_ = true; - __ LoadSmiLiteral(scratch0(), Smi::FromInt(StackFrame::STUB)); + __ mov(scratch0(), Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ PushCommonFrame(scratch0()); Comment(";;; Deferred code"); } @@ -356,7 +356,7 @@ bool LCodeGen::GenerateJumpTable() { // This variant of deopt can only be used with stubs. Since we don't // have a function pointer to install in the stack frame that we're // building, install a special marker there instead. - __ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::STUB)); + __ mov(ip, Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ push(ip); DCHECK(info()->IsStub()); } @@ -1707,12 +1707,15 @@ void LCodeGen::DoSubI(LSubI* instr) { } else { __ sub(result, left, EmitLoadRegister(right, ip)); } -#if V8_TARGET_ARCH_PPC64 if (can_overflow) { +#if V8_TARGET_ARCH_PPC64 __ TestIfInt32(result, r0); +#else + __ TestIfInt32(scratch0(), result, r0); +#endif DeoptimizeIf(ne, instr, DeoptimizeReason::kOverflow); } -#endif + } else { if (right->IsConstantOperand()) { __ AddAndCheckForOverflow(result, left, -(ToOperand(right).immediate()), @@ -2203,13 +2206,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ beq(instr->TrueLabel(chunk_)); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. - Label not_simd; - __ CompareInstanceType(map, ip, SIMD128_VALUE_TYPE); - __ beq(instr->TrueLabel(chunk_)); - } - if (expected & ToBooleanHint::kHeapNumber) { // heap number -> false iff +0, -0, or NaN. Label not_heap_number; @@ -3133,7 +3129,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { __ LoadP( result, MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset)); - __ CmpSmiLiteral(result, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); + __ cmpi(result, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); // Result is the frame pointer for the frame if not adapted and for the real // frame below the adaptor frame if adapted. @@ -3771,7 +3768,8 @@ void LCodeGen::PrepareForTailCall(const ParameterCount& actual, __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ LoadP(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset)); - __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); + __ cmpi(scratch3, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); __ bne(&no_arguments_adaptor); // Drop current frame and load arguments count from arguments adaptor frame. @@ -5064,6 +5062,13 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { Register temp = ToRegister(instr->temp()); + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. 
+ __ LoadP(temp, FieldMemOperand(object, HeapObject::kMapOffset)); + __ lwz(temp, FieldMemOperand(temp, Map::kBitField3Offset)); + __ TestBitMask(temp, Map::Deprecated::kMask, r0); + __ beq(&deopt, cr0); + { PushSafepointRegistersScope scope(this); __ push(object); @@ -5074,7 +5079,13 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { __ StoreToSafepointRegisterSlot(r3, temp); } __ TestIfSmi(temp, r0); - DeoptimizeIf(eq, instr, DeoptimizeReason::kInstanceMigrationFailed, cr0); + __ bne(&done, cr0); + + __ bind(&deopt); + // In case of "al" condition the operand is not used so just pass cr0 there. + DeoptimizeIf(al, instr, DeoptimizeReason::kInstanceMigrationFailed, cr0); + + __ bind(&done); } @@ -5426,17 +5437,6 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, Label* false_label, __ cmpi(r0, Operand::Zero()); final_branch_condition = eq; -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory->type##_string())) { \ - __ JumpIfSmi(input, false_label); \ - __ LoadP(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); \ - __ CompareRoot(scratch, Heap::k##Type##MapRootIndex); \ - final_branch_condition = eq; - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - } else { __ b(false_label); } diff --git a/deps/v8/src/crankshaft/s390/lithium-codegen-s390.cc b/deps/v8/src/crankshaft/s390/lithium-codegen-s390.cc index 7bbc917bc6..02c6b6f7fa 100644 --- a/deps/v8/src/crankshaft/s390/lithium-codegen-s390.cc +++ b/deps/v8/src/crankshaft/s390/lithium-codegen-s390.cc @@ -275,7 +275,8 @@ bool LCodeGen::GenerateDeferredCode() { DCHECK(!frame_is_built_); DCHECK(info()->IsStub()); frame_is_built_ = true; - __ LoadSmiLiteral(scratch0(), Smi::FromInt(StackFrame::STUB)); + __ Load(scratch0(), + Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ PushCommonFrame(scratch0()); Comment(";;; Deferred code"); } @@ -344,7 +345,7 @@ bool LCodeGen::GenerateJumpTable() { // have a function pointer to install in the stack frame that we're // building, install a special marker there instead. DCHECK(info()->IsStub()); - __ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::STUB)); + __ Load(ip, Operand(StackFrame::TypeToMarker(StackFrame::STUB))); __ push(ip); DCHECK(info()->IsStub()); } @@ -1698,10 +1699,17 @@ void LCodeGen::DoSubI(LSubI* instr) { #endif if (right->IsConstantOperand()) { - if (!isInteger || !checkOverflow) + if (!isInteger || !checkOverflow) { __ SubP(ToRegister(result), ToRegister(left), ToOperand(right)); - else - __ Sub32(ToRegister(result), ToRegister(left), ToOperand(right)); + } else { + // -(MinInt) will overflow + if (ToInteger32(LConstantOperand::cast(right)) == kMinInt) { + __ Load(scratch0(), ToOperand(right)); + __ Sub32(ToRegister(result), ToRegister(left), scratch0()); + } else { + __ Sub32(ToRegister(result), ToRegister(left), ToOperand(right)); + } + } } else if (right->IsRegister()) { if (!isInteger) __ SubP(ToRegister(result), ToRegister(left), ToRegister(right)); @@ -2202,13 +2210,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ beq(instr->TrueLabel(chunk_)); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. - Label not_simd; - __ CompareInstanceType(map, ip, SIMD128_VALUE_TYPE); - __ beq(instr->TrueLabel(chunk_)); - } - if (expected & ToBooleanHint::kHeapNumber) { // heap number -> false iff +0, -0, or NaN. 
Label not_heap_number; @@ -3095,8 +3096,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { __ LoadP( result, MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset)); - __ LoadSmiLiteral(r0, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); - __ CmpP(result, r0); + __ CmpP(result, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); // Result is the frame pointer for the frame if not adapted and for the real // frame below the adaptor frame if adapted. @@ -3673,7 +3674,8 @@ void LCodeGen::PrepareForTailCall(const ParameterCount& actual, __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); __ LoadP(scratch3, MemOperand(scratch2, StandardFrameConstants::kContextOffset)); - __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); + __ CmpP(scratch3, + Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); __ bne(&no_arguments_adaptor); // Drop current frame and load arguments count from arguments adaptor frame. @@ -5005,6 +5007,13 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { Register temp = ToRegister(instr->temp()); + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. + __ LoadP(temp, FieldMemOperand(object, HeapObject::kMapOffset)); + __ LoadlW(temp, FieldMemOperand(temp, Map::kBitField3Offset)); + __ TestBitMask(temp, Map::Deprecated::kMask, r0); + __ beq(&deopt); + { PushSafepointRegistersScope scope(this); __ push(object); @@ -5015,7 +5024,13 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { __ StoreToSafepointRegisterSlot(r2, temp); } __ TestIfSmi(temp); - DeoptimizeIf(eq, instr, DeoptimizeReason::kInstanceMigrationFailed, cr0); + __ bne(&done); + + __ bind(&deopt); + // In case of "al" condition the operand is not used so just pass cr0 there. + DeoptimizeIf(al, instr, DeoptimizeReason::kInstanceMigrationFailed, cr0); + + __ bind(&done); } void LCodeGen::DoCheckMaps(LCheckMaps* instr) { @@ -5364,17 +5379,6 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, Label* false_label, __ CmpP(r0, Operand::Zero()); final_branch_condition = eq; -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory->type##_string())) { \ - __ JumpIfSmi(input, false_label); \ - __ LoadP(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); \ - __ CompareRoot(scratch, Heap::k##Type##MapRootIndex); \ - final_branch_condition = eq; - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - } else { __ b(false_label); } diff --git a/deps/v8/src/crankshaft/typing.cc b/deps/v8/src/crankshaft/typing.cc index bbf629d45e..9713e4fd6f 100644 --- a/deps/v8/src/crankshaft/typing.cc +++ b/deps/v8/src/crankshaft/typing.cc @@ -405,7 +405,7 @@ void AstTyper::VisitObjectLiteral(ObjectLiteral* expr) { prop->key()->AsLiteral()->value()->IsInternalizedString() && prop->emit_store()) { // Record type feed back for the property. - FeedbackVectorSlot slot = prop->GetSlot(); + FeedbackSlot slot = prop->GetSlot(); SmallMapList maps; oracle()->CollectReceiverTypes(slot, &maps); prop->set_receiver_type(maps.length() == 1 ? maps.at(0) @@ -435,7 +435,7 @@ void AstTyper::VisitAssignment(Assignment* expr) { // Collect type feedback. 
Property* prop = expr->target()->AsProperty(); if (prop != NULL) { - FeedbackVectorSlot slot = expr->AssignmentSlot(); + FeedbackSlot slot = expr->AssignmentSlot(); expr->set_is_uninitialized(oracle()->StoreIsUninitialized(slot)); if (!expr->IsUninitialized()) { SmallMapList* receiver_types = expr->GetReceiverTypes(); @@ -486,7 +486,7 @@ void AstTyper::VisitThrow(Throw* expr) { void AstTyper::VisitProperty(Property* expr) { // Collect type feedback. - FeedbackVectorSlot slot = expr->PropertyFeedbackSlot(); + FeedbackSlot slot = expr->PropertyFeedbackSlot(); expr->set_inline_cache_state(oracle()->LoadInlineCacheState(slot)); if (!expr->IsUninitialized()) { @@ -515,7 +515,7 @@ void AstTyper::VisitProperty(Property* expr) { void AstTyper::VisitCall(Call* expr) { // Collect type feedback. RECURSE(Visit(expr->expression())); - FeedbackVectorSlot slot = expr->CallFeedbackICSlot(); + FeedbackSlot slot = expr->CallFeedbackICSlot(); bool is_uninitialized = oracle()->CallIsUninitialized(slot); if (!expr->expression()->IsProperty() && oracle()->CallIsMonomorphic(slot)) { expr->set_target(oracle()->GetCallTarget(slot)); @@ -541,8 +541,7 @@ void AstTyper::VisitCall(Call* expr) { void AstTyper::VisitCallNew(CallNew* expr) { // Collect type feedback. - FeedbackVectorSlot allocation_site_feedback_slot = - expr->CallNewFeedbackSlot(); + FeedbackSlot allocation_site_feedback_slot = expr->CallNewFeedbackSlot(); expr->set_allocation_site( oracle()->GetCallNewAllocationSite(allocation_site_feedback_slot)); bool monomorphic = @@ -602,7 +601,7 @@ void AstTyper::VisitUnaryOperation(UnaryOperation* expr) { void AstTyper::VisitCountOperation(CountOperation* expr) { // Collect type feedback. - FeedbackVectorSlot slot = expr->CountSlot(); + FeedbackSlot slot = expr->CountSlot(); KeyedAccessStoreMode store_mode; IcCheckType key_type; oracle()->GetStoreModeAndKeyType(slot, &store_mode, &key_type); diff --git a/deps/v8/src/crankshaft/x64/lithium-codegen-x64.cc b/deps/v8/src/crankshaft/x64/lithium-codegen-x64.cc index f09af7136e..65816a1b69 100644 --- a/deps/v8/src/crankshaft/x64/lithium-codegen-x64.cc +++ b/deps/v8/src/crankshaft/x64/lithium-codegen-x64.cc @@ -13,6 +13,7 @@ #include "src/crankshaft/hydrogen-osr.h" #include "src/ic/ic.h" #include "src/ic/stub-cache.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { @@ -337,7 +338,8 @@ bool LCodeGen::GenerateJumpTable() { // have a function pointer to install in the stack frame that we're // building, install a special marker there instead. DCHECK(info()->IsStub()); - __ Move(MemOperand(rsp, 2 * kPointerSize), Smi::FromInt(StackFrame::STUB)); + __ movp(MemOperand(rsp, 2 * kPointerSize), + Immediate(StackFrame::TypeToMarker(StackFrame::STUB))); /* stack layout 3: old rbp @@ -375,7 +377,7 @@ bool LCodeGen::GenerateDeferredCode() { frame_is_built_ = true; // Build the frame in such a way that esi isn't trashed. __ pushq(rbp); // Caller's frame pointer. - __ Push(Smi::FromInt(StackFrame::STUB)); + __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::STUB))); __ leap(rbp, Operand(rsp, TypedFrameConstants::kFixedFrameSizeFromFp)); Comment(";;; Deferred code"); } @@ -2065,12 +2067,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ j(equal, instr->TrueLabel(chunk_)); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. - __ CmpInstanceType(map, SIMD128_VALUE_TYPE); - __ j(equal, instr->TrueLabel(chunk_)); - } - if (expected & ToBooleanHint::kHeapNumber) { // heap number -> false iff +0, -0, or NaN. 
Label not_heap_number; @@ -2887,8 +2883,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { // Check for arguments adapter frame. Label done, adapted; __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); - __ Cmp(Operand(result, CommonFrameConstants::kContextOrFrameTypeOffset), - Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); + __ cmpp(Operand(result, CommonFrameConstants::kContextOrFrameTypeOffset), + Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); __ j(equal, &adapted, Label::kNear); // No arguments adaptor frame. @@ -3563,8 +3559,8 @@ void LCodeGen::PrepareForTailCall(const ParameterCount& actual, Register caller_args_count_reg = scratch1; Label no_arguments_adaptor, formal_parameter_count_loaded; __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); - __ Cmp(Operand(scratch2, StandardFrameConstants::kContextOffset), - Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); + __ cmpp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset), + Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); __ j(not_equal, &no_arguments_adaptor, Label::kNear); // Drop current frame and load arguments count from arguments adaptor frame. @@ -4817,9 +4813,19 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. + __ Push(object); + __ movp(object, FieldOperand(object, HeapObject::kMapOffset)); + __ testl(FieldOperand(object, Map::kBitField3Offset), + Immediate(Map::Deprecated::kMask)); + __ Pop(object); + __ j(zero, &deopt); + { PushSafepointRegistersScope scope(this); __ Push(object); + __ Set(rsi, 0); __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance); RecordSafepointWithRegisters( @@ -4827,7 +4833,12 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { __ testp(rax, Immediate(kSmiTagMask)); } - DeoptimizeIf(zero, instr, DeoptimizeReason::kInstanceMigrationFailed); + __ j(not_zero, &done); + + __ bind(&deopt); + DeoptimizeIf(always, instr, DeoptimizeReason::kInstanceMigrationFailed); + + __ bind(&done); } @@ -5180,17 +5191,6 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); final_branch_condition = zero; -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory->type##_string())) { \ - __ JumpIfSmi(input, false_label, false_distance); \ - __ CompareRoot(FieldOperand(input, HeapObject::kMapOffset), \ - Heap::k##Type##MapRootIndex); \ - final_branch_condition = equal; - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - } else { __ jmp(false_label, false_distance); } diff --git a/deps/v8/src/crankshaft/x64/lithium-gap-resolver-x64.cc b/deps/v8/src/crankshaft/x64/lithium-gap-resolver-x64.cc index 94dffb333a..38b7d4525a 100644 --- a/deps/v8/src/crankshaft/x64/lithium-gap-resolver-x64.cc +++ b/deps/v8/src/crankshaft/x64/lithium-gap-resolver-x64.cc @@ -7,6 +7,7 @@ #include "src/crankshaft/x64/lithium-gap-resolver-x64.h" #include "src/crankshaft/x64/lithium-codegen-x64.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/x64/lithium-x64.cc b/deps/v8/src/crankshaft/x64/lithium-x64.cc index bc9040b94c..d0671e9d41 100644 --- a/deps/v8/src/crankshaft/x64/lithium-x64.cc 
+++ b/deps/v8/src/crankshaft/x64/lithium-x64.cc @@ -11,6 +11,7 @@ #include "src/crankshaft/hydrogen-osr.h" #include "src/crankshaft/lithium-inl.h" #include "src/crankshaft/x64/lithium-codegen-x64.h" +#include "src/objects-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc b/deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc index 9c932bc6ae..f526a19603 100644 --- a/deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc +++ b/deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc @@ -2198,12 +2198,6 @@ void LCodeGen::DoBranch(LBranch* instr) { __ j(equal, instr->TrueLabel(chunk_)); } - if (expected & ToBooleanHint::kSimdValue) { - // SIMD value -> true. - __ CmpInstanceType(map, SIMD128_VALUE_TYPE); - __ j(equal, instr->TrueLabel(chunk_)); - } - if (expected & ToBooleanHint::kHeapNumber) { // heap number -> false iff +0, -0, or NaN. Label not_heap_number; @@ -4946,6 +4940,15 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) { void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { + Label deopt, done; + // If the map is not deprecated the migration attempt does not make sense. + __ push(object); + __ mov(object, FieldOperand(object, HeapObject::kMapOffset)); + __ test(FieldOperand(object, Map::kBitField3Offset), + Immediate(Map::Deprecated::kMask)); + __ pop(object); + __ j(zero, &deopt); + { PushSafepointRegistersScope scope(this); __ push(object); @@ -4956,7 +4959,12 @@ void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { __ test(eax, Immediate(kSmiTagMask)); } - DeoptimizeIf(zero, instr, DeoptimizeReason::kInstanceMigrationFailed); + __ j(not_zero, &done); + + __ bind(&deopt); + DeoptimizeIf(no_condition, instr, DeoptimizeReason::kInstanceMigrationFailed); + + __ bind(&done); } @@ -5392,17 +5400,6 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) { Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); final_branch_condition = zero; -// clang-format off -#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ - } else if (String::Equals(type_name, factory()->type##_string())) { \ - __ JumpIfSmi(input, false_label, false_distance); \ - __ cmp(FieldOperand(input, HeapObject::kMapOffset), \ - factory()->type##_map()); \ - final_branch_condition = equal; - SIMD128_TYPES(SIMD128_TYPE) -#undef SIMD128_TYPE - // clang-format on - } else { __ jmp(false_label, false_distance); } |
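One change in this diff benefits from a worked example: the s390 DoSubI hunk special-cases a constant right operand equal to kMinInt ("-(MinInt) will overflow"), loading the constant into scratch0() and using a register-register Sub32 instead of the operand form, presumably because folding the subtraction into an addition of the negated constant is invalid for that one value. The standalone C++ below demonstrates the underlying arithmetic fact; the helper name and the 64-bit widening check are illustrative only and are not V8 code.

#include <cstdint>
#include <cstdio>
#include <limits>

// Shows why "a - INT32_MIN" cannot be rewritten as "a + (-INT32_MIN)":
// negating INT32_MIN overflows 32-bit two's complement.
bool sub32_overflows(int32_t left, int32_t right) {
  // Do the subtraction in 64 bits and check whether the result fits in 32.
  int64_t wide = static_cast<int64_t>(left) - static_cast<int64_t>(right);
  return wide < std::numeric_limits<int32_t>::min() ||
         wide > std::numeric_limits<int32_t>::max();
}

int main() {
  const int32_t min32 = std::numeric_limits<int32_t>::min();
  // 0 - INT32_MIN = 2147483648, one past INT32_MAX, so a 32-bit result
  // would wrap; this is the overflow the code generator must deopt on.
  std::printf("0 - INT32_MIN overflows: %s\n",
              sub32_overflows(0, min32) ? "yes" : "no");
  std::printf("1 - 2 overflows:         %s\n",
              sub32_overflows(1, 2) ? "yes" : "no");
}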