diff options
author | Ben Noordhuis <info@bnoordhuis.nl> | 2013-08-10 16:27:43 +0200 |
---|---|---|
committer | Ben Noordhuis <info@bnoordhuis.nl> | 2013-08-10 16:27:43 +0200 |
commit | f69be329f0d78f19e71ac9e75d6e4ee816e13c97 (patch) | |
tree | 022e53d1aff74dbe50f3984c154461bf6c19efb0 /deps/v8/src/ia32/lithium-codegen-ia32.cc | |
parent | 39aa894035f9e3b58e04ce1a2b598e496e1f6bd6 (diff) | |
download | node-f69be329f0d78f19e71ac9e75d6e4ee816e13c97.tar.gz |
v8: upgrade v8 to 3.20.14.1
Diffstat (limited to 'deps/v8/src/ia32/lithium-codegen-ia32.cc')
-rw-r--r-- | deps/v8/src/ia32/lithium-codegen-ia32.cc | 175 |
1 file changed, 91 insertions, 84 deletions
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc index 3ddad068b..061ec9b6d 100644 --- a/deps/v8/src/ia32/lithium-codegen-ia32.cc +++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc @@ -113,7 +113,7 @@ void LCodeGen::FinishCode(Handle<Code> code) { } -void LCodeGen::Abort(const char* reason) { +void LCodeGen::Abort(BailoutReason reason) { info()->set_bailout_reason(reason); status_ = ABORTED; } @@ -137,6 +137,16 @@ void LCodeGen::Comment(const char* format, ...) { } +#ifdef _MSC_VER +void LCodeGen::MakeSureStackPagesMapped(int offset) { + const int kPageSize = 4 * KB; + for (offset -= kPageSize; offset > 0; offset -= kPageSize) { + __ mov(Operand(esp, offset), eax); + } +} +#endif + + bool LCodeGen::GeneratePrologue() { ASSERT(is_generating()); @@ -210,7 +220,7 @@ bool LCodeGen::GeneratePrologue() { dynamic_frame_alignment_ && FLAG_debug_code) { __ test(esp, Immediate(kPointerSize)); - __ Assert(zero, "frame is expected to be aligned"); + __ Assert(zero, kFrameIsExpectedToBeAligned); } // Reserve space for the stack slots needed by the code. @@ -226,6 +236,9 @@ bool LCodeGen::GeneratePrologue() { } else { if (FLAG_debug_code) { __ sub(Operand(esp), Immediate(slots * kPointerSize)); +#ifdef _MSC_VER + MakeSureStackPagesMapped(slots * kPointerSize); +#endif __ push(eax); __ mov(Operand(eax), Immediate(slots)); Label loop; @@ -238,15 +251,7 @@ bool LCodeGen::GeneratePrologue() { } else { __ sub(Operand(esp), Immediate(slots * kPointerSize)); #ifdef _MSC_VER - // On windows, you may not access the stack more than one page below - // the most recently mapped page. To make the allocated area randomly - // accessible, we write to each page in turn (the value is irrelevant). 
- const int kPageSize = 4 * KB; - for (int offset = slots * kPointerSize - kPageSize; - offset > 0; - offset -= kPageSize) { - __ mov(Operand(esp, offset), eax); - } + MakeSureStackPagesMapped(slots * kPointerSize); #endif } @@ -877,7 +882,7 @@ void LCodeGen::LoadContextFromDeferred(LOperand* context) { } else if (context->IsConstantOperand()) { HConstant* constant = chunk_->LookupConstant(LConstantOperand::cast(context)); - __ LoadHeapObject(esi, Handle<Context>::cast(constant->handle())); + __ LoadObject(esi, Handle<Object>::cast(constant->handle())); } else { UNREACHABLE(); } @@ -943,7 +948,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, Address entry = Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); if (entry == NULL) { - Abort("bailout was not prepared"); + Abort(kBailoutWasNotPrepared); return; } @@ -1674,8 +1679,9 @@ void LCodeGen::DoBitI(LBitI* instr) { ASSERT(left->IsRegister()); if (right->IsConstantOperand()) { - int right_operand = ToRepresentation(LConstantOperand::cast(right), - instr->hydrogen()->representation()); + int32_t right_operand = + ToRepresentation(LConstantOperand::cast(right), + instr->hydrogen()->representation()); switch (instr->op()) { case Token::BIT_AND: __ and_(ToRegister(left), right_operand); @@ -1684,7 +1690,11 @@ void LCodeGen::DoBitI(LBitI* instr) { __ or_(ToRegister(left), right_operand); break; case Token::BIT_XOR: - __ xor_(ToRegister(left), right_operand); + if (right_operand == int32_t(~0)) { + __ not_(ToRegister(left)); + } else { + __ xor_(ToRegister(left), right_operand); + } break; default: UNREACHABLE(); @@ -1771,7 +1781,9 @@ void LCodeGen::DoShiftI(LShiftI* instr) { if (shift_count != 0) { if (instr->hydrogen_value()->representation().IsSmi() && instr->can_deopt()) { - __ shl(ToRegister(left), shift_count - 1); + if (shift_count != 1) { + __ shl(ToRegister(left), shift_count - 1); + } __ SmiTag(ToRegister(left)); DeoptimizeIf(overflow, instr->environment()); } else { @@ -1969,7 +1981,7 @@ void 
LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; __ cmp(value, Immediate(encoding == String::ONE_BYTE_ENCODING ? one_byte_seq_type : two_byte_seq_type)); - __ Check(equal, "Unexpected string type"); + __ Check(equal, kUnexpectedStringType); __ pop(value); } @@ -1983,13 +1995,6 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { } -void LCodeGen::DoBitNotI(LBitNotI* instr) { - LOperand* input = instr->value(); - ASSERT(input->Equals(instr->result())); - __ not_(ToRegister(input)); -} - - void LCodeGen::DoThrow(LThrow* instr) { __ push(ToOperand(instr->value())); ASSERT(ToRegister(instr->context()).is(esi)); @@ -2856,7 +2861,7 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) { __ cmp(Operand(esp, (parameter_count + extra_value_count) * kPointerSize), Immediate(kAlignmentZapValue)); - __ Assert(equal, "expected alignment marker"); + __ Assert(equal, kExpectedAlignmentMarker); } __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); } else { @@ -2869,7 +2874,7 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) { __ cmp(Operand(esp, reg, times_pointer_size, extra_value_count * kPointerSize), Immediate(kAlignmentZapValue)); - __ Assert(equal, "expected alignment marker"); + __ Assert(equal, kExpectedAlignmentMarker); } // emit code to restore stack based on instr->parameter_count() @@ -2989,20 +2994,6 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { } -void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) { - Register object = ToRegister(instr->object()); - Register temp = ToRegister(instr->temp()); - ExternalReference sites_list_address = instr->GetReference(isolate()); - - __ mov(temp, Immediate(sites_list_address)); - __ mov(temp, Operand(temp, 0)); - __ mov(FieldOperand(object, instr->hydrogen()->store_field().offset()), - temp); - __ mov(temp, Immediate(sites_list_address)); - __ 
mov(Operand(temp, 0), object); -} - - void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { Register context = ToRegister(instr->context()); Register result = ToRegister(instr->result()); @@ -3163,9 +3154,6 @@ static bool CompactEmit(SmallMapList* list, int i, Isolate* isolate) { Handle<Map> map = list->at(i); - // If the map has ElementsKind transitions, we will generate map checks - // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS). - if (map->HasElementsTransition()) return false; LookupResult lookup(isolate); map->LookupDescriptor(NULL, *name, &lookup); return lookup.IsField() || lookup.IsConstant(); @@ -3457,7 +3445,7 @@ Operand LCodeGen::BuildFastArrayOperand( if (key->IsConstantOperand()) { int constant_value = ToInteger32(LConstantOperand::cast(key)); if (constant_value & 0xF0000000) { - Abort("array index constant value too big"); + Abort(kArrayIndexConstantValueTooBig); } return Operand(elements_pointer_reg, ((constant_value + additional_index) << shift_size) @@ -3831,7 +3819,7 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) { __ xorps(scratch, scratch); __ subsd(scratch, input_reg); __ pand(input_reg, scratch); - } else if (r.IsInteger32()) { + } else if (r.IsSmiOrInteger32()) { EmitIntegerMathAbs(instr); } else { // Tagged case. DeferredMathAbsTaggedHeapNumber* deferred = @@ -4357,6 +4345,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { int offset = access.offset(); if (access.IsExternalMemory()) { + ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); MemOperand operand = instr->object()->IsConstantOperand() ? 
MemOperand::StaticVariable( ToExternalReference(LConstantOperand::cast(instr->object()))) @@ -4876,13 +4865,6 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { } -void LCodeGen::DoStringLength(LStringLength* instr) { - Register string = ToRegister(instr->string()); - Register result = ToRegister(instr->result()); - __ mov(result, FieldOperand(string, String::kLengthOffset)); -} - - void LCodeGen::DoStringAdd(LStringAdd* instr) { EmitPushTaggedOperand(instr->left()); EmitPushTaggedOperand(instr->right()); @@ -5809,31 +5791,68 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) { } -void LCodeGen::DoCheckMapCommon(Register reg, - Handle<Map> map, - LInstruction* instr) { - Label success; - __ CompareMap(reg, map, &success); - DeoptimizeIf(not_equal, instr->environment()); - __ bind(&success); +void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { + { + PushSafepointRegistersScope scope(this); + __ push(object); + __ xor_(esi, esi); + __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance); + RecordSafepointWithRegisters( + instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); + + __ test(eax, Immediate(kSmiTagMask)); + } + DeoptimizeIf(zero, instr->environment()); } void LCodeGen::DoCheckMaps(LCheckMaps* instr) { + class DeferredCheckMaps: public LDeferredCode { + public: + DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object) + : LDeferredCode(codegen), instr_(instr), object_(object) { + SetExit(check_maps()); + } + virtual void Generate() { + codegen()->DoDeferredInstanceMigration(instr_, object_); + } + Label* check_maps() { return &check_maps_; } + virtual LInstruction* instr() { return instr_; } + private: + LCheckMaps* instr_; + Label check_maps_; + Register object_; + }; + if (instr->hydrogen()->CanOmitMapChecks()) return; + LOperand* input = instr->value(); ASSERT(input->IsRegister()); Register reg = ToRegister(input); - Label success; SmallMapList* map_set = instr->hydrogen()->map_set(); 
+ + DeferredCheckMaps* deferred = NULL; + if (instr->hydrogen()->has_migration_target()) { + deferred = new(zone()) DeferredCheckMaps(this, instr, reg); + __ bind(deferred->check_maps()); + } + + Label success; for (int i = 0; i < map_set->length() - 1; i++) { Handle<Map> map = map_set->at(i); __ CompareMap(reg, map, &success); __ j(equal, &success); } + Handle<Map> map = map_set->last(); - DoCheckMapCommon(reg, map, instr); + __ CompareMap(reg, map, &success); + if (instr->hydrogen()->has_migration_target()) { + __ j(not_equal, deferred->entry()); + } else { + DeoptimizeIf(not_equal, instr->environment()); + } + __ bind(&success); } @@ -6010,22 +6029,6 @@ void LCodeGen::DoClampTToUint8NoSSE2(LClampTToUint8NoSSE2* instr) { } -void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { - if (instr->hydrogen()->CanOmitPrototypeChecks()) return; - Register reg = ToRegister(instr->temp()); - - ZoneList<Handle<JSObject> >* prototypes = instr->prototypes(); - ZoneList<Handle<Map> >* maps = instr->maps(); - - ASSERT(prototypes->length() == maps->length()); - - for (int i = 0; i < prototypes->length(); i++) { - __ LoadHeapObject(reg, prototypes->at(i)); - DoCheckMapCommon(reg, maps->at(i), instr); - } -} - - void LCodeGen::DoAllocate(LAllocate* instr) { class DeferredAllocate: public LDeferredCode { public: @@ -6048,10 +6051,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) { if (instr->hydrogen()->MustAllocateDoubleAligned()) { flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); } - if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { - ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace()); + if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { + ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); + ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); - } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) { + } else if 
(instr->hydrogen()->IsOldDataSpaceAllocation()) { + ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); } @@ -6103,11 +6108,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) { __ push(Immediate(Smi::FromInt(size))); } - if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { - ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace()); + if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { + ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); + ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); CallRuntimeFromDeferred( Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context()); - } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) { + } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { + ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); CallRuntimeFromDeferred( Runtime::kAllocateInOldDataSpace, 1, instr, instr->context()); } else { |