Diffstat (limited to 'deps/v8/src/ia32')
-rw-r--r-- | deps/v8/src/ia32/assembler-ia32-inl.h | 2
-rw-r--r-- | deps/v8/src/ia32/assembler-ia32.cc | 80
-rw-r--r-- | deps/v8/src/ia32/assembler-ia32.h | 33
-rw-r--r-- | deps/v8/src/ia32/builtins-ia32.cc | 41
-rw-r--r-- | deps/v8/src/ia32/codegen-ia32.cc | 338
-rw-r--r-- | deps/v8/src/ia32/codegen-ia32.h | 62
-rw-r--r-- | deps/v8/src/ia32/disasm-ia32.cc | 40
-rw-r--r-- | deps/v8/src/ia32/fast-codegen-ia32.cc | 1251
-rw-r--r-- | deps/v8/src/ia32/frames-ia32.cc | 13
-rw-r--r-- | deps/v8/src/ia32/frames-ia32.h | 2
-rw-r--r-- | deps/v8/src/ia32/macro-assembler-ia32.cc | 102
-rw-r--r-- | deps/v8/src/ia32/macro-assembler-ia32.h | 19
-rw-r--r-- | deps/v8/src/ia32/regexp-macro-assembler-ia32.cc | 12
-rw-r--r-- | deps/v8/src/ia32/register-allocator-ia32.cc | 4
-rw-r--r-- | deps/v8/src/ia32/simulator-ia32.h | 9
-rw-r--r-- | deps/v8/src/ia32/stub-cache-ia32.cc | 42
-rw-r--r-- | deps/v8/src/ia32/virtual-frame-ia32.cc | 18
17 files changed, 1511 insertions, 557 deletions
diff --git a/deps/v8/src/ia32/assembler-ia32-inl.h b/deps/v8/src/ia32/assembler-ia32-inl.h index 5fa75ec8d7..69f2a8da3b 100644 --- a/deps/v8/src/ia32/assembler-ia32-inl.h +++ b/deps/v8/src/ia32/assembler-ia32-inl.h @@ -89,7 +89,7 @@ Object* RelocInfo::target_object() { } -Handle<Object> RelocInfo::target_object_handle(Assembler *origin) { +Handle<Object> RelocInfo::target_object_handle(Assembler* origin) { ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); return Memory::Object_Handle_at(pc_); } diff --git a/deps/v8/src/ia32/assembler-ia32.cc b/deps/v8/src/ia32/assembler-ia32.cc index 698377a0c8..d6f555082a 100644 --- a/deps/v8/src/ia32/assembler-ia32.cc +++ b/deps/v8/src/ia32/assembler-ia32.cc @@ -49,6 +49,7 @@ namespace internal { // Safe default is no features. uint64_t CpuFeatures::supported_ = 0; uint64_t CpuFeatures::enabled_ = 0; +uint64_t CpuFeatures::found_by_runtime_probing_ = 0; // The Probe method needs executable memory, so it uses Heap::CreateCode. @@ -56,7 +57,10 @@ uint64_t CpuFeatures::enabled_ = 0; void CpuFeatures::Probe() { ASSERT(Heap::HasBeenSetup()); ASSERT(supported_ == 0); - if (Serializer::enabled()) return; // No features if we might serialize. + if (Serializer::enabled()) { + supported_ |= OS::CpuFeaturesImpliedByPlatform(); + return; // No features if we might serialize. + } Assembler assm(NULL, 0); Label cpuid, done; @@ -124,6 +128,10 @@ void CpuFeatures::Probe() { typedef uint64_t (*F0)(); F0 probe = FUNCTION_CAST<F0>(Code::cast(code)->entry()); supported_ = probe(); + found_by_runtime_probing_ = supported_; + uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform(); + supported_ |= os_guarantees; + found_by_runtime_probing_ &= ~os_guarantees; } @@ -360,7 +368,7 @@ void Assembler::Align(int m) { void Assembler::cpuid() { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::CPUID)); + ASSERT(CpuFeatures::IsEnabled(CPUID)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x0F); @@ -712,7 +720,7 @@ void Assembler::movzx_w(Register dst, const Operand& src) { void Assembler::cmov(Condition cc, Register dst, int32_t imm32) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::CMOV)); + ASSERT(CpuFeatures::IsEnabled(CMOV)); EnsureSpace ensure_space(this); last_pc_ = pc_; UNIMPLEMENTED(); @@ -723,7 +731,7 @@ void Assembler::cmov(Condition cc, Register dst, int32_t imm32) { void Assembler::cmov(Condition cc, Register dst, Handle<Object> handle) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::CMOV)); + ASSERT(CpuFeatures::IsEnabled(CMOV)); EnsureSpace ensure_space(this); last_pc_ = pc_; UNIMPLEMENTED(); @@ -734,7 +742,7 @@ void Assembler::cmov(Condition cc, Register dst, Handle<Object> handle) { void Assembler::cmov(Condition cc, Register dst, const Operand& src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::CMOV)); + ASSERT(CpuFeatures::IsEnabled(CMOV)); EnsureSpace ensure_space(this); last_pc_ = pc_; // Opcode: 0f 40 + cc /r @@ -1083,7 +1091,7 @@ void Assembler::sar(Register dst, uint8_t imm8) { } -void Assembler::sar(Register dst) { +void Assembler::sar_cl(Register dst) { EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xD3); @@ -1123,7 +1131,7 @@ void Assembler::shl(Register dst, uint8_t imm8) { } -void Assembler::shl(Register dst) { +void Assembler::shl_cl(Register dst) { EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xD3); @@ -1144,24 +1152,21 @@ void Assembler::shr(Register dst, uint8_t imm8) { EnsureSpace ensure_space(this); last_pc_ = pc_; ASSERT(is_uint5(imm8)); // illegal shift count - EMIT(0xC1); - EMIT(0xE8 | dst.code()); - EMIT(imm8); -} - - 
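The probing changes above maintain three bitmasks: features usable at all (supported_), features currently enabled via a Scope (enabled_), and features that were only discovered by executing CPUID (found_by_runtime_probing_). A minimal sketch of that bookkeeping, with RunCpuidProbe as a hypothetical stand-in for the generated probe stub:

    // Sketch of the CpuFeatures bookkeeping introduced above. The enum
    // values are bit positions; OS::CpuFeaturesImpliedByPlatform() reports
    // features every machine targeted by this platform is guaranteed to have.
    void CpuFeatures::Probe() {
      if (Serializer::enabled()) {
        // Snapshots must be portable, so only platform-guaranteed
        // features may be recorded as supported.
        supported_ |= OS::CpuFeaturesImpliedByPlatform();
        return;
      }
      supported_ = RunCpuidProbe();  // hypothetical stand-in for the stub
      found_by_runtime_probing_ = supported_;
      uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
      supported_ |= os_guarantees;
      // OS-guaranteed features do not count as "runtime probed", so the
      // new Scope assert (below, in assembler-ia32.h) still allows them
      // while serializing.
      found_by_runtime_probing_ &= ~os_guarantees;
    }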
-void Assembler::shr(Register dst) { - EnsureSpace ensure_space(this); - last_pc_ = pc_; - EMIT(0xD3); - EMIT(0xE8 | dst.code()); + if (imm8 == 1) { + EMIT(0xD1); + EMIT(0xE8 | dst.code()); + } else { + EMIT(0xC1); + EMIT(0xE8 | dst.code()); + EMIT(imm8); + } } void Assembler::shr_cl(Register dst) { EnsureSpace ensure_space(this); last_pc_ = pc_; - EMIT(0xD1); + EMIT(0xD3); EMIT(0xE8 | dst.code()); } @@ -1316,7 +1321,7 @@ void Assembler::nop() { void Assembler::rdtsc() { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::RDTSC)); + ASSERT(CpuFeatures::IsEnabled(RDTSC)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x0F); @@ -1662,7 +1667,7 @@ void Assembler::fistp_s(const Operand& adr) { void Assembler::fisttp_s(const Operand& adr) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE3)); + ASSERT(CpuFeatures::IsEnabled(SSE3)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xDB); @@ -1923,7 +1928,7 @@ void Assembler::setcc(Condition cc, Register reg) { void Assembler::cvttss2si(Register dst, const Operand& src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF3); @@ -1934,7 +1939,7 @@ void Assembler::cvttss2si(Register dst, const Operand& src) { void Assembler::cvttsd2si(Register dst, const Operand& src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -1945,7 +1950,7 @@ void Assembler::cvttsd2si(Register dst, const Operand& src) { void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -1956,7 +1961,7 @@ void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) { void Assembler::addsd(XMMRegister dst, XMMRegister src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -1967,7 +1972,7 @@ void Assembler::addsd(XMMRegister dst, XMMRegister src) { void Assembler::mulsd(XMMRegister dst, XMMRegister src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -1978,7 +1983,7 @@ void Assembler::mulsd(XMMRegister dst, XMMRegister src) { void Assembler::subsd(XMMRegister dst, XMMRegister src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -1989,7 +1994,7 @@ void Assembler::subsd(XMMRegister dst, XMMRegister src) { void Assembler::divsd(XMMRegister dst, XMMRegister src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2000,7 +2005,7 @@ void Assembler::divsd(XMMRegister dst, XMMRegister src) { void Assembler::comisd(XMMRegister dst, XMMRegister src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2025,7 +2030,7 @@ void Assembler::movdbl(const Operand& dst, XMMRegister src) { void Assembler::movsd(const Operand& dst, XMMRegister src ) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); // 
double @@ -2036,7 +2041,7 @@ void Assembler::movsd(const Operand& dst, XMMRegister src ) { void Assembler::movsd(XMMRegister dst, const Operand& src) { - ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE2)); + ASSERT(CpuFeatures::IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); // double @@ -2245,10 +2250,15 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode reloc_info) { void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { ASSERT(rmode != RelocInfo::NONE); // Don't record external references unless the heap will be serialized. - if (rmode == RelocInfo::EXTERNAL_REFERENCE && - !Serializer::enabled() && - !FLAG_debug_code) { - return; + if (rmode == RelocInfo::EXTERNAL_REFERENCE) { +#ifdef DEBUG + if (!Serializer::enabled()) { + Serializer::TooLateToEnableNow(); + } +#endif + if (!Serializer::enabled() && !FLAG_debug_code) { + return; + } } RelocInfo rinfo(pc_, rmode, data); reloc_info_writer.Write(&rinfo); diff --git a/deps/v8/src/ia32/assembler-ia32.h b/deps/v8/src/ia32/assembler-ia32.h index a431d04c66..962206fb74 100644 --- a/deps/v8/src/ia32/assembler-ia32.h +++ b/deps/v8/src/ia32/assembler-ia32.h @@ -37,6 +37,8 @@ #ifndef V8_IA32_ASSEMBLER_IA32_H_ #define V8_IA32_ASSEMBLER_IA32_H_ +#include "serialize.h" + namespace v8 { namespace internal { @@ -358,15 +360,11 @@ class Displacement BASE_EMBEDDED { // } class CpuFeatures : public AllStatic { public: - // Feature flags bit positions. They are mostly based on the CPUID spec. - // (We assign CPUID itself to one of the currently reserved bits -- - // feel free to change this if needed.) - enum Feature { SSE3 = 32, SSE2 = 26, CMOV = 15, RDTSC = 4, CPUID = 10 }; // Detect features of the target CPU. Set safe defaults if the serializer // is enabled (snapshots must be portable). static void Probe(); // Check whether a feature is supported by the target CPU. - static bool IsSupported(Feature f) { + static bool IsSupported(CpuFeature f) { if (f == SSE2 && !FLAG_enable_sse2) return false; if (f == SSE3 && !FLAG_enable_sse3) return false; if (f == CMOV && !FLAG_enable_cmov) return false; @@ -374,29 +372,32 @@ class CpuFeatures : public AllStatic { return (supported_ & (static_cast<uint64_t>(1) << f)) != 0; } // Check whether a feature is currently enabled. - static bool IsEnabled(Feature f) { + static bool IsEnabled(CpuFeature f) { return (enabled_ & (static_cast<uint64_t>(1) << f)) != 0; } // Enable a specified feature within a scope. class Scope BASE_EMBEDDED { #ifdef DEBUG public: - explicit Scope(Feature f) { + explicit Scope(CpuFeature f) { + uint64_t mask = static_cast<uint64_t>(1) << f; ASSERT(CpuFeatures::IsSupported(f)); + ASSERT(!Serializer::enabled() || (found_by_runtime_probing_ & mask) == 0); old_enabled_ = CpuFeatures::enabled_; - CpuFeatures::enabled_ |= (static_cast<uint64_t>(1) << f); + CpuFeatures::enabled_ |= mask; } ~Scope() { CpuFeatures::enabled_ = old_enabled_; } private: uint64_t old_enabled_; #else public: - explicit Scope(Feature f) {} + explicit Scope(CpuFeature f) {} #endif }; private: static uint64_t supported_; static uint64_t enabled_; + static uint64_t found_by_runtime_probing_; }; @@ -440,12 +441,21 @@ class Assembler : public Malloced { inline static void set_target_address_at(Address pc, Address target); // This sets the branch destination (which is in the instruction on x86). + // This is for calls and branches within generated code. 
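In use, emitters guard optional instruction sets with IsSupported plus a Scope, matching the asserts the assembler now performs. A hypothetical emitter for illustration (EmitDoubleAdd is not from the patch; the pattern is the one the stubs below follow):

    // Inside the Scope, IsEnabled(SSE2) holds, so the ASSERT at the top of
    // addsd() passes; the Scope's new ASSERT additionally rejects
    // runtime-probed features while the serializer is active.
    void EmitDoubleAdd(MacroAssembler* masm) {  // hypothetical helper
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        masm->addsd(xmm0, xmm1);  // allowed only inside the scope
      } else {
        // ... x87 fallback path ...
      }
    }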
inline static void set_target_at(Address instruction_payload, Address target) { set_target_address_at(instruction_payload, target); } + // This sets the branch destination (which is in the instruction on x86). + // This is for calls and branches to runtime code. + inline static void set_external_target_at(Address instruction_payload, + Address target) { + set_target_address_at(instruction_payload, target); + } + static const int kCallTargetSize = kPointerSize; + static const int kExternalTargetSize = kPointerSize; // Distance between the address of the code target in the call instruction // and the return address @@ -587,19 +597,18 @@ class Assembler : public Malloced { void rcl(Register dst, uint8_t imm8); void sar(Register dst, uint8_t imm8); - void sar(Register dst); + void sar_cl(Register dst); void sbb(Register dst, const Operand& src); void shld(Register dst, const Operand& src); void shl(Register dst, uint8_t imm8); - void shl(Register dst); + void shl_cl(Register dst); void shrd(Register dst, const Operand& src); void shr(Register dst, uint8_t imm8); - void shr(Register dst); void shr_cl(Register dst); void subb(const Operand& dst, int8_t imm8); diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc index 963b0e3ac8..a164cfa85c 100644 --- a/deps/v8/src/ia32/builtins-ia32.cc +++ b/deps/v8/src/ia32/builtins-ia32.cc @@ -522,43 +522,26 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { __ push(Operand(ebp, 2 * kPointerSize)); // push arguments __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); - // Check the stack for overflow or a break request. - // We need to catch preemptions right here, otherwise an unlucky preemption - // could show up as a failed apply. - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); - Label retry_preemption; - Label no_preemption; - __ bind(&retry_preemption); - __ mov(edi, Operand::StaticVariable(stack_guard_limit)); - __ cmp(esp, Operand(edi)); - __ j(above, &no_preemption, taken); - - // Preemption! - // Because builtins always remove the receiver from the stack, we - // have to fake one to avoid underflowing the stack. - __ push(eax); - __ push(Immediate(Smi::FromInt(0))); - - // Do call to runtime routine. - __ CallRuntime(Runtime::kStackGuard, 1); - __ pop(eax); - __ jmp(&retry_preemption); - - __ bind(&no_preemption); - + // Check the stack for overflow. We are not trying need to catch + // interruptions (e.g. debug break and preemption) here, so the "real stack + // limit" is checked. Label okay; - // Make ecx the space we have left. + ExternalReference real_stack_limit = + ExternalReference::address_of_real_stack_limit(); + __ mov(edi, Operand::StaticVariable(real_stack_limit)); + // Make ecx the space we have left. The stack might already be overflowed + // here which will cause ecx to become negative. __ mov(ecx, Operand(esp)); __ sub(ecx, Operand(edi)); // Make edx the space we need for the array when it is unrolled onto the // stack. __ mov(edx, Operand(eax)); __ shl(edx, kPointerSizeLog2 - kSmiTagSize); + // Check if the arguments will overflow the stack. __ cmp(ecx, Operand(edx)); - __ j(greater, &okay, taken); + __ j(greater, &okay, taken); // Signed comparison. - // Too bad: Out of stack space. + // Out of stack space. __ push(Operand(ebp, 4 * kPointerSize)); // push this __ push(eax); __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); @@ -898,7 +881,7 @@ static void AllocateJSArray(MacroAssembler* masm, // be preserved. 
static void ArrayNativeCode(MacroAssembler* masm, bool construct_call, - Label *call_generic_code) { + Label* call_generic_code) { Label argc_one_or_more, argc_two_or_more, prepare_generic_code_call; // Push the constructor and argc. No need to tag argc as a smi, as there will diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc index 4ac5527699..69a17cd9b5 100644 --- a/deps/v8/src/ia32/codegen-ia32.cc +++ b/deps/v8/src/ia32/codegen-ia32.cc @@ -29,6 +29,7 @@ #include "bootstrapper.h" #include "codegen-inl.h" +#include "compiler.h" #include "debug.h" #include "ic-inl.h" #include "parser.h" @@ -75,7 +76,6 @@ void DeferredCode::RestoreRegisters() { CodeGenState::CodeGenState(CodeGenerator* owner) : owner_(owner), - typeof_state_(NOT_INSIDE_TYPEOF), destination_(NULL), previous_(NULL) { owner_->set_state(this); @@ -83,10 +83,8 @@ CodeGenState::CodeGenState(CodeGenerator* owner) CodeGenState::CodeGenState(CodeGenerator* owner, - TypeofState typeof_state, ControlDestination* destination) : owner_(owner), - typeof_state_(typeof_state), destination_(destination), previous_(owner->state()) { owner_->set_state(this); @@ -415,13 +413,12 @@ Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot, // partially compiled) into control flow to the control destination. // If force_control is true, control flow is forced. void CodeGenerator::LoadCondition(Expression* x, - TypeofState typeof_state, ControlDestination* dest, bool force_control) { ASSERT(!in_spilled_code()); int original_height = frame_->height(); - { CodeGenState new_state(this, typeof_state, dest); + { CodeGenState new_state(this, dest); Visit(x); // If we hit a stack overflow, we may not have actually visited @@ -450,17 +447,16 @@ void CodeGenerator::LoadCondition(Expression* x, } -void CodeGenerator::LoadAndSpill(Expression* expression, - TypeofState typeof_state) { +void CodeGenerator::LoadAndSpill(Expression* expression) { ASSERT(in_spilled_code()); set_in_spilled_code(false); - Load(expression, typeof_state); + Load(expression); frame_->SpillAll(); set_in_spilled_code(true); } -void CodeGenerator::Load(Expression* x, TypeofState typeof_state) { +void CodeGenerator::Load(Expression* expr) { #ifdef DEBUG int original_height = frame_->height(); #endif @@ -468,7 +464,7 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) { JumpTarget true_target; JumpTarget false_target; ControlDestination dest(&true_target, &false_target, true); - LoadCondition(x, typeof_state, &dest, false); + LoadCondition(expr, &dest, false); if (dest.false_was_fall_through()) { // The false target was just bound. @@ -543,23 +539,25 @@ void CodeGenerator::LoadGlobalReceiver() { } -// TODO(1241834): Get rid of this function in favor of just using Load, now -// that we have the INSIDE_TYPEOF typeof state. => Need to handle global -// variables w/o reference errors elsewhere. -void CodeGenerator::LoadTypeofExpression(Expression* x) { - Variable* variable = x->AsVariableProxy()->AsVariable(); +void CodeGenerator::LoadTypeofExpression(Expression* expr) { + // Special handling of identifiers as subexpressions of typeof. + Variable* variable = expr->AsVariableProxy()->AsVariable(); if (variable != NULL && !variable->is_this() && variable->is_global()) { - // NOTE: This is somewhat nasty. We force the compiler to load - // the variable as if through '<global>.<variable>' to make sure we - // do not get reference errors. 
+ // For a global variable we build the property reference + // <global>.<variable> and perform a (regular non-contextual) property + // load to make sure we do not get reference errors. Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX); Literal key(variable->name()); - // TODO(1241834): Fetch the position from the variable instead of using - // no position. Property property(&global, &key, RelocInfo::kNoPosition); - Load(&property); + Reference ref(this, &property); + ref.GetValue(); + } else if (variable != NULL && variable->slot() != NULL) { + // For a variable that rewrites to a slot, we signal it is the immediate + // subexpression of a typeof. + LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF); } else { - Load(x, INSIDE_TYPEOF); + // Anything else can be handled normally. + Load(expr); } } @@ -1190,12 +1188,12 @@ void CodeGenerator::LikelySmiBinaryOperation(Token::Value op, // Perform the operation. switch (op) { case Token::SAR: - __ sar(answer.reg()); + __ sar_cl(answer.reg()); // No checks of result necessary break; case Token::SHR: { Label result_ok; - __ shr(answer.reg()); + __ shr_cl(answer.reg()); // Check that the *unsigned* result fits in a smi. Neither of // the two high-order bits can be set: // * 0x80000000: high bit would be lost when smi tagging. @@ -1216,7 +1214,7 @@ void CodeGenerator::LikelySmiBinaryOperation(Token::Value op, } case Token::SHL: { Label result_ok; - __ shl(answer.reg()); + __ shl_cl(answer.reg()); // Check that the *signed* result fits in a smi. __ cmp(answer.reg(), 0xc0000000); __ j(positive, &result_ok); @@ -1970,27 +1968,6 @@ void CodeGenerator::Comparison(Condition cc, } -class CallFunctionStub: public CodeStub { - public: - CallFunctionStub(int argc, InLoopFlag in_loop) - : argc_(argc), in_loop_(in_loop) { } - - void Generate(MacroAssembler* masm); - - private: - int argc_; - InLoopFlag in_loop_; - -#ifdef DEBUG - void Print() { PrintF("CallFunctionStub (args %d)\n", argc_); } -#endif - - Major MajorKey() { return CallFunction; } - int MinorKey() { return argc_; } - InLoopFlag InLoop() { return in_loop_; } -}; - - // Call the function just below TOS on the stack with the given // arguments. The receiver is the TOS. void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args, @@ -2027,7 +2004,7 @@ void CodeGenerator::CallApplyLazy(Property* apply, // Load the apply function onto the stack. This will usually // give us a megamorphic load site. Not super, but it works. Reference ref(this, apply); - ref.GetValue(NOT_INSIDE_TYPEOF); + ref.GetValue(); ASSERT(ref.type() == Reference::NAMED); // Load the receiver and the existing arguments object onto the @@ -2204,9 +2181,9 @@ void DeferredStackCheck::Generate() { void CodeGenerator::CheckStack() { DeferredStackCheck* deferred = new DeferredStackCheck; - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); - __ cmp(esp, Operand::StaticVariable(stack_guard_limit)); + ExternalReference stack_limit = + ExternalReference::address_of_stack_limit(); + __ cmp(esp, Operand::StaticVariable(stack_limit)); deferred->Branch(below); deferred->BindExit(); } @@ -2366,7 +2343,7 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) { JumpTarget then; JumpTarget else_; ControlDestination dest(&then, &else_, true); - LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->condition(), &dest, true); if (dest.false_was_fall_through()) { // The else target was bound, so we compile the else part first. 
@@ -2393,7 +2370,7 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) { ASSERT(!has_else_stm); JumpTarget then; ControlDestination dest(&then, &exit, true); - LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->condition(), &dest, true); if (dest.false_was_fall_through()) { // The exit label was bound. We may have dangling jumps to the @@ -2413,7 +2390,7 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) { ASSERT(!has_then_stm); JumpTarget else_; ControlDestination dest(&exit, &else_, false); - LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->condition(), &dest, true); if (dest.true_was_fall_through()) { // The exit label was bound. We may have dangling jumps to the @@ -2435,7 +2412,7 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) { // or control flow effect). LoadCondition is called without // forcing control flow. ControlDestination dest(&exit, &exit, true); - LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, false); + LoadCondition(node->condition(), &dest, false); if (!dest.is_used()) { // We got a value on the frame rather than (or in addition to) // control flow. @@ -2472,6 +2449,7 @@ void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { CodeForStatementPosition(node); Load(node->expression()); Result return_value = frame_->Pop(); + masm()->WriteRecordedPositions(); if (function_return_is_shadowed_) { function_return_.Jump(&return_value); } else { @@ -2735,8 +2713,10 @@ void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) { node->continue_target()->Bind(); } if (has_valid_frame()) { + Comment cmnt(masm_, "[ DoWhileCondition"); + CodeForDoWhileConditionPosition(node); ControlDestination dest(&body, node->break_target(), false); - LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->cond(), &dest, true); } if (node->break_target()->is_linked()) { node->break_target()->Bind(); @@ -2791,7 +2771,7 @@ void CodeGenerator::VisitWhileStatement(WhileStatement* node) { // Compile the test with the body as the true target and preferred // fall-through and with the break target as the false target. ControlDestination dest(&body, node->break_target(), true); - LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->cond(), &dest, true); if (dest.false_was_fall_through()) { // If we got the break target as fall-through, the test may have @@ -2838,7 +2818,7 @@ void CodeGenerator::VisitWhileStatement(WhileStatement* node) { // The break target is the fall-through (body is a backward // jump from here and thus an invalid fall-through). ControlDestination dest(&body, node->break_target(), false); - LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->cond(), &dest, true); } } else { // If we have chosen not to recompile the test at the bottom, @@ -2929,7 +2909,7 @@ void CodeGenerator::VisitForStatement(ForStatement* node) { // Compile the test with the body as the true target and preferred // fall-through and with the break target as the false target. ControlDestination dest(&body, node->break_target(), true); - LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->cond(), &dest, true); if (dest.false_was_fall_through()) { // If we got the break target as fall-through, the test may have @@ -2999,7 +2979,7 @@ void CodeGenerator::VisitForStatement(ForStatement* node) { // The break target is the fall-through (body is a backward // jump from here). 
ControlDestination dest(&body, node->break_target(), false); - LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->cond(), &dest, true); } } else { // Otherwise, jump back to the test at the top. @@ -3574,7 +3554,8 @@ void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { Comment cmnt(masm_, "[ FunctionLiteral"); // Build the function boilerplate and instantiate it. - Handle<JSFunction> boilerplate = BuildBoilerplate(node); + Handle<JSFunction> boilerplate = + Compiler::BuildBoilerplate(node, script_, this); // Check for stack-overflow exception. if (HasStackOverflow()) return; InstantiateBoilerplate(boilerplate); @@ -3594,25 +3575,25 @@ void CodeGenerator::VisitConditional(Conditional* node) { JumpTarget else_; JumpTarget exit; ControlDestination dest(&then, &else_, true); - LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true); + LoadCondition(node->condition(), &dest, true); if (dest.false_was_fall_through()) { // The else target was bound, so we compile the else part first. - Load(node->else_expression(), typeof_state()); + Load(node->else_expression()); if (then.is_linked()) { exit.Jump(); then.Bind(); - Load(node->then_expression(), typeof_state()); + Load(node->then_expression()); } } else { // The then target was bound, so we compile the then part first. - Load(node->then_expression(), typeof_state()); + Load(node->then_expression()); if (else_.is_linked()) { exit.Jump(); else_.Bind(); - Load(node->else_expression(), typeof_state()); + Load(node->else_expression()); } } @@ -3934,7 +3915,7 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { void CodeGenerator::VisitSlot(Slot* node) { Comment cmnt(masm_, "[ Slot"); - LoadFromSlotCheckForArguments(node, typeof_state()); + LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF); } @@ -3947,7 +3928,7 @@ void CodeGenerator::VisitVariableProxy(VariableProxy* node) { } else { ASSERT(var->is_global()); Reference ref(this, node); - ref.GetValue(typeof_state()); + ref.GetValue(); } } @@ -3958,12 +3939,28 @@ void CodeGenerator::VisitLiteral(Literal* node) { } -void CodeGenerator::LoadUnsafeSmi(Register target, Handle<Object> value) { +void CodeGenerator::PushUnsafeSmi(Handle<Object> value) { + ASSERT(value->IsSmi()); + int bits = reinterpret_cast<int>(*value); + __ push(Immediate(bits & 0x0000FFFF)); + __ or_(Operand(esp, 0), Immediate(bits & 0xFFFF0000)); +} + + +void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) { + ASSERT(value->IsSmi()); + int bits = reinterpret_cast<int>(*value); + __ mov(Operand(ebp, offset), Immediate(bits & 0x0000FFFF)); + __ or_(Operand(ebp, offset), Immediate(bits & 0xFFFF0000)); +} + + +void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) { ASSERT(target.is_valid()); ASSERT(value->IsSmi()); int bits = reinterpret_cast<int>(*value); __ Set(target, Immediate(bits & 0x0000FFFF)); - __ xor_(target, bits & 0xFFFF0000); + __ or_(target, bits & 0xFFFF0000); } @@ -4354,9 +4351,9 @@ void CodeGenerator::VisitAssignment(Assignment* node) { // the target, with an implicit promise that it will be written to again // before it is read. 
if (literal != NULL || (right_var != NULL && right_var != var)) { - target.TakeValue(NOT_INSIDE_TYPEOF); + target.TakeValue(); } else { - target.GetValue(NOT_INSIDE_TYPEOF); + target.GetValue(); } Load(node->value()); GenericBinaryOperation(node->binary_op(), @@ -4404,7 +4401,7 @@ void CodeGenerator::VisitThrow(Throw* node) { void CodeGenerator::VisitProperty(Property* node) { Comment cmnt(masm_, "[ Property"); Reference property(this, node); - property.GetValue(typeof_state()); + property.GetValue(); } @@ -4589,7 +4586,7 @@ void CodeGenerator::VisitCall(Call* node) { // Load the function to call from the property through a reference. Reference ref(this, property); - ref.GetValue(NOT_INSIDE_TYPEOF); + ref.GetValue(); // Pass receiver to called function. if (property->is_synthetic()) { @@ -4699,10 +4696,10 @@ void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { // This generates code that performs a charCodeAt() call or returns // undefined in order to trigger the slow case, Runtime_StringCharCodeAt. -// It can handle flat and sliced strings, 8 and 16 bit characters and -// cons strings where the answer is found in the left hand branch of the -// cons. The slow case will flatten the string, which will ensure that -// the answer is in the left hand side the next time around. +// It can handle flat, 8 and 16 bit characters and cons strings where the +// answer is found in the left hand branch of the cons. The slow case will +// flatten the string, which will ensure that the answer is in the left hand +// side the next time around. void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { Comment(masm_, "[ GenerateFastCharCodeAt"); ASSERT(args->length() == 2); @@ -4710,7 +4707,6 @@ void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { Label slow_case; Label end; Label not_a_flat_string; - Label a_cons_string; Label try_again_with_new_string; Label ascii_string; Label got_char_code; @@ -4792,7 +4788,7 @@ void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { __ add(Operand(ecx), Immediate(String::kLongLengthShift)); // Fetch the length field into the temporary register. __ mov(temp.reg(), FieldOperand(object.reg(), String::kLengthOffset)); - __ shr(temp.reg()); // The shift amount in ecx is implicit operand. + __ shr_cl(temp.reg()); // Check for index out of range. __ cmp(index.reg(), Operand(temp.reg())); __ j(greater_equal, &slow_case); @@ -4832,21 +4828,16 @@ void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { __ bind(¬_a_flat_string); __ and_(temp.reg(), kStringRepresentationMask); __ cmp(temp.reg(), kConsStringTag); - __ j(equal, &a_cons_string); - __ cmp(temp.reg(), kSlicedStringTag); __ j(not_equal, &slow_case); - // SlicedString. - // Add the offset to the index and trigger the slow case on overflow. - __ add(index.reg(), FieldOperand(object.reg(), SlicedString::kStartOffset)); - __ j(overflow, &slow_case); - // Getting the underlying string is done by running the cons string code. - // ConsString. - __ bind(&a_cons_string); - // Get the first of the two strings. Both sliced and cons strings - // store their source string at the same offset. - ASSERT(SlicedString::kBufferOffset == ConsString::kFirstOffset); + // Check that the right hand side is the empty string (ie if this is really a + // flat string in a cons string). If that is not the case we would rather go + // to the runtime system now, to flatten the string. 
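The charCodeAt change above drops SlicedString support and instead recognizes one special cons-string shape: a cons whose second child is the empty string, which is exactly what flattening produces. The control flow, sketched with hypothetical helpers (IsFlat, IsCons, First, Second, SlowFlattenAndRetry mirror the generated checks):

    // Sketch of the fast-path loop in GenerateFastCharCodeAt.
    int CharCodeAt(String* str, int index) {
      while (true) {
        if (IsFlat(str)) return FlatCharAt(str, index);  // 8- or 16-bit
        if (IsCons(str) && Second(str) == Heap::empty_string()) {
          // A flattened cons keeps its contents in the left child.
          str = First(str);
          continue;  // "try again with new string"
        }
        // Anything else: let the runtime flatten the string, so the
        // next call takes the fast path.
        return SlowFlattenAndRetry(str, index);
      }
    }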
+ __ mov(temp.reg(), FieldOperand(object.reg(), ConsString::kSecondOffset)); + __ cmp(Operand(temp.reg()), Immediate(Handle<String>(Heap::empty_string()))); + __ j(not_equal, &slow_case); + // Get the first of the two strings. __ mov(object.reg(), FieldOperand(object.reg(), ConsString::kFirstOffset)); __ jmp(&try_again_with_new_string); @@ -5224,9 +5215,6 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) { void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { - // Note that because of NOT and an optimization in comparison of a typeof - // expression to a literal string, this function can fail to leave a value - // on top of the frame or in the cc register. Comment cmnt(masm_, "[ UnaryOperation"); Token::Value op = node->op(); @@ -5235,7 +5223,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { // Swap the true and false targets but keep the same actual label // as the fall through. destination()->Invert(); - LoadCondition(node->expression(), NOT_INSIDE_TYPEOF, destination(), true); + LoadCondition(node->expression(), destination(), true); // Swap the labels back. destination()->Invert(); @@ -5485,7 +5473,7 @@ void CodeGenerator::VisitCountOperation(CountOperation* node) { if (!is_postfix) frame_->Push(Smi::FromInt(0)); return; } - target.TakeValue(NOT_INSIDE_TYPEOF); + target.TakeValue(); Result new_value = frame_->Pop(); new_value.ToRegister(); @@ -5563,9 +5551,6 @@ void CodeGenerator::VisitCountOperation(CountOperation* node) { void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) { - // Note that due to an optimization in comparison operations (typeof - // compared to a string literal), we can evaluate a binary expression such - // as AND or OR and not leave a value on the frame or in the cc register. Comment cmnt(masm_, "[ BinaryOperation"); Token::Value op = node->op(); @@ -5581,7 +5566,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) { if (op == Token::AND) { JumpTarget is_true; ControlDestination dest(&is_true, destination()->false_target(), true); - LoadCondition(node->left(), NOT_INSIDE_TYPEOF, &dest, false); + LoadCondition(node->left(), &dest, false); if (dest.false_was_fall_through()) { // The current false target was used as the fall-through. If @@ -5600,7 +5585,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) { is_true.Bind(); // The left subexpression compiled to control flow, so the // right one is free to do so as well. - LoadCondition(node->right(), NOT_INSIDE_TYPEOF, destination(), false); + LoadCondition(node->right(), destination(), false); } else { // We have actually just jumped to or bound the current false // target but the current control destination is not marked as @@ -5611,7 +5596,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) { } else if (dest.is_used()) { // The left subexpression compiled to control flow (and is_true // was just bound), so the right is free to do so as well. 
- LoadCondition(node->right(), NOT_INSIDE_TYPEOF, destination(), false); + LoadCondition(node->right(), destination(), false); } else { // We have a materialized value on the frame, so we exit with @@ -5644,7 +5629,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) { } else if (op == Token::OR) { JumpTarget is_false; ControlDestination dest(destination()->true_target(), &is_false, false); - LoadCondition(node->left(), NOT_INSIDE_TYPEOF, &dest, false); + LoadCondition(node->left(), &dest, false); if (dest.true_was_fall_through()) { // The current true target was used as the fall-through. If @@ -5663,7 +5648,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) { is_false.Bind(); // The left subexpression compiled to control flow, so the // right one is free to do so as well. - LoadCondition(node->right(), NOT_INSIDE_TYPEOF, destination(), false); + LoadCondition(node->right(), destination(), false); } else { // We have just jumped to or bound the current true target but // the current control destination is not marked as used. @@ -5673,7 +5658,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) { } else if (dest.is_used()) { // The left subexpression compiled to control flow (and is_false // was just bound), so the right is free to do so as well. - LoadCondition(node->right(), NOT_INSIDE_TYPEOF, destination(), false); + LoadCondition(node->right(), destination(), false); } else { // We have a materialized value on the frame, so we exit with @@ -5805,6 +5790,9 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) { destination()->false_target()->Branch(zero); frame_->Spill(answer.reg()); __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg()); + destination()->true_target()->Branch(equal); + // Regular expressions are callable so typeof == 'function'. + __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE); answer.Unuse(); destination()->Split(equal); @@ -5814,10 +5802,13 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) { __ cmp(answer.reg(), Factory::null_value()); destination()->true_target()->Branch(equal); - // It can be an undetectable object. Result map = allocator()->Allocate(); ASSERT(map.is_valid()); - __ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); + // Regular expressions are typeof == 'function', not 'object'. + __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg()); + destination()->false_target()->Branch(equal); + + // It can be an undetectable object. __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset)); __ test(map.reg(), Immediate(1 << Map::kIsUndetectable)); destination()->false_target()->Branch(not_zero); @@ -6066,7 +6057,7 @@ Handle<String> Reference::GetName() { } -void Reference::GetValue(TypeofState typeof_state) { +void Reference::GetValue() { ASSERT(!cgen_->in_spilled_code()); ASSERT(cgen_->HasValidEntryRegisters()); ASSERT(!is_illegal()); @@ -6083,17 +6074,11 @@ void Reference::GetValue(TypeofState typeof_state) { Comment cmnt(masm, "[ Load from Slot"); Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); ASSERT(slot != NULL); - cgen_->LoadFromSlotCheckForArguments(slot, typeof_state); + cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF); break; } case NAMED: { - // TODO(1241834): Make sure that it is safe to ignore the - // distinction between expressions in a typeof and not in a - // typeof. 
If there is a chance that reference errors can be - // thrown below, we must distinguish between the two kinds of - // loads (typeof expression loads must not throw a reference - // error). Variable* var = expression_->AsVariableProxy()->AsVariable(); bool is_global = var != NULL; ASSERT(!is_global || var->is_global()); @@ -6163,8 +6148,6 @@ void Reference::GetValue(TypeofState typeof_state) { } case KEYED: { - // TODO(1241834): Make sure that this it is safe to ignore the - // distinction between expressions in a typeof and not in a typeof. Comment cmnt(masm, "[ Load from keyed Property"); Variable* var = expression_->AsVariableProxy()->AsVariable(); bool is_global = var != NULL; @@ -6283,13 +6266,13 @@ void Reference::GetValue(TypeofState typeof_state) { } -void Reference::TakeValue(TypeofState typeof_state) { +void Reference::TakeValue() { // For non-constant frame-allocated slots, we invalidate the value in the // slot. For all others, we fall back on GetValue. ASSERT(!cgen_->in_spilled_code()); ASSERT(!is_illegal()); if (type_ != SLOT) { - GetValue(typeof_state); + GetValue(); return; } @@ -6299,7 +6282,7 @@ void Reference::TakeValue(TypeofState typeof_state) { slot->type() == Slot::CONTEXT || slot->var()->mode() == Variable::CONST || slot->is_arguments()) { - GetValue(typeof_state); + GetValue(); return; } @@ -6728,11 +6711,11 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { // Perform the operation. switch (op_) { case Token::SAR: - __ sar(eax); + __ sar_cl(eax); // No checks of result necessary break; case Token::SHR: - __ shr(eax); + __ shr_cl(eax); // Check that the *unsigned* result fits in a smi. // Neither of the two high-order bits can be set: // - 0x80000000: high bit would be lost when smi tagging. @@ -6743,7 +6726,7 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { __ j(not_zero, slow, not_taken); break; case Token::SHL: - __ shl(eax); + __ shl_cl(eax); // Check that the *signed* result fits in a smi. __ cmp(eax, 0xc0000000); __ j(sign, slow, not_taken); @@ -6793,8 +6776,8 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { // eax: y // edx: x - if (CpuFeatures::IsSupported(CpuFeatures::SSE2)) { - CpuFeatures::Scope use_sse2(CpuFeatures::SSE2); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatures::Scope use_sse2(SSE2); FloatingPointHelper::LoadSse2Operands(masm, &call_runtime); switch (op_) { @@ -6889,7 +6872,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { if (use_sse3_) { // Truncate the operands to 32-bit integers and check for // exceptions in doing so. - CpuFeatures::Scope scope(CpuFeatures::SSE3); + CpuFeatures::Scope scope(SSE3); __ fisttp_s(Operand(esp, 0 * kPointerSize)); __ fisttp_s(Operand(esp, 1 * kPointerSize)); __ fnstsw_ax(); @@ -6918,9 +6901,9 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { case Token::BIT_OR: __ or_(eax, Operand(ecx)); break; case Token::BIT_AND: __ and_(eax, Operand(ecx)); break; case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break; - case Token::SAR: __ sar(eax); break; - case Token::SHL: __ shl(eax); break; - case Token::SHR: __ shr(eax); break; + case Token::SAR: __ sar_cl(eax); break; + case Token::SHL: __ shl_cl(eax); break; + case Token::SHR: __ shr_cl(eax); break; default: UNREACHABLE(); } if (op_ == Token::SHR) { @@ -7516,9 +7499,9 @@ void CompareStub::Generate(MacroAssembler* masm) { // Call builtin if operands are not floating point or smi. 
Label check_for_symbols; Label unordered; - if (CpuFeatures::IsSupported(CpuFeatures::SSE2)) { - CpuFeatures::Scope use_sse2(CpuFeatures::SSE2); - CpuFeatures::Scope use_cmov(CpuFeatures::CMOV); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatures::Scope use_sse2(SSE2); + CpuFeatures::Scope use_cmov(CMOV); FloatingPointHelper::LoadSse2Operands(masm, &check_for_symbols); __ comisd(xmm0, xmm1); @@ -7707,11 +7690,84 @@ void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { } +// If true, a Handle<T> passed by value is passed and returned by +// using the location_ field directly. If false, it is passed and +// returned as a pointer to a handle. +#ifdef USING_MAC_ABI +static const bool kPassHandlesDirectly = true; +#else +static const bool kPassHandlesDirectly = false; +#endif + + +void ApiGetterEntryStub::Generate(MacroAssembler* masm) { + Label get_result; + Label prologue; + Label promote_scheduled_exception; + __ EnterApiExitFrame(ExitFrame::MODE_NORMAL, kStackSpace, kArgc); + ASSERT_EQ(kArgc, 4); + if (kPassHandlesDirectly) { + // When handles as passed directly we don't have to allocate extra + // space for and pass an out parameter. + __ mov(Operand(esp, 0 * kPointerSize), ebx); // name. + __ mov(Operand(esp, 1 * kPointerSize), eax); // arguments pointer. + } else { + // The function expects three arguments to be passed but we allocate + // four to get space for the output cell. The argument slots are filled + // as follows: + // + // 3: output cell + // 2: arguments pointer + // 1: name + // 0: pointer to the output cell + // + // Note that this is one more "argument" than the function expects + // so the out cell will have to be popped explicitly after returning + // from the function. + __ mov(Operand(esp, 1 * kPointerSize), ebx); // name. + __ mov(Operand(esp, 2 * kPointerSize), eax); // arguments pointer. + __ mov(ebx, esp); + __ add(Operand(ebx), Immediate(3 * kPointerSize)); + __ mov(Operand(esp, 0 * kPointerSize), ebx); // output + __ mov(Operand(esp, 3 * kPointerSize), Immediate(0)); // out cell. + } + // Call the api function! + __ call(fun()->address(), RelocInfo::RUNTIME_ENTRY); + // Check if the function scheduled an exception. + ExternalReference scheduled_exception_address = + ExternalReference::scheduled_exception_address(); + __ cmp(Operand::StaticVariable(scheduled_exception_address), + Immediate(Factory::the_hole_value())); + __ j(not_equal, &promote_scheduled_exception, not_taken); + if (!kPassHandlesDirectly) { + // The returned value is a pointer to the handle holding the result. + // Dereference this to get to the location. + __ mov(eax, Operand(eax, 0)); + } + // Check if the result handle holds 0 + __ test(eax, Operand(eax)); + __ j(not_zero, &get_result, taken); + // It was zero; the result is undefined. + __ mov(eax, Factory::undefined_value()); + __ jmp(&prologue); + // It was non-zero. Dereference to get the result value. 
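The extra dereference in the !kPassHandlesDirectly path of ApiGetterEntryStub exists because the callee then returns a pointer to a handle (the out cell) rather than the handle itself. An illustrative model of the result handling, where raw_result plays the role of eax after the api call returns:

    Object* LoadApiResult(void* raw_result, bool handles_direct) {
      Object** handle = handles_direct
          ? reinterpret_cast<Object**>(raw_result)     // eax is the handle
          : *reinterpret_cast<Object***>(raw_result);  // eax -> out cell
      if (handle == NULL) return Heap::undefined_value();  // empty handle
      return *handle;  // dereference the handle to reach the value
    }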
+ __ bind(&get_result); + __ mov(eax, Operand(eax, 0)); + __ bind(&prologue); + __ LeaveExitFrame(ExitFrame::MODE_NORMAL); + __ ret(0); + __ bind(&promote_scheduled_exception); + __ TailCallRuntime(ExternalReference(Runtime::kPromoteScheduledException), + 0, + 1); +} + + void CEntryStub::GenerateCore(MacroAssembler* masm, Label* throw_normal_exception, Label* throw_termination_exception, Label* throw_out_of_memory_exception, - StackFrame::Type frame_type, + ExitFrame::Mode mode, bool do_gc, bool always_allocate_scope) { // eax: result parameter for PerformGC, if any @@ -7761,7 +7817,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ j(zero, &failure_returned, not_taken); // Exit the JavaScript to C++ exit frame. - __ LeaveExitFrame(frame_type); + __ LeaveExitFrame(mode); __ ret(0); // Handling of failure. @@ -7860,12 +7916,12 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) { // of a proper result. The builtin entry handles this by performing // a garbage collection and retrying the builtin (twice). - StackFrame::Type frame_type = is_debug_break ? - StackFrame::EXIT_DEBUG : - StackFrame::EXIT; + ExitFrame::Mode mode = is_debug_break + ? ExitFrame::MODE_DEBUG + : ExitFrame::MODE_NORMAL; // Enter the exit frame that transitions from JavaScript to C++. - __ EnterExitFrame(frame_type); + __ EnterExitFrame(mode); // eax: result parameter for PerformGC, if any (setup below) // ebx: pointer to builtin function (C callee-saved) @@ -7883,7 +7939,7 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) { &throw_normal_exception, &throw_termination_exception, &throw_out_of_memory_exception, - frame_type, + mode, false, false); @@ -7892,7 +7948,7 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) { &throw_normal_exception, &throw_termination_exception, &throw_out_of_memory_exception, - frame_type, + mode, true, false); @@ -7903,7 +7959,7 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) { &throw_normal_exception, &throw_termination_exception, &throw_out_of_memory_exception, - frame_type, + mode, true, true); diff --git a/deps/v8/src/ia32/codegen-ia32.h b/deps/v8/src/ia32/codegen-ia32.h index 3669e9d10d..0e69a63d89 100644 --- a/deps/v8/src/ia32/codegen-ia32.h +++ b/deps/v8/src/ia32/codegen-ia32.h @@ -77,12 +77,12 @@ class Reference BASE_EMBEDDED { // Generate code to push the value of the reference on top of the // expression stack. The reference is expected to be already on top of // the expression stack, and it is left in place with its value above it. - void GetValue(TypeofState typeof_state); + void GetValue(); // Like GetValue except that the slot is expected to be written to before // being read from again. Thae value of the reference may be invalidated, // causing subsequent attempts to read it to fail. - void TakeValue(TypeofState typeof_state); + void TakeValue(); // Generate code to store the value on top of the expression stack in the // reference. The reference is expected to be immediately below the value @@ -241,28 +241,20 @@ class CodeGenState BASE_EMBEDDED { explicit CodeGenState(CodeGenerator* owner); // Create a code generator state based on a code generator's current - // state. The new state may or may not be inside a typeof, and has its - // own control destination. - CodeGenState(CodeGenerator* owner, - TypeofState typeof_state, - ControlDestination* destination); + // state. The new state has its own control destination. 
+ CodeGenState(CodeGenerator* owner, ControlDestination* destination); // Destroy a code generator state and restore the owning code generator's // previous state. ~CodeGenState(); // Accessors for the state. - TypeofState typeof_state() const { return typeof_state_; } ControlDestination* destination() const { return destination_; } private: // The owning code generator. CodeGenerator* owner_; - // A flag indicating whether we are compiling the immediate subexpression - // of a typeof expression. - TypeofState typeof_state_; - // A control destination in case the expression has a control-flow // effect. ControlDestination* destination_; @@ -307,17 +299,12 @@ class CodeGenerator: public AstVisitor { static bool ShouldGenerateLog(Expression* type); #endif - static void SetFunctionInfo(Handle<JSFunction> fun, - FunctionLiteral* lit, - bool is_toplevel, - Handle<Script> script); - static void RecordPositions(MacroAssembler* masm, int pos); // Accessors MacroAssembler* masm() { return masm_; } - VirtualFrame* frame() const { return frame_; } + Handle<Script> script() { return script_; } bool has_valid_frame() const { return frame_ != NULL; } @@ -352,7 +339,6 @@ class CodeGenerator: public AstVisitor { void ProcessDeferred(); // State - TypeofState typeof_state() const { return state_->typeof_state(); } ControlDestination* destination() const { return state_->destination(); } // Track loop nesting level. @@ -412,18 +398,16 @@ class CodeGenerator: public AstVisitor { } void LoadCondition(Expression* x, - TypeofState typeof_state, ControlDestination* destination, bool force_control); - void Load(Expression* x, TypeofState typeof_state = NOT_INSIDE_TYPEOF); + void Load(Expression* expr); void LoadGlobal(); void LoadGlobalReceiver(); // Generate code to push the value of an expression on top of the frame // and then spill the frame fully to memory. This function is used // temporarily while the code generator is being transformed. - void LoadAndSpill(Expression* expression, - TypeofState typeof_state = NOT_INSIDE_TYPEOF); + void LoadAndSpill(Expression* expression); // Read a value from a slot and leave it on top of the expression stack. void LoadFromSlot(Slot* slot, TypeofState typeof_state); @@ -484,9 +468,11 @@ class CodeGenerator: public AstVisitor { // than 16 bits. static const int kMaxSmiInlinedBits = 16; bool IsUnsafeSmi(Handle<Object> value); - // Load an integer constant x into a register target using + // Load an integer constant x into a register target or into the stack using // at most 16 bits of user-controlled data per assembly operation. 
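The renamed MoveUnsafeSmi and its new Push/Store siblings all apply the same trick: split the 32-bit constant so that no single instruction embeds more than 16 bits chosen by the program being compiled, keeping attacker-controlled immediates too short to hide useful code in. A sketch (EmitUnsafeSmi is an illustrative wrapper around the calls the patch emits):

    // Emit an "unsafe" smi constant in two halves. After Set() the high
    // bits of target are zero, so or_() (previously xor_, equivalent when
    // the high half starts zeroed) deposits the upper half.
    void EmitUnsafeSmi(MacroAssembler* masm, Register target, int bits) {
      masm->Set(target, Immediate(bits & 0x0000FFFF));  // low 16 bits
      masm->or_(target, bits & 0xFFFF0000);             // high 16 bits
    }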
- void LoadUnsafeSmi(Register target, Handle<Object> value); + void MoveUnsafeSmi(Register target, Handle<Object> value); + void StoreUnsafeSmiToLocal(int offset, Handle<Object> value); + void PushUnsafeSmi(Handle<Object> value); void CallWithArguments(ZoneList<Expression*>* arguments, int position); @@ -511,8 +497,6 @@ class CodeGenerator: public AstVisitor { const InlineRuntimeLUT& new_entry, InlineRuntimeLUT* old_entry); - static Handle<Code> ComputeLazyCompile(int argc); - Handle<JSFunction> BuildBoilerplate(FunctionLiteral* node); void ProcessDeclarations(ZoneList<Declaration*>* declarations); static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop); @@ -574,6 +558,7 @@ class CodeGenerator: public AstVisitor { void CodeForFunctionPosition(FunctionLiteral* fun); void CodeForReturnPosition(FunctionLiteral* fun); void CodeForStatementPosition(Statement* stmt); + void CodeForDoWhileConditionPosition(DoWhileStatement* stmt); void CodeForSourcePosition(int pos); #ifdef DEBUG @@ -626,6 +611,27 @@ class CodeGenerator: public AstVisitor { }; +class CallFunctionStub: public CodeStub { + public: + CallFunctionStub(int argc, InLoopFlag in_loop) + : argc_(argc), in_loop_(in_loop) { } + + void Generate(MacroAssembler* masm); + + private: + int argc_; + InLoopFlag in_loop_; + +#ifdef DEBUG + void Print() { PrintF("CallFunctionStub (args %d)\n", argc_); } +#endif + + Major MajorKey() { return CallFunction; } + int MinorKey() { return argc_; } + InLoopFlag InLoop() { return in_loop_; } +}; + + class ToBooleanStub: public CodeStub { public: ToBooleanStub() { } @@ -655,7 +661,7 @@ class GenericBinaryOpStub: public CodeStub { flags_(flags), args_in_registers_(false), args_reversed_(false) { - use_sse3_ = CpuFeatures::IsSupported(CpuFeatures::SSE3); + use_sse3_ = CpuFeatures::IsSupported(SSE3); ASSERT(OpBits::is_valid(Token::NUM_TOKENS)); } diff --git a/deps/v8/src/ia32/disasm-ia32.cc b/deps/v8/src/ia32/disasm-ia32.cc index 3e3ca73e6b..df5a28a54b 100644 --- a/deps/v8/src/ia32/disasm-ia32.cc +++ b/deps/v8/src/ia32/disasm-ia32.cc @@ -272,6 +272,17 @@ class DisassemblerIA32 { }; + enum ShiftOpcodeExtension { + kROL = 0, + kROR = 1, + kRCL = 2, + kRCR = 3, + kSHL = 4, + KSHR = 5, + kSAR = 7 + }; + + const char* NameOfCPURegister(int reg) const { return converter_.NameOfCPURegister(reg); } @@ -536,31 +547,22 @@ int DisassemblerIA32::D1D3C1Instruction(byte* data) { int num_bytes = 2; if (mod == 3) { const char* mnem = NULL; + switch (regop) { + case kROL: mnem = "rol"; break; + case kROR: mnem = "ror"; break; + case kRCL: mnem = "rcl"; break; + case kSHL: mnem = "shl"; break; + case KSHR: mnem = "shr"; break; + case kSAR: mnem = "sar"; break; + default: UnimplementedInstruction(); + } if (op == 0xD1) { imm8 = 1; - switch (regop) { - case edx: mnem = "rcl"; break; - case edi: mnem = "sar"; break; - case esp: mnem = "shl"; break; - default: UnimplementedInstruction(); - } } else if (op == 0xC1) { imm8 = *(data+2); num_bytes = 3; - switch (regop) { - case edx: mnem = "rcl"; break; - case esp: mnem = "shl"; break; - case ebp: mnem = "shr"; break; - case edi: mnem = "sar"; break; - default: UnimplementedInstruction(); - } } else if (op == 0xD3) { - switch (regop) { - case esp: mnem = "shl"; break; - case ebp: mnem = "shr"; break; - case edi: mnem = "sar"; break; - default: UnimplementedInstruction(); - } + // Shift/rotate by cl. 
} ASSERT_NE(NULL, mnem); AppendToBuffer("%s %s,", mnem, NameOfCPURegister(rm)); diff --git a/deps/v8/src/ia32/fast-codegen-ia32.cc b/deps/v8/src/ia32/fast-codegen-ia32.cc index 247f124962..a01d754e47 100644 --- a/deps/v8/src/ia32/fast-codegen-ia32.cc +++ b/deps/v8/src/ia32/fast-codegen-ia32.cc @@ -28,8 +28,10 @@ #include "v8.h" #include "codegen-inl.h" +#include "compiler.h" #include "fast-codegen.h" #include "parser.h" +#include "debug.h" namespace v8 { namespace internal { @@ -60,102 +62,341 @@ void FastCodeGenerator::Generate(FunctionLiteral* fun) { { Comment cmnt(masm_, "[ Allocate locals"); int locals_count = fun->scope()->num_stack_slots(); - for (int i = 0; i < locals_count; i++) { + if (locals_count == 1) { __ push(Immediate(Factory::undefined_value())); + } else if (locals_count > 1) { + __ mov(eax, Immediate(Factory::undefined_value())); + for (int i = 0; i < locals_count; i++) { + __ push(eax); + } + } + } + + bool function_in_register = true; + + Variable* arguments = fun->scope()->arguments()->AsVariable(); + if (arguments != NULL) { + // Function uses arguments object. + Comment cmnt(masm_, "[ Allocate arguments object"); + __ push(edi); + // Receiver is just before the parameters on the caller's stack. + __ lea(edx, Operand(ebp, StandardFrameConstants::kCallerSPOffset + + fun->num_parameters() * kPointerSize)); + __ push(edx); + __ push(Immediate(Smi::FromInt(fun->num_parameters()))); + // Arguments to ArgumentsAccessStub: + // function, receiver address, parameter count. + // The stub will rewrite receiever and parameter count if the previous + // stack frame was an arguments adapter frame. + ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); + __ CallStub(&stub); + __ mov(Operand(ebp, SlotOffset(arguments->slot())), eax); + Slot* dot_arguments_slot = + fun->scope()->arguments_shadow()->AsVariable()->slot(); + __ mov(Operand(ebp, SlotOffset(dot_arguments_slot)), eax); + + function_in_register = false; + } + + // Possibly allocate a local context. + if (fun->scope()->num_heap_slots() > 0) { + Comment cmnt(masm_, "[ Allocate local context"); + if (function_in_register) { + // Argument to NewContext is the function, still in edi. + __ push(edi); + } else { + // Argument to NewContext is the function, no longer in edi. + __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); + } + __ CallRuntime(Runtime::kNewContext, 1); + // Context is returned in both eax and esi. It replaces the context + // passed to us. It's saved in the stack and kept live in esi. + __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); +#ifdef DEBUG + // Assert we do not have to copy any parameters into the context. 
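For reference on the disassembler rewrite above: the three opcodes D1D3C1Instruction handles form one x86 "group 2" family, where the reg field of the ModR/M byte selects the operation and the opcode selects the count source (0xD1: by 1, 0xC1: by imm8, 0xD3: by CL), which is why a single switch now replaces the three per-opcode ones. A table-driven equivalent:

    // Group-2 shift/rotate mnemonics indexed by the ModR/M reg field.
    // Slot 6 is left empty: it is the undocumented SHL alias.
    static const char* kShiftMnemonics[8] =
        { "rol", "ror", "rcl", "rcr", "shl", "shr", NULL, "sar" };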
+ for (int i = 0, len = fun->scope()->num_parameters(); i < len; i++) { + Slot* slot = fun->scope()->parameter(i)->slot(); + ASSERT(slot != NULL && slot->type() != Slot::CONTEXT); } +#endif + } + + { Comment cmnt(masm_, "[ Declarations"); + VisitDeclarations(fun->scope()->declarations()); } { Comment cmnt(masm_, "[ Stack check"); Label ok; - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); - __ cmp(esp, Operand::StaticVariable(stack_guard_limit)); + ExternalReference stack_limit = + ExternalReference::address_of_stack_limit(); + __ cmp(esp, Operand::StaticVariable(stack_limit)); __ j(above_equal, &ok, taken); StackCheckStub stub; __ CallStub(&stub); __ bind(&ok); } - { Comment cmnt(masm_, "[ Declarations"); - VisitDeclarations(fun->scope()->declarations()); - } - if (FLAG_trace) { __ CallRuntime(Runtime::kTraceEnter, 0); } { Comment cmnt(masm_, "[ Body"); + ASSERT(loop_depth() == 0); VisitStatements(fun->body()); + ASSERT(loop_depth() == 0); } { Comment cmnt(masm_, "[ return <undefined>;"); - // Emit a 'return undefined' in case control fell off the end of the - // body. + // Emit a 'return undefined' in case control fell off the end of the body. __ mov(eax, Factory::undefined_value()); - SetReturnPosition(fun); + EmitReturnSequence(function_->end_position()); + } +} + +void FastCodeGenerator::EmitReturnSequence(int position) { + Comment cmnt(masm_, "[ Return sequence"); + if (return_label_.is_bound()) { + __ jmp(&return_label_); + } else { + // Common return label + __ bind(&return_label_); if (FLAG_trace) { __ push(eax); __ CallRuntime(Runtime::kTraceExit, 1); } +#ifdef DEBUG + // Add a label for checking the size of the code used for returning. + Label check_exit_codesize; + masm_->bind(&check_exit_codesize); +#endif + CodeGenerator::RecordPositions(masm_, position); __ RecordJSReturn(); // Do not use the leave instruction here because it is too short to // patch with the code required by the debugger. __ mov(esp, ebp); __ pop(ebp); - __ ret((fun->scope()->num_parameters() + 1) * kPointerSize); + __ ret((function_->scope()->num_parameters() + 1) * kPointerSize); +#ifdef ENABLE_DEBUGGER_SUPPORT + // Check that the size of the code used for returning matches what is + // expected by the debugger. 
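// The sequence emitted below is mov esp,ebp (2 bytes), pop ebp (1 byte)
// and ret k (3 bytes), 6 bytes in all: enough room for the debugger to
// patch in a 5-byte call, which a 4-byte 'leave; ret k' sequence would
// not provide.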
+ ASSERT_EQ(Debug::kIa32JSReturnSequenceLength, + masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); +#endif } } -void FastCodeGenerator::Move(Location destination, Slot* source) { - switch (destination.type()) { - case Location::kUninitialized: +void FastCodeGenerator::Move(Expression::Context context, Register source) { + switch (context) { + case Expression::kUninitialized: UNREACHABLE(); - case Location::kEffect: + case Expression::kEffect: break; - case Location::kValue: - __ push(Operand(ebp, SlotOffset(source))); + case Expression::kValue: + __ push(source); + break; + case Expression::kTest: + TestAndBranch(source, true_label_, false_label_); break; + case Expression::kValueTest: { + Label discard; + __ push(source); + TestAndBranch(source, true_label_, &discard); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; + __ push(source); + TestAndBranch(source, &discard, false_label_); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(true_label_); + } } } -void FastCodeGenerator::Move(Location destination, Literal* expr) { - switch (destination.type()) { - case Location::kUninitialized: +void FastCodeGenerator::Move(Expression::Context context, Slot* source) { + switch (context) { + case Expression::kUninitialized: UNREACHABLE(); - case Location::kEffect: + case Expression::kEffect: break; - case Location::kValue: - __ push(Immediate(expr->handle())); + case Expression::kValue: + __ push(Operand(ebp, SlotOffset(source))); + break; + case Expression::kTest: // Fall through. + case Expression::kValueTest: // Fall through. + case Expression::kTestValue: + __ mov(eax, Operand(ebp, SlotOffset(source))); + Move(context, eax); break; } } -void FastCodeGenerator::Move(Slot* destination, Location source) { - switch (source.type()) { - case Location::kUninitialized: // Fall through. - case Location::kEffect: +void FastCodeGenerator::Move(Expression::Context context, Literal* expr) { + switch (context) { + case Expression::kUninitialized: UNREACHABLE(); - case Location::kValue: - __ pop(Operand(ebp, SlotOffset(destination))); + case Expression::kEffect: + break; + case Expression::kValue: + __ push(Immediate(expr->handle())); + break; + case Expression::kTest: // Fall through. + case Expression::kValueTest: // Fall through. 
+ case Expression::kTestValue: + __ mov(eax, expr->handle()); + Move(context, eax); break; } } -void FastCodeGenerator::DropAndMove(Location destination, Register source) { - switch (destination.type()) { - case Location::kUninitialized: +void FastCodeGenerator::DropAndMove(Expression::Context context, + Register source) { + switch (context) { + case Expression::kUninitialized: UNREACHABLE(); - case Location::kEffect: + case Expression::kEffect: + __ add(Operand(esp), Immediate(kPointerSize)); + break; + case Expression::kValue: + __ mov(Operand(esp, 0), source); + break; + case Expression::kTest: + ASSERT(!source.is(esp)); __ add(Operand(esp), Immediate(kPointerSize)); + TestAndBranch(source, true_label_, false_label_); break; - case Location::kValue: + case Expression::kValueTest: { + Label discard; + __ mov(Operand(esp, 0), source); + TestAndBranch(source, true_label_, &discard); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; __ mov(Operand(esp, 0), source); + TestAndBranch(source, &discard, false_label_); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(true_label_); + break; + } + } +} + + +void FastCodeGenerator::TestAndBranch(Register source, + Label* true_label, + Label* false_label) { + ASSERT_NE(NULL, true_label); + ASSERT_NE(NULL, false_label); + // Use the shared ToBoolean stub to compile the value in the register into + // control flow to the code generator's true and false labels. Perform + // the fast checks assumed by the stub. + __ cmp(source, Factory::undefined_value()); // The undefined value is false. + __ j(equal, false_label); + __ cmp(source, Factory::true_value()); // True is true. + __ j(equal, true_label); + __ cmp(source, Factory::false_value()); // False is false. + __ j(equal, false_label); + ASSERT_EQ(0, kSmiTag); + __ test(source, Operand(source)); // The smi zero is false. + __ j(zero, false_label); + __ test(source, Immediate(kSmiTagMask)); // All other smis are true. + __ j(zero, true_label); + + // Call the stub for all other cases. + __ push(source); + ToBooleanStub stub; + __ CallStub(&stub); + __ test(eax, Operand(eax)); // The stub returns nonzero for true. + __ j(not_zero, true_label); + __ jmp(false_label); +} + + +void FastCodeGenerator::VisitDeclaration(Declaration* decl) { + Comment cmnt(masm_, "[ Declaration"); + Variable* var = decl->proxy()->var(); + ASSERT(var != NULL); // Must have been resolved. + Slot* slot = var->slot(); + ASSERT(slot != NULL); // No global declarations here. + + // We have 3 cases for slots: LOOKUP, LOCAL, CONTEXT. + switch (slot->type()) { + case Slot::LOOKUP: { + __ push(esi); + __ push(Immediate(var->name())); + // Declaration nodes are always introduced in one of two modes. + ASSERT(decl->mode() == Variable::VAR || decl->mode() == Variable::CONST); + PropertyAttributes attr = + (decl->mode() == Variable::VAR) ? NONE : READ_ONLY; + __ push(Immediate(Smi::FromInt(attr))); + // Push initial value, if any. + // Note: For variables we must not push an initial value (such as + // 'undefined') because we may have a (legal) redeclaration and we + // must not destroy the current value. + if (decl->mode() == Variable::CONST) { + __ push(Immediate(Factory::the_hole_value())); + } else if (decl->fun() != NULL) { + Visit(decl->fun()); + } else { + __ push(Immediate(Smi::FromInt(0))); // No initial value! 
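// The smi zero pushed here serves as a sentinel for 'no initial value'.
// Together with the context, name, and attributes pushed above, it forms
// the four arguments consumed by Runtime::kDeclareContextSlot below.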
+ } + __ CallRuntime(Runtime::kDeclareContextSlot, 4); + break; + } + case Slot::LOCAL: + if (decl->mode() == Variable::CONST) { + __ mov(Operand(ebp, SlotOffset(var->slot())), + Immediate(Factory::the_hole_value())); + } else if (decl->fun() != NULL) { + Visit(decl->fun()); + __ pop(Operand(ebp, SlotOffset(var->slot()))); + } + break; + case Slot::CONTEXT: + // The variable in the decl always resides in the current context. + ASSERT(function_->scope()->ContextChainLength(slot->var()->scope()) == 0); + if (decl->mode() == Variable::CONST) { + __ mov(eax, Immediate(Factory::the_hole_value())); + if (FLAG_debug_code) { + // Check if we have the correct context pointer. + __ mov(ebx, + CodeGenerator::ContextOperand(esi, Context::FCONTEXT_INDEX)); + __ cmp(ebx, Operand(esi)); + __ Check(equal, "Unexpected declaration in current context."); + } + __ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax); + // No write barrier since the_hole_value is in old space. + } else if (decl->fun() != NULL) { + Visit(decl->fun()); + __ pop(eax); + if (FLAG_debug_code) { + // Check if we have the correct context pointer. + __ mov(ebx, + CodeGenerator::ContextOperand(esi, Context::FCONTEXT_INDEX)); + __ cmp(ebx, Operand(esi)); + __ Check(equal, "Unexpected declaration in current context."); + } + __ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax); + int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; + __ RecordWrite(esi, offset, eax, ecx); + } break; + default: + UNREACHABLE(); } } @@ -172,27 +413,15 @@ void FastCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) { Comment cmnt(masm_, "[ ReturnStatement"); - SetStatementPosition(stmt); Expression* expr = stmt->expression(); - // Complete the statement based on the type of the subexpression. if (expr->AsLiteral() != NULL) { __ mov(eax, expr->AsLiteral()->handle()); } else { + ASSERT_EQ(Expression::kValue, expr->context()); Visit(expr); - Move(eax, expr->location()); - } - - if (FLAG_trace) { - __ push(eax); - __ CallRuntime(Runtime::kTraceExit, 1); + __ pop(eax); } - __ RecordJSReturn(); - - // Do not use the leave instruction here because it is too short to - // patch with the code required by the debugger. - __ mov(esp, ebp); - __ pop(ebp); - __ ret((function_->scope()->num_parameters() + 1) * kPointerSize); + EmitReturnSequence(stmt->statement_pos()); } @@ -200,7 +429,8 @@ void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) { Comment cmnt(masm_, "[ FunctionLiteral"); // Build the function boilerplate and instantiate it. - Handle<JSFunction> boilerplate = BuildBoilerplate(expr); + Handle<JSFunction> boilerplate = + Compiler::BuildBoilerplate(expr, script_, this); if (HasStackOverflow()) return; ASSERT(boilerplate->IsBoilerplate()); @@ -209,7 +439,7 @@ void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) { __ push(esi); __ push(Immediate(boilerplate)); __ CallRuntime(Runtime::kNewClosure, 2); - Move(expr->location(), eax); + Move(expr->context(), eax); } @@ -217,6 +447,7 @@ void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) { Comment cmnt(masm_, "[ VariableProxy"); Expression* rewrite = expr->var()->rewrite(); if (rewrite == NULL) { + ASSERT(expr->var()->is_global()); Comment cmnt(masm_, "Global variable"); // Use inline caching. Variable name is passed in ecx and the global // object on the stack. 
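Every expression in this file is compiled against one of five target contexts, and the helpers Move, DropAndMove, and TestAndBranch adapt a computed value to that context. The following sketch is not part of the patch; it is plain C++ in which a simulated operand stack and branch-target string stand in for the pushes and jumps the real code emits, showing what each context asks for, including the discard on the untaken side in the kValueTest and kTestValue cases.

#include <cstdio>
#include <vector>

enum Context { kEffect, kValue, kTest, kValueTest, kTestValue };

// Simulated machine state: the operand stack and the branch taken.
static std::vector<int> g_stack;
static const char* g_branch = "none";

static void MoveResult(Context context, int value) {
  bool is_true = (value != 0);
  switch (context) {
    case kEffect:     // Value is not needed; drop it.
      break;
    case kValue:      // Value is needed; leave it on the stack.
      g_stack.push_back(value);
      break;
    case kTest:       // Only control flow is needed.
      g_branch = is_true ? "true_label" : "false_label";
      break;
    case kValueTest:  // Value is needed only on the true branch.
      if (is_true) g_stack.push_back(value);
      g_branch = is_true ? "true_label" : "false_label";
      break;
    case kTestValue:  // Value is needed only on the false branch.
      if (!is_true) g_stack.push_back(value);
      g_branch = is_true ? "true_label" : "false_label";
      break;
  }
}

int main() {
  MoveResult(kValueTest, 7);  // Truthy: value kept, true branch taken.
  std::printf("branch=%s, stack depth=%zu\n", g_branch, g_stack.size());
  return 0;
}

In the generated ia32 code the value cannot be inspected at compile time, so the kValueTest and kTestValue cases push the value first, branch on it, and pop it again at a local 'discard' label before jumping on; that pattern repeats throughout the hunks below.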
@@ -230,16 +461,62 @@ void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) { // (eg, push/pop elimination). __ nop(); - DropAndMove(expr->location(), eax); + DropAndMove(expr->context(), eax); + } else if (rewrite->AsSlot() != NULL) { + Slot* slot = rewrite->AsSlot(); + switch (slot->type()) { + case Slot::LOCAL: + case Slot::PARAMETER: { + Comment cmnt(masm_, "Stack slot"); + Move(expr->context(), slot); + break; + } + + case Slot::CONTEXT: { + Comment cmnt(masm_, "Context slot"); + int chain_length = + function_->scope()->ContextChainLength(slot->var()->scope()); + if (chain_length > 0) { + // Move up the chain of contexts to the context containing the slot. + __ mov(eax, + Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX))); + // Load the function context (which is the incoming, outer context). + __ mov(eax, FieldOperand(eax, JSFunction::kContextOffset)); + for (int i = 1; i < chain_length; i++) { + __ mov(eax, + Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX))); + __ mov(eax, FieldOperand(eax, JSFunction::kContextOffset)); + } + // The context may be an intermediate context, not a function context. + __ mov(eax, + Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX))); + } else { // Slot is in the current function context. + // The context may be an intermediate context, not a function context. + __ mov(eax, + Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX))); + } + __ mov(eax, Operand(eax, Context::SlotOffset(slot->index()))); + Move(expr->context(), eax); + break; + } + + case Slot::LOOKUP: + UNREACHABLE(); + break; + } } else { - Comment cmnt(masm_, "Stack slot"); - Move(expr->location(), rewrite->AsSlot()); + // The parameter variable has been rewritten into an explicit access to + // the arguments object. + Property* property = rewrite->AsProperty(); + ASSERT_NOT_NULL(property); + ASSERT_EQ(expr->context(), property->context()); + Visit(property); } } void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { - Comment cmnt(masm_, "[ RegExp Literal"); + Comment cmnt(masm_, "[ RegExpLiteral"); Label done; // Registers will be used as follows: // edi = JS function. @@ -261,7 +538,7 @@ void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); // Label done: __ bind(&done); - Move(expr->location(), eax); + Move(expr->context(), eax); } @@ -318,7 +595,8 @@ void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { case ObjectLiteral::Property::COMPUTED: if (key->handle()->IsSymbol()) { Visit(value); - Move(eax, value->location()); + ASSERT_EQ(Expression::kValue, value->context()); + __ pop(eax); __ mov(ecx, Immediate(key->handle())); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); __ call(ic, RelocInfo::CODE_TARGET); @@ -329,9 +607,9 @@ void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { case ObjectLiteral::Property::PROTOTYPE: __ push(eax); Visit(key); - ASSERT(key->location().is_value()); + ASSERT_EQ(Expression::kValue, key->context()); Visit(value); - ASSERT(value->location().is_value()); + ASSERT_EQ(Expression::kValue, value->context()); __ CallRuntime(Runtime::kSetProperty, 3); __ mov(eax, Operand(esp, 0)); // Restore result into eax.
break; @@ -339,27 +617,49 @@ void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { case ObjectLiteral::Property::GETTER: __ push(eax); Visit(key); - ASSERT(key->location().is_value()); + ASSERT_EQ(Expression::kValue, key->context()); __ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ? Smi::FromInt(1) : Smi::FromInt(0))); Visit(value); - ASSERT(value->location().is_value()); + ASSERT_EQ(Expression::kValue, value->context()); __ CallRuntime(Runtime::kDefineAccessor, 4); __ mov(eax, Operand(esp, 0)); // Restore result into eax. break; default: UNREACHABLE(); } } - switch (expr->location().type()) { - case Location::kUninitialized: + switch (expr->context()) { + case Expression::kUninitialized: UNREACHABLE(); - case Location::kEffect: + case Expression::kEffect: if (result_saved) __ add(Operand(esp), Immediate(kPointerSize)); break; - case Location::kValue: + case Expression::kValue: if (!result_saved) __ push(eax); break; + case Expression::kTest: + if (result_saved) __ pop(eax); + TestAndBranch(eax, true_label_, false_label_); + break; + case Expression::kValueTest: { + Label discard; + if (!result_saved) __ push(eax); + TestAndBranch(eax, true_label_, &discard); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; + if (!result_saved) __ push(eax); + TestAndBranch(eax, &discard, false_label_); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(true_label_); + break; + } } } @@ -412,7 +712,7 @@ void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { result_saved = true; } Visit(subexpr); - ASSERT(subexpr->location().is_value()); + ASSERT_EQ(Expression::kValue, subexpr->context()); // Store the subexpression value in the array's elements. __ pop(eax); // Subexpression value. @@ -425,80 +725,218 @@ void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ RecordWrite(ebx, offset, eax, ecx); } - switch (expr->location().type()) { - case Location::kUninitialized: + switch (expr->context()) { + case Expression::kUninitialized: UNREACHABLE(); - case Location::kEffect: + case Expression::kEffect: if (result_saved) __ add(Operand(esp), Immediate(kPointerSize)); break; - case Location::kValue: + case Expression::kValue: + if (!result_saved) __ push(eax); + break; + case Expression::kTest: + if (result_saved) __ pop(eax); + TestAndBranch(eax, true_label_, false_label_); + break; + case Expression::kValueTest: { + Label discard; if (!result_saved) __ push(eax); + TestAndBranch(eax, true_label_, &discard); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; + if (!result_saved) __ push(eax); + TestAndBranch(eax, &discard, false_label_); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(true_label_); break; + } } } -void FastCodeGenerator::VisitAssignment(Assignment* expr) { - Comment cmnt(masm_, "[ Assignment"); - ASSERT(expr->op() == Token::ASSIGN || expr->op() == Token::INIT_VAR); - - // Left-hand side can only be a global or a (parameter or local) slot. +void FastCodeGenerator::EmitVariableAssignment(Assignment* expr) { Variable* var = expr->target()->AsVariableProxy()->AsVariable(); ASSERT(var != NULL); - ASSERT(var->is_global() || var->slot() != NULL); - Expression* rhs = expr->value(); if (var->is_global()) { - // Assignment to a global variable, use inline caching. 
Right-hand-side - // value is passed in eax, variable name in ecx, and the global object - // on the stack. - - // Code for the right-hand-side expression depends on its type. - if (rhs->AsLiteral() != NULL) { - __ mov(eax, rhs->AsLiteral()->handle()); - } else { - ASSERT(rhs->location().is_value()); - Visit(rhs); - __ pop(eax); - } + // Assignment to a global variable. Use inline caching for the + // assignment. Right-hand-side value is passed in eax, variable name in + // ecx, and the global object on the stack. + __ pop(eax); __ mov(ecx, var->name()); __ push(CodeGenerator::GlobalObject()); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); __ call(ic, RelocInfo::CODE_TARGET); - // Overwrite the global object on the stack with the result if needed. - DropAndMove(expr->location(), eax); + // Overwrite the receiver on the stack with the result if needed. + DropAndMove(expr->context(), eax); + } else { - // Local or parameter assignment. - - // Code for the right-hand side expression depends on its type. - if (rhs->AsLiteral() != NULL) { - // Two cases: 'temp <- (var = constant)', or 'var = constant' with a - // discarded result. Always perform the assignment. - __ mov(eax, rhs->AsLiteral()->handle()); - __ mov(Operand(ebp, SlotOffset(var->slot())), eax); - Move(expr->location(), eax); - } else { - ASSERT(rhs->location().is_value()); - Visit(rhs); - switch (expr->location().type()) { - case Location::kUninitialized: - UNREACHABLE(); - case Location::kEffect: - // Case 'var = temp'. Discard right-hand-side temporary. - Move(var->slot(), rhs->location()); - break; - case Location::kValue: - // Case 'temp1 <- (var = temp0)'. Preserve right-hand-side - // temporary on the stack. - __ mov(eax, Operand(esp, 0)); - __ mov(Operand(ebp, SlotOffset(var->slot())), eax); - break; + Slot* slot = var->slot(); + ASSERT_NOT_NULL(slot); // Variables rewritten as properties not handled. + switch (slot->type()) { + case Slot::LOCAL: + case Slot::PARAMETER: { + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + case Expression::kEffect: + // Perform assignment and discard value. + __ pop(Operand(ebp, SlotOffset(var->slot()))); + break; + case Expression::kValue: + // Perform assignment and preserve value. + __ mov(eax, Operand(esp, 0)); + __ mov(Operand(ebp, SlotOffset(var->slot())), eax); + break; + case Expression::kTest: + // Perform assignment and test (and discard) value. + __ pop(eax); + __ mov(Operand(ebp, SlotOffset(var->slot())), eax); + TestAndBranch(eax, true_label_, false_label_); + break; + case Expression::kValueTest: { + Label discard; + __ mov(eax, Operand(esp, 0)); + __ mov(Operand(ebp, SlotOffset(var->slot())), eax); + TestAndBranch(eax, true_label_, &discard); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; + __ mov(eax, Operand(esp, 0)); + __ mov(Operand(ebp, SlotOffset(var->slot())), eax); + TestAndBranch(eax, &discard, false_label_); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(true_label_); + break; + } + } + break; + } + + case Slot::CONTEXT: { + int chain_length = + function_->scope()->ContextChainLength(slot->var()->scope()); + if (chain_length > 0) { + // Move up the context chain to the context containing the slot. + __ mov(eax, + Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX))); + // Load the function context (which is the incoming, outer context). 
+ __ mov(eax, FieldOperand(eax, JSFunction::kContextOffset)); + for (int i = 1; i < chain_length; i++) { + __ mov(eax, + Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX))); + __ mov(eax, FieldOperand(eax, JSFunction::kContextOffset)); + } + } else { // Slot is in the current context. Generate optimized code. + __ mov(eax, esi); // RecordWrite destroys the object register. + } + if (FLAG_debug_code) { + __ cmp(eax, + Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX))); + __ Check(equal, "Context Slot chain length wrong."); + } + __ pop(ecx); + __ mov(Operand(eax, Context::SlotOffset(slot->index())), ecx); + + // RecordWrite may destroy all its register arguments. + if (expr->context() == Expression::kValue) { + __ push(ecx); + } else if (expr->context() != Expression::kEffect) { + __ mov(edx, ecx); + } + int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; + __ RecordWrite(eax, offset, ecx, ebx); + if (expr->context() != Expression::kEffect && + expr->context() != Expression::kValue) { + Move(expr->context(), edx); + } + break; } + + case Slot::LOOKUP: + UNREACHABLE(); + break; } } } +void FastCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { + // Assignment to a property, using a named store IC. + Property* prop = expr->target()->AsProperty(); + ASSERT(prop != NULL); + ASSERT(prop->key()->AsLiteral() != NULL); + + // If the assignment starts a block of assignments to the same object, + // change to slow case to avoid the quadratic behavior of repeatedly + // adding fast properties. + if (expr->starts_initialization_block()) { + __ push(Operand(esp, kPointerSize)); // Receiver is under value. + __ CallRuntime(Runtime::kToSlowProperties, 1); + } + + __ pop(eax); + __ mov(ecx, prop->key()->AsLiteral()->handle()); + Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); + __ call(ic, RelocInfo::CODE_TARGET); + + // If the assignment ends an initialization block, revert to fast case. + if (expr->ends_initialization_block()) { + __ push(eax); // Result of assignment, saved even if not needed. + __ push(Operand(esp, kPointerSize)); // Receiver is under value. + __ CallRuntime(Runtime::kToFastProperties, 1); + __ pop(eax); + } + + DropAndMove(expr->context(), eax); +} + + +void FastCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { + // Assignment to a property, using a keyed store IC. + + // If the assignment starts a block of assignments to the same object, + // change to slow case to avoid the quadratic behavior of repeatedly + // adding fast properties. + if (expr->starts_initialization_block()) { + // Receiver is under the key and value. + __ push(Operand(esp, 2 * kPointerSize)); + __ CallRuntime(Runtime::kToSlowProperties, 1); + } + + __ pop(eax); + Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); + __ call(ic, RelocInfo::CODE_TARGET); + // This nop signals to the IC that there is no inlined code at the call + // site for it to patch. + __ nop(); + + // If the assignment ends an initialization block, revert to fast case. + if (expr->ends_initialization_block()) { + __ push(eax); // Result of assignment, saved even if not needed. + // Receiver is under the key and value. + __ push(Operand(esp, 2 * kPointerSize)); + __ CallRuntime(Runtime::kToFastProperties, 1); + __ pop(eax); + } + + // Receiver and key are still on stack.
+ __ add(Operand(esp), Immediate(2 * kPointerSize)); + Move(expr->context(), eax); +} + + void FastCodeGenerator::VisitProperty(Property* expr) { Comment cmnt(masm_, "[ Property"); Expression* key = expr->key(); @@ -531,72 +969,147 @@ void FastCodeGenerator::VisitProperty(Property* expr) { // Drop key left on the stack by IC. __ add(Operand(esp), Immediate(kPointerSize)); } - switch (expr->location().type()) { - case Location::kUninitialized: - UNREACHABLE(); - case Location::kValue: - __ mov(Operand(esp, 0), eax); - break; - case Location::kEffect: - __ add(Operand(esp), Immediate(kPointerSize)); - break; - } + DropAndMove(expr->context(), eax); } -void FastCodeGenerator::VisitCall(Call* expr) { - Expression* fun = expr->expression(); +void FastCodeGenerator::EmitCallWithIC(Call* expr, RelocInfo::Mode reloc_info) { + // Code common for calls using the IC. ZoneList<Expression*>* args = expr->arguments(); - Variable* var = fun->AsVariableProxy()->AsVariable(); - ASSERT(var != NULL && !var->is_this() && var->is_global()); - ASSERT(!var->is_possibly_eval()); - - __ push(Immediate(var->name())); - // Push global object (receiver). - __ push(CodeGenerator::GlobalObject()); int arg_count = args->length(); for (int i = 0; i < arg_count; i++) { Visit(args->at(i)); - ASSERT(args->at(i)->location().is_value()); + ASSERT_EQ(Expression::kValue, args->at(i)->context()); } - // Record source position for debugger + // Record source position for debugger. SetSourcePosition(expr->position()); // Call the IC initialization code. Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count, NOT_IN_LOOP); - __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); + __ call(ic, reloc_info); + // Restore context register. + __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); + // Discard the function left on TOS. + DropAndMove(expr->context(), eax); +} + + +void FastCodeGenerator::EmitCallWithStub(Call* expr) { + // Code common for calls using the call stub. + ZoneList<Expression*>* args = expr->arguments(); + int arg_count = args->length(); + for (int i = 0; i < arg_count; i++) { + Visit(args->at(i)); + } + // Record source position for debugger. + SetSourcePosition(expr->position()); + CallFunctionStub stub(arg_count, NOT_IN_LOOP); + __ CallStub(&stub); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); // Discard the function left on TOS. - DropAndMove(expr->location(), eax); + DropAndMove(expr->context(), eax); +} + + +void FastCodeGenerator::VisitCall(Call* expr) { + Comment cmnt(masm_, "[ Call"); + Expression* fun = expr->expression(); + Variable* var = fun->AsVariableProxy()->AsVariable(); + + if (var != NULL && var->is_possibly_eval()) { + // Call to the identifier 'eval'. + UNREACHABLE(); + } else if (var != NULL && !var->is_this() && var->is_global()) { + // Call to a global variable. + __ push(Immediate(var->name())); + // Push global object as receiver for the call IC lookup. + __ push(CodeGenerator::GlobalObject()); + EmitCallWithIC(expr, RelocInfo::CODE_TARGET_CONTEXT); + } else if (var != NULL && var->slot() != NULL && + var->slot()->type() == Slot::LOOKUP) { + // Call to a lookup slot. + UNREACHABLE(); + } else if (fun->AsProperty() != NULL) { + // Call to an object property. + Property* prop = fun->AsProperty(); + Literal* key = prop->key()->AsLiteral(); + if (key != NULL && key->handle()->IsSymbol()) { + // Call to a named property, use call IC. 
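// The call IC expects the name and the receiver on the stack beneath the
// arguments: the name is pushed here, the receiver by visiting the object,
// and the arguments inside EmitCallWithIC, which bakes the argument count
// into the IC stub it computes.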
+ __ push(Immediate(key->handle())); + Visit(prop->obj()); + EmitCallWithIC(expr, RelocInfo::CODE_TARGET); + } else { + // Call to a keyed property, use keyed load IC followed by function + // call. + Visit(prop->obj()); + Visit(prop->key()); + // Record source code position for IC call. + SetSourcePosition(prop->position()); + Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); + __ call(ic, RelocInfo::CODE_TARGET); + // By emitting a nop we make sure that we do not have a "test eax,..." + // instruction after the call, as it would be treated specially by the + // LoadIC code. + __ nop(); + // Drop key left on the stack by IC. + __ add(Operand(esp), Immediate(kPointerSize)); + // Pop receiver. + __ pop(ebx); + // Push result (function). + __ push(eax); + // Push receiver object on stack. + if (prop->is_synthetic()) { + __ push(CodeGenerator::GlobalObject()); + } else { + __ push(ebx); + } + EmitCallWithStub(expr); + } + } else { + // Call to some other expression. If the expression is an anonymous + // function literal not called in a loop, mark it as one that should + // also use the fast code generator. + FunctionLiteral* lit = fun->AsFunctionLiteral(); + if (lit != NULL && + lit->name()->Equals(Heap::empty_string()) && + loop_depth() == 0) { + lit->set_try_fast_codegen(true); + } + Visit(fun); + // Load global receiver object. + __ mov(ebx, CodeGenerator::GlobalObject()); + __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset)); + // Emit function call. + EmitCallWithStub(expr); + } } -void FastCodeGenerator::VisitCallNew(CallNew* node) { +void FastCodeGenerator::VisitCallNew(CallNew* expr) { Comment cmnt(masm_, "[ CallNew"); // According to ECMA-262, section 11.2.2, page 44, the function // expression in new calls must be evaluated before the // arguments. // Push function on the stack. - Visit(node->expression()); - ASSERT(node->expression()->location().is_value()); + Visit(expr->expression()); + ASSERT_EQ(Expression::kValue, expr->expression()->context()); // Push global object (receiver). __ push(CodeGenerator::GlobalObject()); // Push the arguments ("left-to-right") on the stack. - ZoneList<Expression*>* args = node->arguments(); + ZoneList<Expression*>* args = expr->arguments(); int arg_count = args->length(); for (int i = 0; i < arg_count; i++) { Visit(args->at(i)); - ASSERT(args->at(i)->location().is_value()); + ASSERT_EQ(Expression::kValue, args->at(i)->context()); // Since the context is value, the argument is already on the stack, // so nothing more to do here. } // Call the construct call builtin that handles allocation and // constructor invocation. - SetSourcePosition(node->position()); + SetSourcePosition(expr->position()); // Load function, arg_count into edi and eax. __ Set(eax, Immediate(arg_count)); @@ -607,7 +1120,7 @@ void FastCodeGenerator::VisitCallNew(CallNew* node) { __ call(construct_builtin, RelocInfo::CONSTRUCT_CALL); // Replace function on TOS with result in eax, or pop it.
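// (The construct builtin called above receives the argument count in eax
// and the constructor function in edi, and returns the allocated object
// in eax, which DropAndMove below reconciles with the expression context.)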
- DropAndMove(node->location(), eax); + DropAndMove(expr->context(), eax); } @@ -622,19 +1135,221 @@ void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) { int arg_count = args->length(); for (int i = 0; i < arg_count; i++) { Visit(args->at(i)); - ASSERT(args->at(i)->location().is_value()); + ASSERT_EQ(Expression::kValue, args->at(i)->context()); } __ CallRuntime(function, arg_count); - Move(expr->location(), eax); + Move(expr->context(), eax); +} + + +void FastCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { + switch (expr->op()) { + case Token::VOID: { + Comment cmnt(masm_, "[ UnaryOperation (VOID)"); + Visit(expr->expression()); + ASSERT_EQ(Expression::kEffect, expr->expression()->context()); + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + break; + case Expression::kEffect: + break; + case Expression::kValue: + __ push(Immediate(Factory::undefined_value())); + break; + case Expression::kTestValue: + // Value is false so it's needed. + __ push(Immediate(Factory::undefined_value())); + // Fall through. + case Expression::kTest: // Fall through. + case Expression::kValueTest: + __ jmp(false_label_); + break; + } + break; + } + + case Token::NOT: { + Comment cmnt(masm_, "[ UnaryOperation (NOT)"); + ASSERT_EQ(Expression::kTest, expr->expression()->context()); + + Label push_true; + Label push_false; + Label done; + Label* saved_true = true_label_; + Label* saved_false = false_label_; + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + break; + + case Expression::kValue: + true_label_ = &push_false; + false_label_ = &push_true; + Visit(expr->expression()); + __ bind(&push_true); + __ push(Immediate(Factory::true_value())); + __ jmp(&done); + __ bind(&push_false); + __ push(Immediate(Factory::false_value())); + __ bind(&done); + break; + + case Expression::kEffect: + true_label_ = &done; + false_label_ = &done; + Visit(expr->expression()); + __ bind(&done); + break; + + case Expression::kTest: + true_label_ = saved_false; + false_label_ = saved_true; + Visit(expr->expression()); + break; + + case Expression::kValueTest: + true_label_ = saved_false; + false_label_ = &push_true; + Visit(expr->expression()); + __ bind(&push_true); + __ push(Immediate(Factory::true_value())); + __ jmp(saved_true); + break; + + case Expression::kTestValue: + true_label_ = &push_false; + false_label_ = saved_true; + Visit(expr->expression()); + __ bind(&push_false); + __ push(Immediate(Factory::false_value())); + __ jmp(saved_false); + break; + } + true_label_ = saved_true; + false_label_ = saved_false; + break; + } + + case Token::TYPEOF: { + Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); + ASSERT_EQ(Expression::kValue, expr->expression()->context()); + + VariableProxy* proxy = expr->expression()->AsVariableProxy(); + if (proxy != NULL && + !proxy->var()->is_this() && + proxy->var()->is_global()) { + Comment cmnt(masm_, "Global variable"); + __ push(CodeGenerator::GlobalObject()); + __ mov(ecx, Immediate(proxy->name())); + Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); + // Use a regular load, not a contextual load, to avoid a reference + // error. 
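// A contextual load (RelocInfo::CODE_TARGET_CONTEXT) makes the IC throw
// for an undeclared global, whereas a regular load yields undefined,
// which is what 'typeof' of an undeclared variable must produce.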
+ __ call(ic, RelocInfo::CODE_TARGET); + __ mov(Operand(esp, 0), eax); + } else if (proxy != NULL && + proxy->var()->slot() != NULL && + proxy->var()->slot()->type() == Slot::LOOKUP) { + __ push(esi); + __ push(Immediate(proxy->name())); + __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); + __ push(eax); + } else { + // This expression cannot throw a reference error at the top level. + Visit(expr->expression()); + } + + __ CallRuntime(Runtime::kTypeof, 1); + Move(expr->context(), eax); + break; + } + + default: + UNREACHABLE(); + } +} + + +void FastCodeGenerator::VisitCountOperation(CountOperation* expr) { + Comment cmnt(masm_, "[ CountOperation"); + VariableProxy* proxy = expr->expression()->AsVariableProxy(); + ASSERT(proxy->AsVariable() != NULL); + ASSERT(proxy->AsVariable()->is_global()); + + Visit(proxy); + __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); + + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + case Expression::kValue: // Fall through + case Expression::kTest: // Fall through + case Expression::kTestValue: // Fall through + case Expression::kValueTest: + // Duplicate the result on the stack. + __ push(eax); + break; + case Expression::kEffect: + // Do not save result. + break; + } + // Call runtime for +1/-1. + __ push(eax); + __ push(Immediate(Smi::FromInt(1))); + if (expr->op() == Token::INC) { + __ CallRuntime(Runtime::kNumberAdd, 2); + } else { + __ CallRuntime(Runtime::kNumberSub, 2); + } + // Call Store IC. + __ mov(ecx, proxy->AsVariable()->name()); + __ push(CodeGenerator::GlobalObject()); + Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); + __ call(ic, RelocInfo::CODE_TARGET); + // Discard the receiver left on the stack by the store IC. + __ add(Operand(esp), Immediate(kPointerSize)); + + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + case Expression::kEffect: // Fall through + case Expression::kValue: + // Do nothing. The result is either on the stack for value contexts + // or already discarded for effect contexts.
+ break; + case Expression::kTest: + __ pop(eax); + TestAndBranch(eax, true_label_, false_label_); + break; + case Expression::kValueTest: { + Label discard; + __ mov(eax, Operand(esp, 0)); + TestAndBranch(eax, true_label_, &discard); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; + __ mov(eax, Operand(esp, 0)); + TestAndBranch(eax, &discard, false_label_); + __ bind(&discard); + __ add(Operand(esp), Immediate(kPointerSize)); + __ jmp(true_label_); + break; + } + } } void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { + Comment cmnt(masm_, "[ BinaryOperation"); switch (expr->op()) { case Token::COMMA: - ASSERT(expr->left()->location().is_effect()); - ASSERT_EQ(expr->right()->location().type(), expr->location().type()); + ASSERT_EQ(Expression::kEffect, expr->left()->context()); + ASSERT_EQ(expr->context(), expr->right()->context()); Visit(expr->left()); Visit(expr->right()); break; @@ -655,8 +1370,8 @@ void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { case Token::SHL: case Token::SHR: case Token::SAR: { - ASSERT(expr->left()->location().is_value()); - ASSERT(expr->right()->location().is_value()); + ASSERT_EQ(Expression::kValue, expr->left()->context()); + ASSERT_EQ(Expression::kValue, expr->right()->context()); Visit(expr->left()); Visit(expr->right()); @@ -664,7 +1379,7 @@ void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { NO_OVERWRITE, NO_GENERIC_BINARY_FLAGS); __ CallStub(&stub); - Move(expr->location(), eax); + Move(expr->context(), eax); break; } @@ -674,90 +1389,166 @@ void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { } -void FastCodeGenerator::EmitLogicalOperation(BinaryOperation* expr) { - // Compile a short-circuited boolean operation in a non-test context. +void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) { + Comment cmnt(masm_, "[ CompareOperation"); + ASSERT_EQ(Expression::kValue, expr->left()->context()); + ASSERT_EQ(Expression::kValue, expr->right()->context()); + Visit(expr->left()); + Visit(expr->right()); + + // Convert current context to test context: Pre-test code. + Label push_true; + Label push_false; + Label done; + Label* saved_true = true_label_; + Label* saved_false = false_label_; + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + break; - // Compile (e0 || e1) or (e0 && e1) as if it were - // (let (temp = e0) temp [or !temp, for &&] ? temp : e1). + case Expression::kValue: + true_label_ = &push_true; + false_label_ = &push_false; + break; - Label eval_right, done; - Label *left_true, *left_false; // Where to branch to if lhs has that value. - if (expr->op() == Token::OR) { - left_true = &done; - left_false = &eval_right; - } else { - left_true = &eval_right; - left_false = &done; - } - Location destination = expr->location(); - Expression* left = expr->left(); - Expression* right = expr->right(); - - // Use the shared ToBoolean stub to find the boolean value of the - // left-hand subexpression. Load the value into eax to perform some - // inlined checks assumed by the stub. - - // Compile the left-hand value into eax. Put it on the stack if we may - // need it as the value of the whole expression. 
- if (left->AsLiteral() != NULL) { - __ mov(eax, left->AsLiteral()->handle()); - if (destination.is_value()) __ push(eax); - } else { - Visit(left); - ASSERT(left->location().is_value()); - switch (destination.type()) { - case Location::kUninitialized: - UNREACHABLE(); - case Location::kEffect: - // Pop the left-hand value into eax because we will not need it as the - // final result. - __ pop(eax); - break; - case Location::kValue: - // Copy the left-hand value into eax because we may need it as the - // final result. - __ mov(eax, Operand(esp, 0)); - break; - } - } - // The left-hand value is in eax. It is also on the stack iff the - // destination location is value. - - // Perform fast checks assumed by the stub. - __ cmp(eax, Factory::undefined_value()); // The undefined value is false. - __ j(equal, left_false); - __ cmp(eax, Factory::true_value()); // True is true. - __ j(equal, left_true); - __ cmp(eax, Factory::false_value()); // False is false. - __ j(equal, left_false); - ASSERT(kSmiTag == 0); - __ test(eax, Operand(eax)); // The smi zero is false. - __ j(zero, left_false); - __ test(eax, Immediate(kSmiTagMask)); // All other smis are true. - __ j(zero, left_true); + case Expression::kEffect: + true_label_ = &done; + false_label_ = &done; + break; - // Call the stub for all other cases. - __ push(eax); - ToBooleanStub stub; - __ CallStub(&stub); - __ test(eax, Operand(eax)); // The stub returns nonzero for true. - if (expr->op() == Token::OR) { - __ j(not_zero, &done); - } else { - __ j(zero, &done); + case Expression::kTest: + break; + + case Expression::kValueTest: + true_label_ = &push_true; + break; + + case Expression::kTestValue: + false_label_ = &push_false; + break; } + // Convert current context to test context: End pre-test code. - __ bind(&eval_right); - // Discard the left-hand value if present on the stack. - if (destination.is_value()) { - __ add(Operand(esp), Immediate(kPointerSize)); + switch (expr->op()) { + case Token::IN: { + __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); + __ cmp(eax, Factory::true_value()); + __ j(equal, true_label_); + __ jmp(false_label_); + break; + } + + case Token::INSTANCEOF: { + InstanceofStub stub; + __ CallStub(&stub); + __ test(eax, Operand(eax)); + __ j(zero, true_label_); // The stub returns 0 for true. + __ jmp(false_label_); + break; + } + + default: { + Condition cc = no_condition; + bool strict = false; + switch (expr->op()) { + case Token::EQ_STRICT: + strict = true; + // Fall through + case Token::EQ: + cc = equal; + __ pop(eax); + __ pop(edx); + break; + case Token::LT: + cc = less; + __ pop(eax); + __ pop(edx); + break; + case Token::GT: + // Reverse left and right sides to obtain ECMA-262 conversion order. + cc = less; + __ pop(edx); + __ pop(eax); + break; + case Token::LTE: + // Reverse left and right sides to obtain ECMA-262 conversion order. + cc = greater_equal; + __ pop(edx); + __ pop(eax); + break; + case Token::GTE: + cc = greater_equal; + __ pop(eax); + __ pop(edx); + break; + case Token::IN: + case Token::INSTANCEOF: + default: + UNREACHABLE(); + } + + // The comparison stub expects the smi vs. smi case to be handled + // before it is called.
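// Both operands are smis exactly when the bitwise or of their low tag
// bits is zero (kSmiTag is 0 and kSmiTagMask is 1), so the single test
// below checks both registers at once.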
+ Label slow_case; + __ mov(ecx, Operand(edx)); + __ or_(ecx, Operand(eax)); + __ test(ecx, Immediate(kSmiTagMask)); + __ j(not_zero, &slow_case, not_taken); + __ cmp(edx, Operand(eax)); + __ j(cc, true_label_); + __ jmp(false_label_); + + __ bind(&slow_case); + CompareStub stub(cc, strict); + __ CallStub(&stub); + __ test(eax, Operand(eax)); + __ j(cc, true_label_); + __ jmp(false_label_); + } } - // Save or discard the right-hand value as needed. - Visit(right); - ASSERT_EQ(destination.type(), right->location().type()); - __ bind(&done); + // Convert current context to test context: Post-test code. + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + break; + + case Expression::kValue: + __ bind(&push_true); + __ push(Immediate(Factory::true_value())); + __ jmp(&done); + __ bind(&push_false); + __ push(Immediate(Factory::false_value())); + __ bind(&done); + break; + + case Expression::kEffect: + __ bind(&done); + break; + + case Expression::kTest: + break; + + case Expression::kValueTest: + __ bind(&push_true); + __ push(Immediate(Factory::true_value())); + __ jmp(saved_true); + break; + + case Expression::kTestValue: + __ bind(&push_false); + __ push(Immediate(Factory::false_value())); + __ jmp(saved_false); + break; + } + true_label_ = saved_true; + false_label_ = saved_false; + // Convert current context to test context: End post-test code. } +#undef __ + + } } // namespace v8::internal diff --git a/deps/v8/src/ia32/frames-ia32.cc b/deps/v8/src/ia32/frames-ia32.cc index dea439f24b..5c900bedd7 100644 --- a/deps/v8/src/ia32/frames-ia32.cc +++ b/deps/v8/src/ia32/frames-ia32.cc @@ -56,19 +56,14 @@ StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) { state->fp = fp; state->sp = sp; state->pc_address = reinterpret_cast<Address*>(sp - 1 * kPointerSize); - // Determine frame type. - if (Memory::Address_at(fp + ExitFrameConstants::kDebugMarkOffset) != 0) { - return EXIT_DEBUG; - } else { - return EXIT; - } + return EXIT; } void ExitFrame::Iterate(ObjectVisitor* v) const { - // Exit frames on IA-32 do not contain any pointers. The arguments - // are traversed as part of the expression stack of the calling - // frame. + v->VisitPointer(&code_slot()); + // The arguments are traversed as part of the expression stack of + // the calling frame. 
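// The code slot visited above holds the code object pushed by
// EnterExitFramePrologue (or a smi zero for debug frames), so the GC can
// relocate that code object safely while the exit frame is live.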
} diff --git a/deps/v8/src/ia32/frames-ia32.h b/deps/v8/src/ia32/frames-ia32.h index 3a7c86bf73..c3fe6c748d 100644 --- a/deps/v8/src/ia32/frames-ia32.h +++ b/deps/v8/src/ia32/frames-ia32.h @@ -76,7 +76,7 @@ class EntryFrameConstants : public AllStatic { class ExitFrameConstants : public AllStatic { public: - static const int kDebugMarkOffset = -2 * kPointerSize; + static const int kCodeOffset = -2 * kPointerSize; static const int kSPOffset = -1 * kPointerSize; static const int kCallerFPOffset = 0 * kPointerSize; diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc index 08c4c0c51b..010433e163 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/macro-assembler-ia32.cc @@ -319,7 +319,7 @@ void MacroAssembler::CmpInstanceType(Register map, InstanceType type) { void MacroAssembler::FCmp() { - if (CpuFeatures::IsSupported(CpuFeatures::CMOV)) { + if (CpuFeatures::IsSupported(CMOV)) { fucomip(); ffree(0); fincstp(); @@ -355,10 +355,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) { leave(); } - -void MacroAssembler::EnterExitFrame(StackFrame::Type type) { - ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG); - +void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode) { // Setup the frame structure on the stack. ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); @@ -369,23 +366,24 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type) { // Reserve room for entry stack pointer and push the debug marker. ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); push(Immediate(0)); // saved entry sp, patched before call - push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0)); + if (mode == ExitFrame::MODE_DEBUG) { + push(Immediate(0)); + } else { + push(Immediate(CodeObject())); + } // Save the frame pointer and the context in top. ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); ExternalReference context_address(Top::k_context_address); mov(Operand::StaticVariable(c_entry_fp_address), ebp); mov(Operand::StaticVariable(context_address), esi); +} - // Setup argc and argv in callee-saved registers. - int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; - mov(edi, Operand(eax)); - lea(esi, Operand(ebp, eax, times_4, offset)); - +void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc) { #ifdef ENABLE_DEBUGGER_SUPPORT // Save the state of all registers to the stack from the memory // location. This is needed to allow nested break points. - if (type == StackFrame::EXIT_DEBUG) { + if (mode == ExitFrame::MODE_DEBUG) { // TODO(1243899): This should be symmetric to // CopyRegistersFromStackToMemory() but it isn't! esp is assumed // correct here, but computed for the other call. Very error @@ -396,8 +394,8 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type) { } #endif - // Reserve space for two arguments: argc and argv. - sub(Operand(esp), Immediate(2 * kPointerSize)); + // Reserve space for arguments. + sub(Operand(esp), Immediate(argc * kPointerSize)); // Get the required frame alignment for the OS. 
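// (Typically 16 bytes on platforms such as MacOS X, or 0 when the OS has
// no requirement; when nonzero, esp is rounded down to a multiple of it
// so that doubles spilled by called C code stay aligned.)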
static const int kFrameAlignment = OS::ActivationFrameAlignment(); @@ -411,15 +409,39 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type) { } -void MacroAssembler::LeaveExitFrame(StackFrame::Type type) { +void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode) { + EnterExitFramePrologue(mode); + + // Setup argc and argv in callee-saved registers. + int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; + mov(edi, Operand(eax)); + lea(esi, Operand(ebp, eax, times_4, offset)); + + EnterExitFrameEpilogue(mode, 2); +} + + +void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode, + int stack_space, + int argc) { + EnterExitFramePrologue(mode); + + int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; + lea(esi, Operand(ebp, (stack_space * kPointerSize) + offset)); + + EnterExitFrameEpilogue(mode, argc); +} + + +void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) { #ifdef ENABLE_DEBUGGER_SUPPORT // Restore the memory copy of the registers by digging them out from // the stack. This is needed to allow nested break points. - if (type == StackFrame::EXIT_DEBUG) { + if (mode == ExitFrame::MODE_DEBUG) { // It's okay to clobber register ebx below because we don't need // the function pointer after this. const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize; - int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize; + int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize; lea(ebx, Operand(ebp, kOffset)); CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved); } @@ -931,6 +953,52 @@ void MacroAssembler::TailCallRuntime(const ExternalReference& ext, } +void MacroAssembler::PushHandleScope(Register scratch) { + // Push the number of extensions, smi-tagged so the gc will ignore it. + ExternalReference extensions_address = + ExternalReference::handle_scope_extensions_address(); + mov(scratch, Operand::StaticVariable(extensions_address)); + ASSERT_EQ(0, kSmiTag); + shl(scratch, kSmiTagSize); + push(scratch); + mov(Operand::StaticVariable(extensions_address), Immediate(0)); + // Push next and limit pointers which will be wordsize aligned and + // hence automatically smi tagged. + ExternalReference next_address = + ExternalReference::handle_scope_next_address(); + push(Operand::StaticVariable(next_address)); + ExternalReference limit_address = + ExternalReference::handle_scope_limit_address(); + push(Operand::StaticVariable(limit_address)); +} + + +void MacroAssembler::PopHandleScope(Register saved, Register scratch) { + ExternalReference extensions_address = + ExternalReference::handle_scope_extensions_address(); + Label write_back; + mov(scratch, Operand::StaticVariable(extensions_address)); + cmp(Operand(scratch), Immediate(0)); + j(equal, &write_back); + // Calling a runtime function messes with registers so we save and + // restore any one we're asked not to change + if (saved.is_valid()) push(saved); + CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0); + if (saved.is_valid()) pop(saved); + + bind(&write_back); + ExternalReference limit_address = + ExternalReference::handle_scope_limit_address(); + pop(Operand::StaticVariable(limit_address)); + ExternalReference next_address = + ExternalReference::handle_scope_next_address(); + pop(Operand::StaticVariable(next_address)); + pop(scratch); + shr(scratch, kSmiTagSize); + mov(Operand::StaticVariable(extensions_address), scratch); +} + + void MacroAssembler::JumpToRuntime(const ExternalReference& ext) { // Set the entry point and jump to the C entry runtime stub. 
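// (The C entry stub reads the target address from ebx; callers such as
// TailCallRuntime above have already placed the argument count in eax.)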
mov(ebx, Immediate(ext)); diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h index a0a242806b..248aa7776e 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.h +++ b/deps/v8/src/ia32/macro-assembler-ia32.h @@ -77,16 +77,18 @@ class MacroAssembler: public Assembler { void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); } void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); } - // Enter specific kind of exit frame; either EXIT or - // EXIT_DEBUG. Expects the number of arguments in register eax and + // Enter specific kind of exit frame; either in normal or debug mode. + // Expects the number of arguments in register eax and // sets up the number of arguments in register edi and the pointer // to the first argument in register esi. - void EnterExitFrame(StackFrame::Type type); + void EnterExitFrame(ExitFrame::Mode mode); + + void EnterApiExitFrame(ExitFrame::Mode mode, int stack_space, int argc); // Leave the current exit frame. Expects the return value in // register eax:edx (untouched) and the pointer to the first // argument in register esi. - void LeaveExitFrame(StackFrame::Type type); + void LeaveExitFrame(ExitFrame::Mode mode); // --------------------------------------------------------------------------- @@ -269,6 +271,12 @@ class MacroAssembler: public Assembler { int num_arguments, int result_size); + void PushHandleScope(Register scratch); + + // Pops a handle scope using the specified scratch register, ensuring + // that the saved register, if it is not no_reg, is left unchanged. + void PopHandleScope(Register saved, Register scratch); + // Jump to a runtime routine. void JumpToRuntime(const ExternalReference& ext); @@ -346,6 +354,9 @@ class MacroAssembler: public Assembler { void EnterFrame(StackFrame::Type type); void LeaveFrame(StackFrame::Type type); + void EnterExitFramePrologue(ExitFrame::Mode mode); + void EnterExitFrameEpilogue(ExitFrame::Mode mode, int argc); + // Allocation support helpers. void LoadAllocationTopHelper(Register result, Register result_end, diff --git a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc index 76d36a939c..2e13d8aeed 100644 --- a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc @@ -598,10 +598,10 @@ Handle<Object> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) { Label stack_limit_hit; Label stack_ok; - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); + ExternalReference stack_limit = + ExternalReference::address_of_stack_limit(); __ mov(ecx, esp); - __ sub(ecx, Operand::StaticVariable(stack_guard_limit)); + __ sub(ecx, Operand::StaticVariable(stack_limit)); // Handle it if the stack pointer is already below the stack limit. __ j(below_equal, &stack_limit_hit, not_taken); // Check if there is room for the variable number of registers above @@ -1081,9 +1081,9 @@ void RegExpMacroAssemblerIA32::Pop(Register target) { void RegExpMacroAssemblerIA32::CheckPreemption() { // Check for preemption.
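// Preemption is requested by lowering the stack limit, so the single esp
// comparison below notices both pending interruptions and actual stack
// exhaustion.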
Label no_preempt; - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); - __ cmp(esp, Operand::StaticVariable(stack_guard_limit)); + ExternalReference stack_limit = + ExternalReference::address_of_stack_limit(); + __ cmp(esp, Operand::StaticVariable(stack_limit)); __ j(above, &no_preempt, taken); SafeCall(&check_preempt_label_); diff --git a/deps/v8/src/ia32/register-allocator-ia32.cc b/deps/v8/src/ia32/register-allocator-ia32.cc index 2914960eac..0bad87d082 100644 --- a/deps/v8/src/ia32/register-allocator-ia32.cc +++ b/deps/v8/src/ia32/register-allocator-ia32.cc @@ -42,7 +42,7 @@ void Result::ToRegister() { Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate(); ASSERT(fresh.is_valid()); if (CodeGeneratorScope::Current()->IsUnsafeSmi(handle())) { - CodeGeneratorScope::Current()->LoadUnsafeSmi(fresh.reg(), handle()); + CodeGeneratorScope::Current()->MoveUnsafeSmi(fresh.reg(), handle()); } else { CodeGeneratorScope::Current()->masm()->Set(fresh.reg(), Immediate(handle())); @@ -64,7 +64,7 @@ void Result::ToRegister(Register target) { } else { ASSERT(is_constant()); if (CodeGeneratorScope::Current()->IsUnsafeSmi(handle())) { - CodeGeneratorScope::Current()->LoadUnsafeSmi(fresh.reg(), handle()); + CodeGeneratorScope::Current()->MoveUnsafeSmi(fresh.reg(), handle()); } else { CodeGeneratorScope::Current()->masm()->Set(fresh.reg(), Immediate(handle())); diff --git a/deps/v8/src/ia32/simulator-ia32.h b/deps/v8/src/ia32/simulator-ia32.h index 8fa4287f76..ce7ed0ec9c 100644 --- a/deps/v8/src/ia32/simulator-ia32.h +++ b/deps/v8/src/ia32/simulator-ia32.h @@ -43,6 +43,12 @@ class SimulatorStack : public v8::internal::AllStatic { static inline uintptr_t JsLimitFromCLimit(uintptr_t c_limit) { return c_limit; } + + static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) { + return try_catch_address; + } + + static inline void UnregisterCTryCatch() { } }; // Call the generated regexp code directly. The entry function pointer should @@ -50,4 +56,7 @@ class SimulatorStack : public v8::internal::AllStatic { #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \ entry(p0, p1, p2, p3, p4, p5, p6) +#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \ + reinterpret_cast<TryCatch*>(try_catch_address) + #endif // V8_IA32_SIMULATOR_IA32_H_ diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc index ca4e142101..f9f986afea 100644 --- a/deps/v8/src/ia32/stub-cache-ia32.cc +++ b/deps/v8/src/ia32/stub-cache-ia32.cc @@ -240,7 +240,7 @@ void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm, __ mov(eax, FieldOperand(receiver, String::kLengthOffset)); // ecx is also the receiver. __ lea(ecx, Operand(scratch, String::kLongLengthShift)); - __ shr(eax); // ecx is implicit shift register. + __ shr_cl(eax); __ shl(eax, kSmiTagSize); __ ret(0); @@ -776,20 +776,40 @@ void StubCompiler::GenerateLoadCallback(JSObject* object, CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); - // Push the arguments on the JS stack of the caller. - __ pop(scratch2); // remove return address + Handle<AccessorInfo> callback_handle(callback); + + Register other = reg.is(scratch1) ? 
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index ca4e142101..f9f986afea 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -240,7 +240,7 @@ void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
   __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
   // ecx is also the receiver.
   __ lea(ecx, Operand(scratch, String::kLongLengthShift));
-  __ shr(eax);  // ecx is implicit shift register.
+  __ shr_cl(eax);
   __ shl(eax, kSmiTagSize);
   __ ret(0);
 
@@ -776,20 +776,40 @@ void StubCompiler::GenerateLoadCallback(JSObject* object,
   CheckPrototypes(object, receiver, holder,
                   scratch1, scratch2, name, miss);
 
-  // Push the arguments on the JS stack of the caller.
-  __ pop(scratch2);  // remove return address
+  Handle<AccessorInfo> callback_handle(callback);
+
+  Register other = reg.is(scratch1) ? scratch2 : scratch1;
+  __ EnterInternalFrame();
+  __ PushHandleScope(other);
+  // Push the stack address where the list of arguments ends.
+  __ mov(other, esp);
+  __ sub(Operand(other), Immediate(2 * kPointerSize));
+  __ push(other);
   __ push(receiver);  // receiver
   __ push(reg);  // holder
-  __ mov(reg, Immediate(Handle<AccessorInfo>(callback)));  // callback data
-  __ push(reg);
-  __ push(FieldOperand(reg, AccessorInfo::kDataOffset));
+  __ mov(other, Immediate(callback_handle));
+  __ push(other);
+  __ push(FieldOperand(other, AccessorInfo::kDataOffset));  // data
   __ push(name_reg);  // name
-  __ push(scratch2);  // restore return address
+  // Save a pointer to where we pushed the arguments pointer.
+  // This will be passed as the const Arguments& to the C++ callback.
+  __ mov(eax, esp);
+  __ add(Operand(eax), Immediate(5 * kPointerSize));
+  __ mov(ebx, esp);
+
+  // Do the call through the API.
+  ASSERT_EQ(6, ApiGetterEntryStub::kStackSpace);
+  Address getter_address = v8::ToCData<Address>(callback->getter());
+  ApiFunction fun(getter_address);
+  ApiGetterEntryStub stub(callback_handle, &fun);
+  __ CallStub(&stub);
 
-  // Do tail-call to the runtime system.
-  ExternalReference load_callback_property =
-      ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
-  __ TailCallRuntime(load_callback_property, 5, 1);
+  // We need to avoid using eax since it now holds the result.
+  Register tmp = other.is(eax) ? reg : other;
+  __ PopHandleScope(eax, tmp);
+  __ LeaveInternalFrame();
+
+  __ ret(0);
 }
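GenerateLoadCallback now builds the argument block for the C++ getter in place instead of tail-calling the runtime. The stack layout just before __ CallStub(&stub), reconstructed from the pushes above, looks like this; the diagram and slot names are inferred, not part of the patch, and offsets assume ia32's 4-byte kPointerSize:

    // esp[ 0]: name                      (pushed last)
    // esp[ 4]: AccessorInfo data
    // esp[ 8]: AccessorInfo handle
    // esp[12]: holder
    // esp[16]: receiver
    // esp[20]: address where the argument list ends (computed via 'other')
    __ mov(eax, esp);                                   // eax -> esp[0]
    __ add(Operand(eax), Immediate(5 * kPointerSize));  // eax -> esp[20]
    __ mov(ebx, esp);                                   // ebx -> start of block

The six slots in the diagram match the six pushes above, which is consistent with the ASSERT_EQ(6, ApiGetterEntryStub::kStackSpace) check.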
diff --git a/deps/v8/src/ia32/virtual-frame-ia32.cc b/deps/v8/src/ia32/virtual-frame-ia32.cc
index 980cec8eb7..e770cddb15 100644
--- a/deps/v8/src/ia32/virtual-frame-ia32.cc
+++ b/deps/v8/src/ia32/virtual-frame-ia32.cc
@@ -75,10 +75,7 @@ void VirtualFrame::SyncElementBelowStackPointer(int index) {
     case FrameElement::CONSTANT:
       if (cgen()->IsUnsafeSmi(element.handle())) {
-        Result temp = cgen()->allocator()->Allocate();
-        ASSERT(temp.is_valid());
-        cgen()->LoadUnsafeSmi(temp.reg(), element.handle());
-        __ mov(Operand(ebp, fp_relative(index)), temp.reg());
+        cgen()->StoreUnsafeSmiToLocal(fp_relative(index), element.handle());
       } else {
         __ mov(Operand(ebp, fp_relative(index)),
                Immediate(element.handle()));
@@ -127,10 +124,7 @@ void VirtualFrame::SyncElementByPushing(int index) {
     case FrameElement::CONSTANT:
       if (cgen()->IsUnsafeSmi(element.handle())) {
-        Result temp = cgen()->allocator()->Allocate();
-        ASSERT(temp.is_valid());
-        cgen()->LoadUnsafeSmi(temp.reg(), element.handle());
-        __ push(temp.reg());
+        cgen()->PushUnsafeSmi(element.handle());
       } else {
         __ push(Immediate(element.handle()));
       }
@@ -161,7 +155,7 @@ void VirtualFrame::SyncRange(int begin, int end) {
   // on the stack.
   int start = Min(begin, stack_pointer_ + 1);
 
-  // Emit normal 'push' instructions for elements above stack pointer
+  // Emit normal push instructions for elements above stack pointer
   // and use mov instructions if we are below stack pointer.
   for (int i = start; i <= end; i++) {
     if (!elements_[i].is_synced()) {
@@ -199,7 +193,7 @@ void VirtualFrame::MakeMergable() {
         // Emit a move.
         if (element.is_constant()) {
           if (cgen()->IsUnsafeSmi(element.handle())) {
-            cgen()->LoadUnsafeSmi(fresh.reg(), element.handle());
+            cgen()->MoveUnsafeSmi(fresh.reg(), element.handle());
           } else {
             __ Set(fresh.reg(), Immediate(element.handle()));
           }
@@ -300,7 +294,7 @@ void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
       if (!source.is_synced()) {
         if (cgen()->IsUnsafeSmi(source.handle())) {
           esi_caches = i;
-          cgen()->LoadUnsafeSmi(esi, source.handle());
+          cgen()->MoveUnsafeSmi(esi, source.handle());
           __ mov(Operand(ebp, fp_relative(i)), esi);
         } else {
           __ Set(Operand(ebp, fp_relative(i)), Immediate(source.handle()));
@@ -408,7 +402,7 @@ void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
       case FrameElement::CONSTANT:
         if (cgen()->IsUnsafeSmi(source.handle())) {
-          cgen()->LoadUnsafeSmi(target_reg, source.handle());
+          cgen()->MoveUnsafeSmi(target_reg, source.handle());
        } else {
           __ Set(target_reg, Immediate(source.handle()));
        }
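Across these files LoadUnsafeSmi becomes MoveUnsafeSmi, with StoreUnsafeSmiToLocal and PushUnsafeSmi variants replacing the allocate-then-move sequences, but the underlying technique is unchanged: a constant smi considered "unsafe" is materialized in two steps so its full bit pattern never appears as a single immediate word in generated code. A hedged sketch of what such a helper can look like; the exact low/high split below is an assumption for illustration, not this patch's implementation (the real helper lives in codegen-ia32.cc):

    // Materialize an unsafe smi without embedding its complete bit
    // pattern in one instruction: load the low half, then xor in the
    // high half. (Illustrative sketch only.)
    void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
      ASSERT(value->IsSmi());
      int bits = reinterpret_cast<int>(*value);
      __ Set(target, Immediate(bits & 0x0000ffff));  // low 16 bits
      __ xor_(target, bits & 0xffff0000);            // fold in high 16 bits
    }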