Diffstat (limited to 'deps/v8/src/builtins/ia32/builtins-ia32.cc')
-rw-r--r-- | deps/v8/src/builtins/ia32/builtins-ia32.cc | 185
1 file changed, 68 insertions, 117 deletions
diff --git a/deps/v8/src/builtins/ia32/builtins-ia32.cc b/deps/v8/src/builtins/ia32/builtins-ia32.cc
index 3beff0d53f..c217c6c7c3 100644
--- a/deps/v8/src/builtins/ia32/builtins-ia32.cc
+++ b/deps/v8/src/builtins/ia32/builtins-ia32.cc
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "src/codegen/register-arch.h"
+#include "src/codegen/register.h"
 #if V8_TARGET_ARCH_IA32
 
 #include "src/api/api-arguments.h"
@@ -89,11 +89,7 @@ void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
   DCHECK(!AreAliased(array, argc, scratch1, scratch2));
   Register counter = scratch1;
   Label loop, entry;
-  if (kJSArgcIncludesReceiver) {
-    __ lea(counter, Operand(argc, -kJSArgcReceiverSlots));
-  } else {
-    __ mov(counter, argc);
-  }
+  __ lea(counter, Operand(argc, -kJSArgcReceiverSlots));
   __ jmp(&entry);
   __ bind(&loop);
   Operand value(array, counter, times_system_pointer_size, 0);
@@ -163,9 +159,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
 
   // Remove caller arguments from the stack and return.
   __ DropArguments(edx, ecx, TurboAssembler::kCountIsSmi,
-                   kJSArgcIncludesReceiver
-                       ? TurboAssembler::kCountIncludesReceiver
-                       : TurboAssembler::kCountExcludesReceiver);
+                   TurboAssembler::kCountIncludesReceiver);
   __ ret(0);
 
   __ bind(&stack_overflow);
@@ -322,9 +316,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
 
   // Remove caller arguments from the stack and return.
   __ DropArguments(edx, ecx, TurboAssembler::kCountIsSmi,
-                   kJSArgcIncludesReceiver
-                       ? TurboAssembler::kCountIncludesReceiver
-                       : TurboAssembler::kCountExcludesReceiver);
+                   TurboAssembler::kCountIncludesReceiver);
   __ ret(0);
 
   // Otherwise we do a smi check and fall through to check if the return value
@@ -695,9 +687,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
   __ movzx_w(ecx, FieldOperand(
                       ecx, SharedFunctionInfo::kFormalParameterCountOffset));
-  if (kJSArgcIncludesReceiver) {
-    __ dec(ecx);
-  }
+  __ dec(ecx);  // Exclude receiver.
   __ mov(ebx,
          FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
   {
@@ -803,18 +793,17 @@ static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                   Register scratch2) {
   ASM_CODE_COMMENT(masm);
   Register params_size = scratch1;
-  // Get the size of the formal parameters + receiver (in bytes).
+  // Get the size of the formal parameters (in bytes).
   __ mov(params_size,
          Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
   __ mov(params_size,
          FieldOperand(params_size, BytecodeArray::kParameterSizeOffset));
 
   Register actual_params_size = scratch2;
-  // Compute the size of the actual parameters + receiver (in bytes).
+  // Compute the size of the actual parameters (in bytes).
   __ mov(actual_params_size, Operand(ebp, StandardFrameConstants::kArgCOffset));
   __ lea(actual_params_size,
-         Operand(actual_params_size, times_system_pointer_size,
-                 kJSArgcIncludesReceiver ? 0 : kSystemPointerSize));
+         Operand(actual_params_size, times_system_pointer_size, 0));
 
   // If actual is bigger than formal, then we should use it to free up the stack
   // arguments.
@@ -905,22 +894,16 @@ static void MaybeOptimizeCode(MacroAssembler* masm,
   ASM_CODE_COMMENT(masm);
   DCHECK(!AreAliased(edx, edi, optimization_marker));
 
-  // TODO(v8:8394): The logging of first execution will break if
-  // feedback vectors are not allocated. We need to find a different way of
-  // logging these events if required.
+  TailCallRuntimeIfMarkerEquals(
+      masm, optimization_marker,
+      OptimizationMarker::kCompileTurbofan_NotConcurrent,
+      Runtime::kCompileTurbofan_NotConcurrent);
   TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
-                                OptimizationMarker::kLogFirstExecution,
-                                Runtime::kFunctionFirstExecution);
-  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
-                                OptimizationMarker::kCompileOptimized,
-                                Runtime::kCompileOptimized_NotConcurrent);
-  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
-                                OptimizationMarker::kCompileOptimizedConcurrent,
-                                Runtime::kCompileOptimized_Concurrent);
+                                OptimizationMarker::kCompileTurbofan_Concurrent,
+                                Runtime::kCompileTurbofan_Concurrent);
 
-  // Marker should be one of LogFirstExecution / CompileOptimized /
-  // CompileOptimizedConcurrent. InOptimizationQueue and None shouldn't reach
-  // here.
+  // Marker should be one of CompileOptimized / CompileOptimizedConcurrent.
+  // InOptimizationQueue and None shouldn't reach here.
   if (FLAG_debug_code) {
     __ int3();
   }
@@ -1041,9 +1024,8 @@ static void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(
   ASM_CODE_COMMENT(masm);
   Label maybe_has_optimized_code;
   // Check if optimized code is available
-  __ test(
-      optimization_state,
-      Immediate(FeedbackVector::kHasCompileOptimizedOrLogFirstExecutionMarker));
+  __ test(optimization_state,
+          Immediate(FeedbackVector::kHasCompileOptimizedMarker));
   __ j(zero, &maybe_has_optimized_code);
 
   Register optimization_marker = optimization_state;
@@ -1380,14 +1362,9 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
   __ movd(xmm0, eax);  // Spill number of arguments.
 
   // Compute the expected number of arguments.
-  int argc_modification = kJSArgcIncludesReceiver ? 0 : 1;
+  __ mov(scratch, eax);
   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
-    argc_modification -= 1;
-  }
-  if (argc_modification != 0) {
-    __ lea(scratch, Operand(eax, argc_modification));
-  } else {
-    __ mov(scratch, eax);
+    __ dec(scratch);  // Exclude receiver.
   }
 
   // Pop return address to allow tail-call after pushing arguments.
@@ -1462,10 +1439,7 @@ void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
 
   // Step 1 - Update the stack pointer.
 
-  constexpr int receiver_offset =
-      kJSArgcIncludesReceiver ? 0 : kSystemPointerSize;
-  __ lea(scratch1,
-         Operand(num_args, times_system_pointer_size, receiver_offset));
+  __ lea(scratch1, Operand(num_args, times_system_pointer_size, 0));
   __ AllocateStackSpace(scratch1);
 
   // Step 2 move return_address and slots around it to the correct locations.
@@ -1474,7 +1448,7 @@ void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
   // extra slot for receiver, so no extra checks are required to avoid copy.
   for (int i = 0; i < num_slots_to_move + 1; i++) {
     __ mov(scratch1, Operand(esp, num_args, times_system_pointer_size,
-                             i * kSystemPointerSize + receiver_offset));
+                             i * kSystemPointerSize));
     __ mov(Operand(esp, i * kSystemPointerSize), scratch1);
   }
 
@@ -1496,11 +1470,7 @@ void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
   __ bind(&loop_check);
   __ inc(scratch1);
   __ cmp(scratch1, eax);
-  if (kJSArgcIncludesReceiver) {
-    __ j(less, &loop_header, Label::kNear);
-  } else {
-    __ j(less_equal, &loop_header, Label::kNear);
-  }
+  __ j(less, &loop_header, Label::kNear);
 }
 
 }  // anonymous namespace
@@ -1899,7 +1869,7 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
     // from LAZY is always the last argument.
     __ movd(Operand(esp, eax, times_system_pointer_size,
                     BuiltinContinuationFrameConstants::kFixedFrameSize -
-                        (kJSArgcIncludesReceiver ? kSystemPointerSize : 0)),
+                        kJSArgcReceiverSlots * kSystemPointerSize),
             xmm0);
   }
   __ mov(
@@ -1965,13 +1935,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
 
     __ LoadRoot(edx, RootIndex::kUndefinedValue);
     __ mov(edi, edx);
-    if (kJSArgcIncludesReceiver) {
-      __ cmp(eax, Immediate(JSParameterCount(0)));
-      __ j(equal, &no_this_arg, Label::kNear);
-    } else {
-      __ test(eax, eax);
-      __ j(zero, &no_this_arg, Label::kNear);
-    }
+    __ cmp(eax, Immediate(JSParameterCount(0)));
+    __ j(equal, &no_this_arg, Label::kNear);
     {
       __ mov(edi, args[1]);
      __ cmp(eax, Immediate(JSParameterCount(1)));
@@ -1980,10 +1945,9 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
       __ bind(&no_arg_array);
     }
     __ bind(&no_this_arg);
-    __ DropArgumentsAndPushNewReceiver(
-        eax, edi, ecx, TurboAssembler::kCountIsInteger,
-        kJSArgcIncludesReceiver ? TurboAssembler::kCountIncludesReceiver
-                                : TurboAssembler::kCountExcludesReceiver);
+    __ DropArgumentsAndPushNewReceiver(eax, edi, ecx,
+                                       TurboAssembler::kCountIsInteger,
+                                       TurboAssembler::kCountIncludesReceiver);
 
     // Restore receiver to edi.
     __ movd(edi, xmm0);
@@ -2042,13 +2006,8 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   // 3. Make sure we have at least one argument.
   {
     Label done;
-    if (kJSArgcIncludesReceiver) {
-      __ cmp(eax, Immediate(JSParameterCount(0)));
-      __ j(greater, &done, Label::kNear);
-    } else {
-      __ test(eax, eax);
-      __ j(not_zero, &done, Label::kNear);
-    }
+    __ cmp(eax, Immediate(JSParameterCount(0)));
+    __ j(greater, &done, Label::kNear);
     __ PushRoot(RootIndex::kUndefinedValue);
     __ inc(eax);
     __ bind(&done);
@@ -2095,10 +2054,9 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
 
     // Spill argumentsList to use edx as a scratch register.
     __ movd(xmm0, edx);
-    __ DropArgumentsAndPushNewReceiver(
-        eax, ecx, edx, TurboAssembler::kCountIsInteger,
-        kJSArgcIncludesReceiver ? TurboAssembler::kCountIncludesReceiver
-                                : TurboAssembler::kCountExcludesReceiver);
+    __ DropArgumentsAndPushNewReceiver(eax, ecx, edx,
+                                       TurboAssembler::kCountIsInteger,
+                                       TurboAssembler::kCountIncludesReceiver);
 
     // Restore argumentsList.
     __ movd(edx, xmm0);
@@ -2157,8 +2115,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
     __ DropArgumentsAndPushNewReceiver(
         eax, masm->RootAsOperand(RootIndex::kUndefinedValue), ecx,
         TurboAssembler::kCountIsInteger,
-        kJSArgcIncludesReceiver ? TurboAssembler::kCountIncludesReceiver
-                                : TurboAssembler::kCountExcludesReceiver);
+        TurboAssembler::kCountIncludesReceiver);
 
     // Restore argumentsList.
     __ movd(ecx, xmm0);
@@ -2205,9 +2162,6 @@ void Generate_AllocateSpaceAndShiftExistingArguments(
   __ lea(new_space, Operand(count, times_system_pointer_size, 0));
   __ AllocateStackSpace(new_space);
-  if (!kJSArgcIncludesReceiver) {
-    __ inc(argc_in_out);
-  }
 
   Register current = scratch1;
   Register value = scratch2;
@@ -2228,12 +2182,7 @@ void Generate_AllocateSpaceAndShiftExistingArguments(
       pointer_to_new_space_out,
       Operand(esp, argc_in_out, times_system_pointer_size, kSystemPointerSize));
   // Update the total number of arguments.
-  if (kJSArgcIncludesReceiver) {
-    __ add(argc_in_out, count);
-  } else {
-    // Also subtract the receiver again.
-    __ lea(argc_in_out, Operand(argc_in_out, count, times_1, -1));
-  }
+  __ add(argc_in_out, count);
 }
 
 }  // namespace
@@ -2372,9 +2321,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
 
   Label stack_done, stack_overflow;
   __ mov(edx, Operand(ebp, StandardFrameConstants::kArgCOffset));
-  if (kJSArgcIncludesReceiver) {
-    __ dec(edx);
-  }
+  __ dec(edx);  // Exclude receiver.
   __ sub(edx, ecx);
   __ j(less_equal, &stack_done);
   {
@@ -2447,13 +2394,9 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   //  -- edi : the function to call (checked to be a JSFunction)
   // -----------------------------------
   StackArgumentsAccessor args(eax);
-  __ AssertFunction(edi, edx);
+  __ AssertCallableFunction(edi, edx);
 
-  Label class_constructor;
   __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
-  __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
-          Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
-  __ j(not_zero, &class_constructor);
 
   // Enter the context of the function; ToObject has to run in the function
   // context, and we also need to take the global proxy from the function
@@ -2534,14 +2477,6 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   __ movzx_w(
       ecx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
   __ InvokeFunctionCode(edi, no_reg, ecx, eax, InvokeType::kJump);
-
-  // The function is a "classConstructor", need to raise an exception.
-  __ bind(&class_constructor);
-  {
-    FrameScope frame(masm, StackFrame::INTERNAL);
-    __ push(edi);
-    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
-  }
 }
 
 namespace {
@@ -2665,7 +2600,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
 
   StackArgumentsAccessor args(argc);
   Label non_callable, non_smi, non_callable_jsfunction, non_jsboundfunction,
-      non_proxy, class_constructor;
+      non_proxy, non_wrapped_function, class_constructor;
   __ JumpIfSmi(target, &non_callable);
   __ bind(&non_smi);
   __ LoadMap(map, target);
@@ -2694,9 +2629,17 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
   __ j(not_equal, &non_proxy);
   __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
 
+  // Check if target is a wrapped function and call CallWrappedFunction external
+  // builtin
+  __ bind(&non_proxy);
+  __ cmpw(instance_type, Immediate(JS_WRAPPED_FUNCTION_TYPE));
+  __ j(not_equal, &non_wrapped_function);
+  __ Jump(BUILTIN_CODE(masm->isolate(), CallWrappedFunction),
+          RelocInfo::CODE_TARGET);
+
   // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
   // Check that the function is not a "classConstructor".
-  __ bind(&non_proxy);
+  __ bind(&non_wrapped_function);
   __ cmpw(instance_type, Immediate(JS_CLASS_CONSTRUCTOR_TYPE));
   __ j(equal, &class_constructor);
 
@@ -2980,19 +2923,19 @@ void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
 
     // Save all parameter registers. They might hold live values, we restore
     // them after the runtime call.
-    for (int reg_code : base::bits::IterateBitsBackwards(
-             WasmDebugBreakFrameConstants::kPushedGpRegs)) {
-      __ Push(Register::from_code(reg_code));
+    for (Register reg :
+         base::Reversed(WasmDebugBreakFrameConstants::kPushedGpRegs)) {
+      __ Push(reg);
     }
 
     constexpr int kFpStackSize =
         kSimd128Size * WasmDebugBreakFrameConstants::kNumPushedFpRegisters;
     __ AllocateStackSpace(kFpStackSize);
     int offset = kFpStackSize;
-    for (int reg_code : base::bits::IterateBitsBackwards(
-             WasmDebugBreakFrameConstants::kPushedFpRegs)) {
+    for (DoubleRegister reg :
+         base::Reversed(WasmDebugBreakFrameConstants::kPushedFpRegs)) {
       offset -= kSimd128Size;
-      __ movdqu(Operand(esp, offset), DoubleRegister::from_code(reg_code));
+      __ movdqu(Operand(esp, offset), reg);
     }
 
     // Initialize the JavaScript context with 0. CEntry will use it to
@@ -3001,15 +2944,13 @@ void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
     __ CallRuntime(Runtime::kWasmDebugBreak, 0);
 
     // Restore registers.
-    for (int reg_code :
-         base::bits::IterateBits(WasmDebugBreakFrameConstants::kPushedFpRegs)) {
-      __ movdqu(DoubleRegister::from_code(reg_code), Operand(esp, offset));
+    for (DoubleRegister reg : WasmDebugBreakFrameConstants::kPushedFpRegs) {
+      __ movdqu(reg, Operand(esp, offset));
       offset += kSimd128Size;
     }
     __ add(esp, Immediate(kFpStackSize));
-    for (int reg_code :
-         base::bits::IterateBits(WasmDebugBreakFrameConstants::kPushedGpRegs)) {
-      __ Pop(Register::from_code(reg_code));
+    for (Register reg : WasmDebugBreakFrameConstants::kPushedGpRegs) {
+      __ Pop(reg);
    }
   }
 
@@ -3026,6 +2967,16 @@ void Builtins::Generate_WasmReturnPromiseOnSuspend(MacroAssembler* masm) {
   __ Trap();
 }
 
+void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
+  // TODO(v8:12191): Implement for this platform.
+  __ Trap();
+}
+
+void Builtins::Generate_WasmResume(MacroAssembler* masm) {
+  // TODO(v8:12191): Implement for this platform.
+  __ Trap();
+}
+
 void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
   // Only needed on x64.
   __ Trap();