Diffstat (limited to 'deps/v8/src/ia32/builtins-ia32.cc')
 -rw-r--r--  deps/v8/src/ia32/builtins-ia32.cc | 292
 1 file changed, 159 insertions(+), 133 deletions(-)
diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc
index f15fd1cd8..f8a85de98 100644
--- a/deps/v8/src/ia32/builtins-ia32.cc
+++ b/deps/v8/src/ia32/builtins-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,7 +29,7 @@
#if defined(V8_TARGET_ARCH_IA32)
-#include "codegen-inl.h"
+#include "codegen.h"
#include "deoptimizer.h"
#include "full-codegen.h"
@@ -70,7 +70,7 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
// JumpToExternalReference expects eax to contain the number of arguments
// including the receiver and the extra arguments.
__ add(Operand(eax), Immediate(num_extra_args + 1));
- __ JumpToExternalReference(ExternalReference(id));
+ __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
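
The hunk above introduces the pattern that repeats throughout this diff: state that used to be reachable through static accessors (ExternalReference, Factory, Counters, Builtins) is now looked up through the isolate the MacroAssembler carries, i.e. masm->isolate(). A minimal standalone sketch of the idea, using hypothetical stand-in types rather than V8's real classes:

    #include <cstdio>

    struct Isolate {  // stand-in for v8::internal::Isolate
      int id;
    };

    struct ExternalReference {  // stand-in, not V8's class
      ExternalReference(int builtin_id, Isolate* isolate)
          : builtin_id_(builtin_id), isolate_(isolate) {}
      int builtin_id_;
      Isolate* isolate_;
    };

    // Mirrors the shape of the change: the isolate is threaded in explicitly
    // instead of being reached through process-wide globals.
    void GenerateAdaptorLike(Isolate* isolate, int builtin_id) {
      ExternalReference ref(builtin_id, isolate);
      std::printf("builtin %d resolved against isolate %d\n",
                  ref.builtin_id_, ref.isolate_->id);
    }

    int main() {
      Isolate a{1}, b{2};
      GenerateAdaptorLike(&a, 7);  // same builtin, different isolates
      GenerateAdaptorLike(&b, 7);
    }
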
@@ -82,8 +82,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
Label non_function_call;
// Check that function is not a smi.
- __ test(edi, Immediate(kSmiTagMask));
- __ j(zero, &non_function_call);
+ __ JumpIfSmi(edi, &non_function_call);
// Check that function is a JSFunction.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
__ j(not_equal, &non_function_call);
@@ -100,8 +99,10 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// Set expected number of arguments to zero (not changing eax).
__ Set(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
- RelocInfo::CODE_TARGET);
+ Handle<Code> arguments_adaptor =
+ masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ __ SetCallKind(ecx, CALL_AS_METHOD);
+ __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
}
@@ -128,7 +129,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address();
+ ExternalReference::debug_step_in_fp_address(masm->isolate());
__ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
__ j(not_equal, &rt_call);
#endif
@@ -138,8 +139,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// edi: constructor
__ mov(eax, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi
- __ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &rt_call);
+ __ JumpIfSmi(eax, &rt_call);
// edi: constructor
// eax: initial map (if proven valid below)
__ CmpObjectType(eax, MAP_TYPE, ebx);
@@ -184,7 +184,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// ebx: JSObject
// edi: start of next object
__ mov(Operand(ebx, JSObject::kMapOffset), eax);
- __ mov(ecx, Factory::empty_fixed_array());
+ Factory* factory = masm->isolate()->factory();
+ __ mov(ecx, factory->empty_fixed_array());
__ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
__ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
// Set extra fields in the newly allocated object.
@@ -194,9 +195,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
{ Label loop, entry;
// To allow for truncation.
if (count_constructions) {
- __ mov(edx, Factory::one_pointer_filler_map());
+ __ mov(edx, factory->one_pointer_filler_map());
} else {
- __ mov(edx, Factory::undefined_value());
+ __ mov(edx, factory->undefined_value());
}
__ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
__ jmp(&entry);
@@ -252,7 +253,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// edi: FixedArray
// edx: number of elements
// ecx: start of next object
- __ mov(eax, Factory::fixed_array_map());
+ __ mov(eax, factory->fixed_array_map());
__ mov(Operand(edi, FixedArray::kMapOffset), eax); // setup the map
__ SmiTag(edx);
__ mov(Operand(edi, FixedArray::kLengthOffset), edx); // and length
@@ -262,7 +263,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// edi: FixedArray
// ecx: start of next object
{ Label loop, entry;
- __ mov(edx, Factory::undefined_value());
+ __ mov(edx, factory->undefined_value());
__ lea(eax, Operand(edi, FixedArray::kHeaderSize));
__ jmp(&entry);
__ bind(&loop);
@@ -334,14 +335,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Call the function.
if (is_api_function) {
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
- Handle<Code> code = Handle<Code>(
- Builtins::builtin(Builtins::HandleApiCallConstruct));
+ Handle<Code> code =
+ masm->isolate()->builtins()->HandleApiCallConstruct();
ParameterCount expected(0);
- __ InvokeCode(code, expected, expected,
- RelocInfo::CODE_TARGET, CALL_FUNCTION);
+ __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
+ CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
} else {
ParameterCount actual(eax);
- __ InvokeFunction(edi, actual, CALL_FUNCTION);
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
}
// Restore context from the frame.
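
A second theme shows up in the hunks above and recurs below: call sites now record the call kind. CALL_AS_METHOD is loaded into ecx via SetCallKind before jumping to shared code, and InvokeCode/InvokeFunction take two extra parameters, a call wrapper (NullCallWrapper() here) and the call kind. A rough standalone model of the widened interface, with hypothetical stand-ins for the V8 types:

    #include <cstdio>

    enum CallKind { CALL_AS_METHOD, CALL_AS_FUNCTION };  // names as in the diff
    enum InvokeFlag { CALL_FUNCTION, JUMP_FUNCTION };

    struct NullCallWrapper {};  // no-op call observer, as in the real API

    // Stand-in for the widened signature used above:
    // __ InvokeFunction(edi, actual, CALL_FUNCTION, NullCallWrapper(),
    //                   CALL_AS_METHOD);
    void InvokeFunctionLike(int actual_args, InvokeFlag flag,
                            const NullCallWrapper&, CallKind kind) {
      std::printf("%s with %d args, kind=%s\n",
                  flag == CALL_FUNCTION ? "call" : "jump", actual_args,
                  kind == CALL_AS_METHOD ? "CALL_AS_METHOD"
                                         : "CALL_AS_FUNCTION");
    }

    int main() {
      InvokeFunctionLike(3, CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    }
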
@@ -353,13 +355,12 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
Label use_receiver, exit;
// If the result is a smi, it is *not* an object in the ECMA sense.
- __ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &use_receiver, not_taken);
+ __ JumpIfSmi(eax, &use_receiver);
// If the type of the result (stored in its map) is less than
- // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
- __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
- __ j(above_equal, &exit, not_taken);
+ // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
+ __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
+ __ j(above_equal, &exit);
// Throw away the result of the constructor invocation and use the
// on-stack receiver as the result.
@@ -376,7 +377,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ pop(ecx);
__ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
__ push(ecx);
- __ IncrementCounter(&Counters::constructed_objects, 1);
+ __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
__ ret(0);
}
@@ -436,11 +437,12 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Invoke the code.
if (is_construct) {
- __ call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
+ __ call(masm->isolate()->builtins()->JSConstructCall(),
RelocInfo::CODE_TARGET);
} else {
ParameterCount actual(eax);
- __ InvokeFunction(edi, actual, CALL_FUNCTION);
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
}
// Exit the JS frame. Notice that this also removes the empty
@@ -465,19 +467,25 @@ void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
// Enter an internal frame.
__ EnterInternalFrame();
- // Push a copy of the function onto the stack.
+ // Push a copy of the function.
__ push(edi);
+ // Push call kind information.
+ __ push(ecx);
__ push(edi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kLazyCompile, 1);
+
+ // Restore call kind information.
+ __ pop(ecx);
+ // Restore receiver.
__ pop(edi);
// Tear down temporary frame.
__ LeaveInternalFrame();
// Do a tail-call of the compiled function.
- __ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
- __ jmp(Operand(ecx));
+ __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
+ __ jmp(Operand(eax));
}
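
In LazyCompile (and LazyRecompile below) the call-kind register ecx must now survive the runtime call, so it is pushed and popped around it, and the tail call computes the code entry in eax instead of clobbering ecx. The address arithmetic is the usual tagged-pointer form; a small standalone sketch, with a stand-in value for Code::kHeaderSize:

    #include <cstdint>
    #include <cstdio>

    constexpr uintptr_t kHeapObjectTag = 1;   // V8 heap pointers carry tag bit 1
    constexpr uintptr_t kCodeHeaderSize = 64; // hypothetical stand-in value

    // lea eax, FieldOperand(eax, Code::kHeaderSize) yields
    // code_object + Code::kHeaderSize - kHeapObjectTag, i.e. the first
    // instruction past the Code header, which the stub then jumps to.
    uintptr_t CodeEntryAddress(uintptr_t tagged_code_object) {
      return tagged_code_object + kCodeHeaderSize - kHeapObjectTag;
    }

    int main() {
      std::printf("entry = %#zx\n",
                  static_cast<size_t>(CodeEntryAddress(0x10001)));
    }
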
@@ -487,17 +495,23 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
// Push a copy of the function onto the stack.
__ push(edi);
+ // Push call kind information.
+ __ push(ecx);
__ push(edi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kLazyRecompile, 1);
- // Restore function and tear down temporary frame.
+ // Restore call kind information.
+ __ pop(ecx);
+ // Restore receiver.
__ pop(edi);
+
+ // Tear down temporary frame.
__ LeaveInternalFrame();
// Do a tail-call of the compiled function.
- __ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
- __ jmp(Operand(ecx));
+ __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
+ __ jmp(Operand(eax));
}
@@ -518,15 +532,15 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
__ SmiUntag(ecx);
// Switch on the state.
- NearLabel not_no_registers, not_tos_eax;
+ Label not_no_registers, not_tos_eax;
__ cmp(ecx, FullCodeGenerator::NO_REGISTERS);
- __ j(not_equal, &not_no_registers);
+ __ j(not_equal, &not_no_registers, Label::kNear);
__ ret(1 * kPointerSize); // Remove state.
__ bind(&not_no_registers);
__ mov(eax, Operand(esp, 2 * kPointerSize));
__ cmp(ecx, FullCodeGenerator::TOS_REG);
- __ j(not_equal, &not_tos_eax);
+ __ j(not_equal, &not_tos_eax, Label::kNear);
__ ret(2 * kPointerSize); // Remove state, eax.
__ bind(&not_tos_eax);
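
The NearLabel type used above is gone: branches now take a plain Label plus an optional distance hint, and passing Label::kNear asks the assembler to emit the short (8-bit displacement) jump form. A toy model of that API shape, not V8's actual assembler:

    #include <cstdio>

    struct Label {
      enum Distance { kFar, kNear };
    };

    // Stand-in for the branch emitter: the distance hint is optional and
    // defaults to the far (32-bit displacement) form.
    void j(const char* cond, Label*, Label::Distance dist = Label::kFar) {
      std::printf("jcc %s (%s form)\n", cond,
                  dist == Label::kNear ? "short/8-bit" : "near/32-bit");
    }

    int main() {
      Label not_no_registers;
      j("not_equal", &not_no_registers, Label::kNear);  // new style
      j("not_equal", &not_no_registers);                // defaults to far
    }
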
@@ -561,12 +575,14 @@ void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
+ Factory* factory = masm->isolate()->factory();
+
// 1. Make sure we have at least one argument.
{ Label done;
__ test(eax, Operand(eax));
- __ j(not_zero, &done, taken);
+ __ j(not_zero, &done);
__ pop(ebx);
- __ push(Immediate(Factory::undefined_value()));
+ __ push(Immediate(factory->undefined_value()));
__ push(ebx);
__ inc(eax);
__ bind(&done);
@@ -577,10 +593,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
Label non_function;
// 1 ~ return address.
__ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
- __ test(edi, Immediate(kSmiTagMask));
- __ j(zero, &non_function, not_taken);
+ __ JumpIfSmi(edi, &non_function);
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
- __ j(not_equal, &non_function, not_taken);
+ __ j(not_equal, &non_function);
// 3a. Patch the first argument if necessary when calling a function.
@@ -595,22 +610,24 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1 << SharedFunctionInfo::kStrictModeBitWithinByte);
__ j(not_equal, &shift_arguments);
+ // Do not transform the receiver for natives (shared already in ebx).
+ __ test_b(FieldOperand(ebx, SharedFunctionInfo::kNativeByteOffset),
+ 1 << SharedFunctionInfo::kNativeBitWithinByte);
+ __ j(not_equal, &shift_arguments);
+
// Compute the receiver in non-strict mode.
__ mov(ebx, Operand(esp, eax, times_4, 0)); // First argument.
- __ test(ebx, Immediate(kSmiTagMask));
- __ j(zero, &convert_to_object);
- __ cmp(ebx, Factory::null_value());
+ // Call ToObject on the receiver if it is not an object, or use the
+ // global object if it is null or undefined.
+ __ JumpIfSmi(ebx, &convert_to_object);
+ __ cmp(ebx, factory->null_value());
__ j(equal, &use_global_receiver);
- __ cmp(ebx, Factory::undefined_value());
+ __ cmp(ebx, factory->undefined_value());
__ j(equal, &use_global_receiver);
-
- // We don't use IsObjectJSObjectType here because we jump on success.
- __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
- __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
- __ sub(Operand(ecx), Immediate(FIRST_JS_OBJECT_TYPE));
- __ cmp(ecx, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
- __ j(below_equal, &shift_arguments);
+ STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
+ __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
+ __ j(above_equal, &shift_arguments);
__ bind(&convert_to_object);
__ EnterInternalFrame(); // In order to preserve argument count.
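
The receiver handling above is both extended and simplified: natives (checked via SharedFunctionInfo::kNativeBitWithinByte) now skip the transform like strict-mode functions, and the old FIRST_JS_OBJECT_TYPE..LAST_JS_OBJECT_TYPE range check is replaced by a single CmpObjectType against FIRST_SPEC_OBJECT_TYPE, valid because LAST_SPEC_OBJECT_TYPE == LAST_TYPE. The decision logic, restated as a standalone sketch with hypothetical predicates:

    #include <cstdio>

    enum ReceiverAction { kKeep, kUseGlobalReceiver, kConvertToObject };

    struct Value {  // hypothetical classification of a tagged value
      bool is_smi, is_null_or_undefined, is_spec_object;
    };

    ReceiverAction ClassifyReceiver(const Value& v, bool strict_or_native) {
      if (strict_or_native) return kKeep;        // new: natives skip too
      if (v.is_smi) return kConvertToObject;     // JumpIfSmi(&convert_to_object)
      if (v.is_null_or_undefined) return kUseGlobalReceiver;
      if (v.is_spec_object) return kKeep;        // type >= FIRST_SPEC_OBJECT_TYPE
      return kConvertToObject;
    }

    int main() {
      Value smi{true, false, false};
      std::printf("%d\n", ClassifyReceiver(smi, false));  // prints 2 (convert)
    }
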
@@ -671,10 +688,11 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
{ Label function;
__ test(edi, Operand(edi));
- __ j(not_zero, &function, taken);
+ __ j(not_zero, &function);
__ Set(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
- __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+ __ SetCallKind(ecx, CALL_AS_METHOD);
+ __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
__ bind(&function);
}
@@ -687,11 +705,14 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
__ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
__ SmiUntag(ebx);
+ __ SetCallKind(ecx, CALL_AS_METHOD);
__ cmp(eax, Operand(ebx));
- __ j(not_equal, Handle<Code>(builtin(ArgumentsAdaptorTrampoline)));
+ __ j(not_equal,
+ masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());
ParameterCount expected(0);
- __ InvokeCode(Operand(edx), expected, expected, JUMP_FUNCTION);
+ __ InvokeCode(Operand(edx), expected, expected, JUMP_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
}
@@ -707,7 +728,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// limit" is checked.
Label okay;
ExternalReference real_stack_limit =
- ExternalReference::address_of_real_stack_limit();
+ ExternalReference::address_of_real_stack_limit(masm->isolate());
__ mov(edi, Operand::StaticVariable(real_stack_limit));
// Make ecx the space we have left. The stack might already be overflowed
// here which will cause ecx to become negative.
@@ -719,7 +740,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ shl(edx, kPointerSizeLog2 - kSmiTagSize);
// Check if the arguments will overflow the stack.
__ cmp(ecx, Operand(edx));
- __ j(greater, &okay, taken); // Signed comparison.
+ __ j(greater, &okay); // Signed comparison.
// Out of stack space.
__ push(Operand(ebp, 4 * kPointerSize)); // push this
@@ -750,24 +771,25 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1 << SharedFunctionInfo::kStrictModeBitWithinByte);
__ j(not_equal, &push_receiver);
+ Factory* factory = masm->isolate()->factory();
+
+ // Do not transform the receiver for natives (shared already in ecx).
+ __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
+ 1 << SharedFunctionInfo::kNativeBitWithinByte);
+ __ j(not_equal, &push_receiver);
+
// Compute the receiver in non-strict mode.
- __ test(ebx, Immediate(kSmiTagMask));
- __ j(zero, &call_to_object);
- __ cmp(ebx, Factory::null_value());
+ // Call ToObject on the receiver if it is not an object, or use the
+ // global object if it is null or undefined.
+ __ JumpIfSmi(ebx, &call_to_object);
+ __ cmp(ebx, factory->null_value());
__ j(equal, &use_global_receiver);
- __ cmp(ebx, Factory::undefined_value());
+ __ cmp(ebx, factory->undefined_value());
__ j(equal, &use_global_receiver);
+ STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
+ __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
+ __ j(above_equal, &push_receiver);
- // If given receiver is already a JavaScript object then there's no
- // reason for converting it.
- // We don't use IsObjectJSObjectType here because we jump on success.
- __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
- __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
- __ sub(Operand(ecx), Immediate(FIRST_JS_OBJECT_TYPE));
- __ cmp(ecx, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
- __ j(below_equal, &push_receiver);
-
- // Convert the receiver to an object.
__ bind(&call_to_object);
__ push(ebx);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
@@ -795,7 +817,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ mov(edx, Operand(ebp, 2 * kPointerSize)); // load arguments
// Use inline caching to speed up access to arguments.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
__ call(ic, RelocInfo::CODE_TARGET);
// It is important that we do not have a test instruction after the
// call. A test instruction after the call is used to indicate that
@@ -818,7 +840,8 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
ParameterCount actual(eax);
__ SmiUntag(eax);
__ mov(edi, Operand(ebp, 4 * kPointerSize));
- __ InvokeFunction(edi, actual, CALL_FUNCTION);
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
__ LeaveInternalFrame();
__ ret(3 * kPointerSize); // remove this, receiver, and arguments
@@ -866,8 +889,9 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
// scratch1: initial map
// scratch2: start of next object
__ mov(FieldOperand(result, JSObject::kMapOffset), scratch1);
+ Factory* factory = masm->isolate()->factory();
__ mov(FieldOperand(result, JSArray::kPropertiesOffset),
- Factory::empty_fixed_array());
+ factory->empty_fixed_array());
// Field JSArray::kElementsOffset is initialized later.
__ mov(FieldOperand(result, JSArray::kLengthOffset), Immediate(0));
@@ -875,7 +899,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
// fixed array.
if (initial_capacity == 0) {
__ mov(FieldOperand(result, JSArray::kElementsOffset),
- Factory::empty_fixed_array());
+ factory->empty_fixed_array());
return;
}
@@ -892,7 +916,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
// scratch1: elements array
// scratch2: start of next object
__ mov(FieldOperand(scratch1, FixedArray::kMapOffset),
- Factory::fixed_array_map());
+ factory->fixed_array_map());
__ mov(FieldOperand(scratch1, FixedArray::kLengthOffset),
Immediate(Smi::FromInt(initial_capacity)));
@@ -903,7 +927,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
if (initial_capacity <= kLoopUnfoldLimit) {
// Use a scratch register here to have only one reloc info when unfolding
// the loop.
- __ mov(scratch3, Factory::the_hole_value());
+ __ mov(scratch3, factory->the_hole_value());
for (int i = 0; i < initial_capacity; i++) {
__ mov(FieldOperand(scratch1,
FixedArray::kHeaderSize + i * kPointerSize),
@@ -913,7 +937,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label loop, entry;
__ jmp(&entry);
__ bind(&loop);
- __ mov(Operand(scratch1, 0), Factory::the_hole_value());
+ __ mov(Operand(scratch1, 0), factory->the_hole_value());
__ add(Operand(scratch1), Immediate(kPointerSize));
__ bind(&entry);
__ cmp(scratch1, Operand(scratch2));
@@ -968,7 +992,8 @@ static void AllocateJSArray(MacroAssembler* masm,
// elements_array_end: start of next object
// array_size: size of array (smi)
__ mov(FieldOperand(result, JSObject::kMapOffset), elements_array);
- __ mov(elements_array, Factory::empty_fixed_array());
+ Factory* factory = masm->isolate()->factory();
+ __ mov(elements_array, factory->empty_fixed_array());
__ mov(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
// Field JSArray::kElementsOffset is initialized later.
__ mov(FieldOperand(result, JSArray::kLengthOffset), array_size);
@@ -987,7 +1012,7 @@ static void AllocateJSArray(MacroAssembler* masm,
// elements_array_end: start of next object
// array_size: size of array (smi)
__ mov(FieldOperand(elements_array, FixedArray::kMapOffset),
- Factory::fixed_array_map());
+ factory->fixed_array_map());
// For non-empty JSArrays the length of the FixedArray and the JSArray is the
// same.
__ mov(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
@@ -999,7 +1024,7 @@ static void AllocateJSArray(MacroAssembler* masm,
__ SmiUntag(array_size);
__ lea(edi, Operand(elements_array,
FixedArray::kHeaderSize - kHeapObjectTag));
- __ mov(eax, Factory::the_hole_value());
+ __ mov(eax, factory->the_hole_value());
__ cld();
// Do not use rep stos when filling less than kRepStosThreshold
// words.
@@ -1063,7 +1088,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
edi,
kPreallocatedArrayElements,
&prepare_generic_code_call);
- __ IncrementCounter(&Counters::array_function_native, 1);
+ __ IncrementCounter(masm->isolate()->counters()->array_function_native(), 1);
__ pop(ebx);
if (construct_call) {
__ pop(edi);
@@ -1119,7 +1144,8 @@ static void ArrayNativeCode(MacroAssembler* masm,
edi,
true,
&prepare_generic_code_call);
- __ IncrementCounter(&Counters::array_function_native, 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->array_function_native(), 1);
__ mov(eax, ebx);
__ pop(ebx);
if (construct_call) {
@@ -1146,7 +1172,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
edi,
false,
&prepare_generic_code_call);
- __ IncrementCounter(&Counters::array_function_native, 1);
+ __ IncrementCounter(counters->array_function_native(), 1);
__ mov(eax, ebx);
__ pop(ebx);
if (construct_call) {
@@ -1232,8 +1258,8 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// Jump to the generic array code in case the specialized code cannot handle
// the construction.
__ bind(&generic_array_code);
- Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
- Handle<Code> array_code(code);
+ Handle<Code> array_code =
+ masm->isolate()->builtins()->ArrayCodeGeneric();
__ jmp(array_code, RelocInfo::CODE_TARGET);
}
@@ -1248,11 +1274,9 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
Label generic_constructor;
if (FLAG_debug_code) {
- // The array construct code is only set for the builtin Array function which
- // does always have a map.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ebx);
- __ cmp(edi, Operand(ebx));
- __ Assert(equal, "Unexpected Array function");
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
// Initial map for the builtin Array function should be a map.
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
@@ -1268,8 +1292,8 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
// Jump to the generic construct code in case the specialized code cannot
// handle the construction.
__ bind(&generic_constructor);
- Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
- Handle<Code> generic_construct_stub(code);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
__ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
}
@@ -1282,7 +1306,8 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// -- esp[(argc - n) * 4] : arg[n] (zero-based)
// -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
- __ IncrementCounter(&Counters::string_ctor_calls, 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_ctor_calls(), 1);
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
@@ -1311,7 +1336,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
edx, // Scratch 2.
false, // Input is known to be smi?
&not_cached);
- __ IncrementCounter(&Counters::string_ctor_cached_number, 1);
+ __ IncrementCounter(counters->string_ctor_cached_number(), 1);
__ bind(&argument_is_string);
// ----------- S t a t e -------------
// -- ebx : argument converted to string
@@ -1340,7 +1365,8 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
__ mov(FieldOperand(eax, HeapObject::kMapOffset), ecx);
// Set properties and elements.
- __ Set(ecx, Immediate(Factory::empty_fixed_array()));
+ Factory* factory = masm->isolate()->factory();
+ __ Set(ecx, Immediate(factory->empty_fixed_array()));
__ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
__ mov(FieldOperand(eax, JSObject::kElementsOffset), ecx);
@@ -1358,17 +1384,16 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
Label convert_argument;
__ bind(&not_cached);
STATIC_ASSERT(kSmiTag == 0);
- __ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &convert_argument);
+ __ JumpIfSmi(eax, &convert_argument);
Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
__ j(NegateCondition(is_string), &convert_argument);
__ mov(ebx, eax);
- __ IncrementCounter(&Counters::string_ctor_string_value, 1);
+ __ IncrementCounter(counters->string_ctor_string_value(), 1);
__ jmp(&argument_is_string);
// Invoke the conversion builtin and put the result into ebx.
__ bind(&convert_argument);
- __ IncrementCounter(&Counters::string_ctor_conversions, 1);
+ __ IncrementCounter(counters->string_ctor_conversions(), 1);
__ EnterInternalFrame();
__ push(edi); // Preserve the function.
__ push(eax);
@@ -1381,7 +1406,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// Load the empty string into ebx, remove the receiver from the
// stack, and jump back to the case where the argument is a string.
__ bind(&no_arguments);
- __ Set(ebx, Immediate(Factory::empty_string()));
+ __ Set(ebx, Immediate(factory->empty_string()));
__ pop(ecx);
__ lea(esp, Operand(esp, kPointerSize));
__ push(ecx);
@@ -1390,7 +1415,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// At this point the argument is already a string. Call runtime to
// create a string wrapper.
__ bind(&gc_required);
- __ IncrementCounter(&Counters::string_ctor_gc_required, 1);
+ __ IncrementCounter(counters->string_ctor_gc_required(), 1);
__ EnterInternalFrame();
__ push(ebx);
__ CallRuntime(Runtime::kNewStringWrapper, 1);
@@ -1409,12 +1434,12 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
// Push the function on the stack.
__ push(edi);
- // Preserve the number of arguments on the stack. Must preserve both
- // eax and ebx because these registers are used when copying the
+ // Preserve the number of arguments on the stack. Must preserve eax,
+ // ebx and ecx because these registers are used when copying the
// arguments and the receiver.
ASSERT(kSmiTagSize == 1);
- __ lea(ecx, Operand(eax, eax, times_1, kSmiTag));
- __ push(ecx);
+ __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
+ __ push(edi);
}
@@ -1437,11 +1462,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : actual number of arguments
// -- ebx : expected number of arguments
+ // -- ecx : call kind information
// -- edx : code entry to call
// -----------------------------------
Label invoke, dont_adapt_arguments;
- __ IncrementCounter(&Counters::arguments_adaptors, 1);
+ __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
Label enough, too_few;
__ cmp(eax, Operand(ebx));
@@ -1456,14 +1482,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Copy receiver and all expected arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
__ lea(eax, Operand(ebp, eax, times_4, offset));
- __ mov(ecx, -1); // account for receiver
+ __ mov(edi, -1); // account for receiver
Label copy;
__ bind(&copy);
- __ inc(ecx);
+ __ inc(edi);
__ push(Operand(eax, 0));
__ sub(Operand(eax), Immediate(kPointerSize));
- __ cmp(ecx, Operand(ebx));
+ __ cmp(edi, Operand(ebx));
__ j(less, &copy);
__ jmp(&invoke);
}
@@ -1475,30 +1501,33 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Copy receiver and all actual arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
__ lea(edi, Operand(ebp, eax, times_4, offset));
- __ mov(ecx, -1); // account for receiver
+ // ebx = expected - actual.
+ __ sub(ebx, Operand(eax));
+ // eax = -actual - 1
+ __ neg(eax);
+ __ sub(Operand(eax), Immediate(1));
Label copy;
__ bind(&copy);
- __ inc(ecx);
+ __ inc(eax);
__ push(Operand(edi, 0));
__ sub(Operand(edi), Immediate(kPointerSize));
- __ cmp(ecx, Operand(eax));
- __ j(less, &copy);
+ __ test(eax, Operand(eax));
+ __ j(not_zero, &copy);
// Fill remaining expected arguments with undefined values.
Label fill;
__ bind(&fill);
- __ inc(ecx);
- __ push(Immediate(Factory::undefined_value()));
- __ cmp(ecx, Operand(ebx));
+ __ inc(eax);
+ __ push(Immediate(masm->isolate()->factory()->undefined_value()));
+ __ cmp(eax, Operand(ebx));
__ j(less, &fill);
-
- // Restore function pointer.
- __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
}
// Call the entry point.
__ bind(&invoke);
+ // Restore function pointer.
+ __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ call(Operand(edx));
// Leave frame and return.
@@ -1514,10 +1543,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
- // We shouldn't be performing on-stack replacement in the first
- // place if the CPU features we need for the optimized Crankshaft
- // code aren't supported.
- CpuFeatures::Probe(false);
+ CpuFeatures::TryForceFeatureScope scope(SSE2);
if (!CpuFeatures::IsSupported(SSE2)) {
__ Abort("Unreachable code: Cannot optimize without SSE2 support.");
return;
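
The explicit CpuFeatures::Probe(false) call above is replaced with a TryForceFeatureScope around the SSE2 check. The underlying idea is RAII: attempt to enable the feature for the duration of the scope and restore the previous state on exit. The sketch below is only a guess at that shape, with hypothetical stand-ins rather than V8's implementation:

    #include <cstdio>

    struct FeatureStateModel {  // hypothetical per-process feature state
      bool sse2_enabled = false;
    };

    struct TryForceFeatureScopeModel {
      explicit TryForceFeatureScopeModel(FeatureStateModel* state)
          : state_(state), saved_(state->sse2_enabled) {
        state_->sse2_enabled = true;  // model: force-enable for this scope
      }
      ~TryForceFeatureScopeModel() { state_->sse2_enabled = saved_; }
      FeatureStateModel* state_;
      bool saved_;
    };

    int main() {
      FeatureStateModel features;
      {
        TryForceFeatureScopeModel scope(&features);
        std::printf("inside scope: %d\n", features.sse2_enabled);  // 1
      }
      std::printf("after scope:  %d\n", features.sse2_enabled);    // 0
    }
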
@@ -1552,19 +1578,19 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
// If the result was -1 it means that we couldn't optimize the
// function. Just return and continue in the unoptimized version.
- NearLabel skip;
+ Label skip;
__ cmp(Operand(eax), Immediate(Smi::FromInt(-1)));
- __ j(not_equal, &skip);
+ __ j(not_equal, &skip, Label::kNear);
__ ret(0);
// If we decide not to perform on-stack replacement we perform a
// stack guard check to enable interrupts.
__ bind(&stack_check);
- NearLabel ok;
+ Label ok;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm->isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
- __ j(above_equal, &ok, taken);
+ __ j(above_equal, &ok, Label::kNear);
StackCheckStub stub;
__ TailCallStub(&stub);
__ Abort("Unreachable code: returned from tail call.");
@@ -1584,7 +1610,7 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
#undef __
-
-} } // namespace v8::internal
+}
+} // namespace v8::internal
#endif // V8_TARGET_ARCH_IA32