Diffstat (limited to 'deps/v8/src/mips')
-rw-r--r--  deps/v8/src/mips/assembler-mips.cc              |   6
-rw-r--r--  deps/v8/src/mips/builtins-mips.cc               |  56
-rw-r--r--  deps/v8/src/mips/code-stubs-mips.cc             | 293
-rw-r--r--  deps/v8/src/mips/code-stubs-mips.h              |   3
-rw-r--r--  deps/v8/src/mips/deoptimizer-mips.cc            |   8
-rw-r--r--  deps/v8/src/mips/full-codegen-mips.cc           | 179
-rw-r--r--  deps/v8/src/mips/lithium-codegen-mips.cc        | 226
-rw-r--r--  deps/v8/src/mips/lithium-codegen-mips.h         |  17
-rw-r--r--  deps/v8/src/mips/lithium-mips.cc                |  32
-rw-r--r--  deps/v8/src/mips/lithium-mips.h                 |  16
-rw-r--r--  deps/v8/src/mips/macro-assembler-mips.cc        |  18
-rw-r--r--  deps/v8/src/mips/macro-assembler-mips.h         |  10
-rw-r--r--  deps/v8/src/mips/regexp-macro-assembler-mips.cc |   4
-rw-r--r--  deps/v8/src/mips/regexp-macro-assembler-mips.h  |   1
-rw-r--r--  deps/v8/src/mips/simulator-mips.cc              |  15
-rw-r--r--  deps/v8/src/mips/stub-cache-mips.cc             |  95
16 files changed, 694 insertions, 285 deletions
diff --git a/deps/v8/src/mips/assembler-mips.cc b/deps/v8/src/mips/assembler-mips.cc
index e36b97f18..1b666ec6c 100644
--- a/deps/v8/src/mips/assembler-mips.cc
+++ b/deps/v8/src/mips/assembler-mips.cc
@@ -237,10 +237,14 @@ void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
// See assembler-mips-inl.h for inlined constructors.
Operand::Operand(Handle<Object> handle) {
+#ifdef DEBUG
+ Isolate* isolate = Isolate::Current();
+#endif
+ ALLOW_HANDLE_DEREF(isolate, "using and embedding raw address");
rm_ = no_reg;
// Verify all Objects referred to by code are NOT in new space.
Object* obj = *handle;
- ASSERT(!HEAP->InNewSpace(obj));
+ ASSERT(!isolate->heap()->InNewSpace(obj));
if (obj->IsHeapObject()) {
imm32_ = reinterpret_cast<intptr_t>(handle.location());
rmode_ = RelocInfo::EMBEDDED_OBJECT;
diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc
index 1901f9c41..700bcc4a5 100644
--- a/deps/v8/src/mips/builtins-mips.cc
+++ b/deps/v8/src/mips/builtins-mips.cc
@@ -317,8 +317,7 @@ static void AllocateJSArray(MacroAssembler* masm,
// entering the generic code. In both cases argc in a0 needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// construct call and normal call.
-static void ArrayNativeCode(MacroAssembler* masm,
- Label* call_generic_code) {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) {
Counters* counters = masm->isolate()->counters();
Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
has_non_smi_element, finish, cant_transition_map, not_double;
@@ -546,7 +545,7 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
}
-void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
+void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : number of arguments
// -- a1 : constructor function
@@ -566,48 +565,17 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
__ GetObjectType(a3, a3, t0);
__ Assert(eq, "Unexpected initial map for Array function (4)",
t0, Operand(MAP_TYPE));
-
- if (FLAG_optimize_constructed_arrays) {
- // We should either have undefined in a2 or a valid jsglobalpropertycell
- Label okay_here;
- Handle<Object> undefined_sentinel(
- masm->isolate()->heap()->undefined_value(), masm->isolate());
- Handle<Map> global_property_cell_map(
- masm->isolate()->heap()->global_property_cell_map());
- __ Branch(&okay_here, eq, a2, Operand(undefined_sentinel));
- __ lw(a3, FieldMemOperand(a2, 0));
- __ Assert(eq, "Expected property cell in register a3",
- a3, Operand(global_property_cell_map));
- __ bind(&okay_here);
- }
- }
-
- if (FLAG_optimize_constructed_arrays) {
- Label not_zero_case, not_one_case;
- __ Branch(&not_zero_case, ne, a0, Operand(zero_reg));
- ArrayNoArgumentConstructorStub no_argument_stub;
- __ TailCallStub(&no_argument_stub);
-
- __ bind(&not_zero_case);
- __ Branch(&not_one_case, gt, a0, Operand(1));
- ArraySingleArgumentConstructorStub single_argument_stub;
- __ TailCallStub(&single_argument_stub);
-
- __ bind(&not_one_case);
- ArrayNArgumentsConstructorStub n_argument_stub;
- __ TailCallStub(&n_argument_stub);
- } else {
- Label generic_constructor;
- // Run the native code for the Array function called as a constructor.
- ArrayNativeCode(masm, &generic_constructor);
-
- // Jump to the generic construct code in case the specialized code cannot
- // handle the construction.
- __ bind(&generic_constructor);
- Handle<Code> generic_construct_stub =
- masm->isolate()->builtins()->JSConstructStubGeneric();
- __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
+ Label generic_constructor;
+ // Run the native code for the Array function called as a constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc
index 27fd73231..733c3694d 100644
--- a/deps/v8/src/mips/code-stubs-mips.cc
+++ b/deps/v8/src/mips/code-stubs-mips.cc
@@ -97,16 +97,33 @@ void TransitionElementsKindStub::InitializeInterfaceDescriptor(
}
-static void InitializeArrayConstructorDescriptor(Isolate* isolate,
+void CompareNilICStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { a0 };
+ descriptor->register_param_count_ = 1;
+ descriptor->register_params_ = registers;
+ descriptor->deoptimization_handler_ =
+ FUNCTION_ADDR(CompareNilIC_Miss);
+ descriptor->miss_handler_ =
+ ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate);
+}
+
+
+static void InitializeArrayConstructorDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor,
+ int constant_stack_parameter_count) {
// register state
- // a1 -- constructor function
+ // a0 -- number of arguments
// a2 -- type info cell with elements kind
- // a0 -- number of arguments to the constructor function
- static Register registers[] = { a1, a2 };
- descriptor->register_param_count_ = 2;
- // stack param count needs (constructor pointer, and single argument)
- descriptor->stack_parameter_count_ = &a0;
+ static Register registers[] = { a2 };
+ descriptor->register_param_count_ = 1;
+ if (constant_stack_parameter_count != 0) {
+ // stack param count needs (constructor pointer, and single argument)
+ descriptor->stack_parameter_count_ = &a0;
+ }
+ descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_params_ = registers;
descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
descriptor->deoptimization_handler_ =
@@ -117,21 +134,21 @@ static void InitializeArrayConstructorDescriptor(Isolate* isolate,
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}
void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}
void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}
@@ -161,6 +178,30 @@ static void EmitCheckForHeapNumber(MacroAssembler* masm, Register operand,
}
+void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
+ // Update the static counter each time a new code stub is generated.
+ Isolate* isolate = masm->isolate();
+ isolate->counters()->code_stubs()->Increment();
+
+ CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+ int param_count = descriptor->register_param_count_;
+ {
+ // Call the runtime system in a fresh internal frame.
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ ASSERT(descriptor->register_param_count_ == 0 ||
+ a0.is(descriptor->register_params_[param_count - 1]));
+ // Push arguments
+ for (int i = 0; i < param_count; ++i) {
+ __ push(descriptor->register_params_[i]);
+ }
+ ExternalReference miss = descriptor->miss_handler_;
+ __ CallExternalReference(miss, descriptor->register_param_count_);
+ }
+
+ __ Ret();
+}
+
+
void ToNumberStub::Generate(MacroAssembler* masm) {
// The ToNumber stub takes one argument in a0.
Label check_heap_number, call_builtin;
@@ -1561,7 +1602,7 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
AllowExternalCallThatCantCauseGC scope(masm);
__ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
- __ li(a0, Operand(ExternalReference::isolate_address()));
+ __ li(a0, Operand(ExternalReference::isolate_address(masm->isolate())));
__ CallCFunction(
ExternalReference::store_buffer_overflow_function(masm->isolate()),
argument_count);
@@ -2487,12 +2528,17 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
__ Addu(scratch2, scratch1, Operand(0x40000000));
// If not try to return a heap number.
__ Branch(&return_heap_number, lt, scratch2, Operand(zero_reg));
- // Check for minus zero. Return heap number for minus zero.
+ // Check for minus zero. Return heap number for minus zero if
+ // double results are allowed; otherwise transition.
Label not_zero;
__ Branch(&not_zero, ne, scratch1, Operand(zero_reg));
__ mfc1(scratch2, f11);
__ And(scratch2, scratch2, HeapNumber::kSignMask);
- __ Branch(&return_heap_number, ne, scratch2, Operand(zero_reg));
+ __ Branch(result_type_ <= BinaryOpIC::INT32 ? &transition
+ : &return_heap_number,
+ ne,
+ scratch2,
+ Operand(zero_reg));
__ bind(&not_zero);
// Tag the result and return.
@@ -2505,21 +2551,18 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
__ bind(&return_heap_number);
// Return a heap number, or fall through to type transition or runtime
// call if we can't.
- if (result_type_ >= ((op_ == Token::DIV) ? BinaryOpIC::NUMBER
- : BinaryOpIC::INT32)) {
- // We are using FPU registers so s0 is available.
- heap_number_result = s0;
- BinaryOpStub_GenerateHeapResultAllocation(masm,
- heap_number_result,
- heap_number_map,
- scratch1,
- scratch2,
- &call_runtime,
- mode_);
- __ mov(v0, heap_number_result);
- __ sdc1(f10, FieldMemOperand(v0, HeapNumber::kValueOffset));
- __ Ret();
- }
+ // We are using FPU registers so s0 is available.
+ heap_number_result = s0;
+ BinaryOpStub_GenerateHeapResultAllocation(masm,
+ heap_number_result,
+ heap_number_map,
+ scratch1,
+ scratch2,
+ &call_runtime,
+ mode_);
+ __ mov(v0, heap_number_result);
+ __ sdc1(f10, FieldMemOperand(v0, HeapNumber::kValueOffset));
+ __ Ret();
// A DIV operation expecting an integer result falls through
// to type transition.
@@ -3303,6 +3346,9 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ }
}
@@ -3384,7 +3430,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
__ AssertStackIsAligned();
- __ li(a2, Operand(ExternalReference::isolate_address()));
+ __ li(a2, Operand(ExternalReference::isolate_address(isolate)));
// To let the GC traverse the return address of the exit frames, we need to
// know where the return address is. The CEntryStub is unmovable, so
@@ -4663,7 +4709,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Argument 9: Pass current isolate address.
// CFunctionArgumentOperand handles MIPS stack argument slots.
- __ li(a0, Operand(ExternalReference::isolate_address()));
+ __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ sw(a0, MemOperand(sp, 5 * kPointerSize));
// Argument 8: Indicate that this is a direct call from JavaScript.
@@ -5057,7 +5103,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
Handle<Object> terminal_kind_sentinel =
TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
LAST_FAST_ELEMENTS_KIND);
- __ Branch(&miss, ne, a3, Operand(terminal_kind_sentinel));
+ __ Branch(&miss, gt, a3, Operand(terminal_kind_sentinel));
// Make sure the function is the Array() function
__ LoadArrayFunction(a3);
__ Branch(&megamorphic, ne, a1, Operand(a3));
@@ -7277,7 +7323,7 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
__ Move(address, regs_.address());
__ Move(a0, regs_.object());
__ Move(a1, address);
- __ li(a2, Operand(ExternalReference::isolate_address()));
+ __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate())));
AllowExternalCallThatCantCauseGC scope(masm);
if (mode == INCREMENTAL_COMPACTION) {
@@ -7523,6 +7569,189 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
}
+template<class T>
+static void CreateArrayDispatch(MacroAssembler* masm) {
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ Branch(&next, ne, a3, Operand(kind));
+ T stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
+ // a2 - type info cell
+ // a3 - kind
+ // a0 - number of arguments
+ // a1 - constructor?
+ // sp[0] - last argument
+ ASSERT(FAST_SMI_ELEMENTS == 0);
+ ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ ASSERT(FAST_ELEMENTS == 2);
+ ASSERT(FAST_HOLEY_ELEMENTS == 3);
+ ASSERT(FAST_DOUBLE_ELEMENTS == 4);
+ ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+  // Is the low bit set? If so, we are holey and that is good.
+ Label normal_sequence;
+ __ And(at, a3, Operand(1));
+ __ Branch(&normal_sequence, ne, at, Operand(zero_reg));
+
+  // Look at the first argument.
+ __ lw(t1, MemOperand(sp, 0));
+ __ Branch(&normal_sequence, eq, t1, Operand(zero_reg));
+
+ // We are going to create a holey array, but our kind is non-holey.
+ // Fix kind and retry
+ __ Addu(a3, a3, Operand(1));
+ __ Branch(&normal_sequence, eq, a2, Operand(undefined_sentinel));
+
+ // Save the resulting elements kind in type info
+ __ SmiTag(a3);
+ __ sw(a3, FieldMemOperand(a2, kPointerSize));
+ __ SmiUntag(a3);
+
+ __ bind(&normal_sequence);
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ Branch(&next, ne, a3, Operand(kind));
+ ArraySingleArgumentConstructorStub stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+template<class T>
+static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
+ int to_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= to_index; ++i) {
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ T stub(kind);
+ stub.GetCode(isolate)->set_is_pregenerated(true);
+ }
+}
+
+
+void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
+ isolate);
+}
+
+
+void ArrayConstructorStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : argc (only if argument_count_ == ANY)
+ // -- a1 : constructor
+ // -- a2 : type info cell
+ // -- sp[0] : return address
+ // -- sp[4] : last argument
+ // -----------------------------------
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ if (FLAG_debug_code) {
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
+ // Initial map for the builtin Array function should be a map.
+ __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a NULL and a Smi.
+ __ And(at, a3, Operand(kSmiTagMask));
+ __ Assert(ne, "Unexpected initial map for Array function",
+ at, Operand(zero_reg));
+ __ GetObjectType(a3, a3, t0);
+ __ Assert(eq, "Unexpected initial map for Array function",
+ t0, Operand(MAP_TYPE));
+
+    // We should either have undefined in a2 or a valid JSGlobalPropertyCell.
+ Label okay_here;
+ Handle<Map> global_property_cell_map(
+ masm->isolate()->heap()->global_property_cell_map());
+ __ Branch(&okay_here, eq, a2, Operand(undefined_sentinel));
+ __ lw(a3, FieldMemOperand(a2, 0));
+ __ Assert(eq, "Expected property cell in register ebx",
+ a3, Operand(global_property_cell_map));
+ __ bind(&okay_here);
+ }
+
+ if (FLAG_optimize_constructed_arrays) {
+ Label no_info, switch_ready;
+ // Get the elements kind and case on that.
+ __ Branch(&no_info, eq, a2, Operand(undefined_sentinel));
+ __ lw(a3, FieldMemOperand(a2, kPointerSize));
+
+ // There is no info if the call site went megamorphic either
+ // TODO(mvstanton): Really? I thought if it was the array function that
+ // the cell wouldn't get stamped as megamorphic.
+ __ Branch(&no_info, eq, a3,
+ Operand(TypeFeedbackCells::MegamorphicSentinel(masm->isolate())));
+ __ SmiUntag(a3);
+ __ jmp(&switch_ready);
+ __ bind(&no_info);
+ __ li(a3, Operand(GetInitialFastElementsKind()));
+ __ bind(&switch_ready);
+
+ if (argument_count_ == ANY) {
+ Label not_zero_case, not_one_case;
+ __ And(at, a0, a0);
+ __ Branch(&not_zero_case, ne, at, Operand(zero_reg));
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+
+ __ bind(&not_zero_case);
+ __ Branch(&not_one_case, gt, a0, Operand(1));
+ CreateArrayDispatchOneArgument(masm);
+
+ __ bind(&not_one_case);
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else if (argument_count_ == NONE) {
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+ } else if (argument_count_ == ONE) {
+ CreateArrayDispatchOneArgument(masm);
+ } else if (argument_count_ == MORE_THAN_ONE) {
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else {
+ UNREACHABLE();
+ }
+ } else {
+ Label generic_constructor;
+ // Run the native code for the Array function called as a constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+ }
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/deps/v8/src/mips/code-stubs-mips.h b/deps/v8/src/mips/code-stubs-mips.h
index 2370d4537..3a84644a1 100644
--- a/deps/v8/src/mips/code-stubs-mips.h
+++ b/deps/v8/src/mips/code-stubs-mips.h
@@ -35,6 +35,9 @@ namespace v8 {
namespace internal {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
+
+
// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public PlatformCodeStub {
diff --git a/deps/v8/src/mips/deoptimizer-mips.cc b/deps/v8/src/mips/deoptimizer-mips.cc
index 7896f2013..ecf408732 100644
--- a/deps/v8/src/mips/deoptimizer-mips.cc
+++ b/deps/v8/src/mips/deoptimizer-mips.cc
@@ -591,8 +591,6 @@ void Deoptimizer::CopyDoubleRegisters(FrameDescription* output_frame) {
void Deoptimizer::EntryGenerator::Generate() {
GeneratePrologue();
- Isolate* isolate = masm()->isolate();
-
// Unlike on ARM we don't save all the registers, just the useful ones.
// For the rest, there are gaps on the stack, so the offsets remain the same.
const int kNumberOfRegisters = Register::kNumRegisters;
@@ -653,12 +651,12 @@ void Deoptimizer::EntryGenerator::Generate() {
// a2: bailout id already loaded.
// a3: code address or 0 already loaded.
__ sw(t0, CFunctionArgumentOperand(5)); // Fp-to-sp delta.
- __ li(t1, Operand(ExternalReference::isolate_address()));
+ __ li(t1, Operand(ExternalReference::isolate_address(isolate())));
__ sw(t1, CFunctionArgumentOperand(6)); // Isolate.
// Call Deoptimizer::New().
{
AllowExternalCallThatCantCauseGC scope(masm());
- __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
+ __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate()), 6);
}
// Preserve "deoptimizer" object in register v0 and get the input
@@ -725,7 +723,7 @@ void Deoptimizer::EntryGenerator::Generate() {
{
AllowExternalCallThatCantCauseGC scope(masm());
__ CallCFunction(
- ExternalReference::compute_output_frames_function(isolate), 1);
+ ExternalReference::compute_output_frames_function(isolate()), 1);
}
__ pop(a0); // Restore deoptimizer object (class Deoptimizer).
diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc
index bc0d85543..a6fd39aa1 100644
--- a/deps/v8/src/mips/full-codegen-mips.cc
+++ b/deps/v8/src/mips/full-codegen-mips.cc
@@ -1926,6 +1926,156 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
}
+void FullCodeGenerator::VisitYield(Yield* expr) {
+ Comment cmnt(masm_, "[ Yield");
+ // Evaluate yielded value first; the initial iterator definition depends on
+ // this. It stays on the stack while we update the iterator.
+ VisitForStackValue(expr->expression());
+
+ switch (expr->yield_kind()) {
+ case Yield::INITIAL:
+ case Yield::SUSPEND: {
+ VisitForStackValue(expr->generator_object());
+ __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
+ __ lw(context_register(),
+ MemOperand(fp, StandardFrameConstants::kContextOffset));
+
+ Label resume;
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Branch(&resume, ne, result_register(), Operand(at));
+ __ pop(result_register());
+ if (expr->yield_kind() == Yield::SUSPEND) {
+ // TODO(wingo): Box into { value: VALUE, done: false }.
+ }
+ EmitReturnSequence();
+
+ __ bind(&resume);
+ context()->Plug(result_register());
+ break;
+ }
+
+ case Yield::FINAL: {
+ VisitForAccumulatorValue(expr->generator_object());
+ __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
+ __ sw(a1, FieldMemOperand(result_register(),
+ JSGeneratorObject::kContinuationOffset));
+ __ pop(result_register());
+ // TODO(wingo): Box into { value: VALUE, done: true }.
+
+ // Exit all nested statements.
+ NestedStatement* current = nesting_stack_;
+ int stack_depth = 0;
+ int context_length = 0;
+ while (current != NULL) {
+ current = current->Exit(&stack_depth, &context_length);
+ }
+ __ Drop(stack_depth);
+ EmitReturnSequence();
+ break;
+ }
+
+ case Yield::DELEGATING:
+ UNIMPLEMENTED();
+ }
+}
+
+
+void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
+ Expression *value,
+ JSGeneratorObject::ResumeMode resume_mode) {
+ // The value stays in a0, and is ultimately read by the resumed generator, as
+ // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. a1
+ // will hold the generator object until the activation has been resumed.
+ VisitForStackValue(generator);
+ VisitForAccumulatorValue(value);
+ __ pop(a1);
+
+ // Check generator state.
+ Label wrong_state, done;
+ __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
+ STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
+ __ Branch(&wrong_state, le, a3, Operand(zero_reg));
+
+ // Load suspended function and context.
+ __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
+ __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
+
+ // Load receiver and store as the first argument.
+ __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
+ __ push(a2);
+
+ // Push holes for the rest of the arguments to the generator function.
+ __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(a3,
+ FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
+ Label push_argument_holes, push_frame;
+ __ bind(&push_argument_holes);
+ __ Subu(a3, a3, Operand(1));
+ __ Branch(&push_frame, lt, a3, Operand(zero_reg));
+ __ push(a2);
+ __ jmp(&push_argument_holes);
+
+ // Enter a new JavaScript frame, and initialize its slots as they were when
+ // the generator was suspended.
+ Label resume_frame;
+ __ bind(&push_frame);
+ __ Call(&resume_frame);
+ __ jmp(&done);
+ __ bind(&resume_frame);
+ __ push(ra); // Return address.
+ __ push(fp); // Caller's frame pointer.
+ __ mov(fp, sp);
+ __ push(cp); // Callee's context.
+ __ push(t0); // Callee's JS Function.
+
+ // Load the operand stack size.
+ __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
+ __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
+ __ SmiUntag(a3);
+
+ // If we are sending a value and there is no operand stack, we can jump back
+ // in directly.
+ if (resume_mode == JSGeneratorObject::SEND) {
+ Label slow_resume;
+ __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
+ __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset));
+ __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
+ __ SmiUntag(a2);
+ __ Addu(a3, a3, Operand(a2));
+ __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
+ __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
+ __ Jump(a3);
+ __ bind(&slow_resume);
+ }
+
+ // Otherwise, we push holes for the operand stack and call the runtime to fix
+ // up the stack and the handlers.
+ Label push_operand_holes, call_resume;
+ __ bind(&push_operand_holes);
+ __ Subu(a3, a3, Operand(1));
+ __ Branch(&call_resume, lt, a3, Operand(zero_reg));
+ __ push(a2);
+ __ b(&push_operand_holes);
+ __ bind(&call_resume);
+ __ push(a1);
+ __ push(result_register());
+ __ Push(Smi::FromInt(resume_mode));
+ __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
+ // Not reached: the runtime call returns elsewhere.
+ __ stop("not-reached");
+
+ // Throw error if we attempt to operate on a running generator.
+ __ bind(&wrong_state);
+ __ push(a1);
+ __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
+
+ __ bind(&done);
+ context()->Plug(result_register());
+}
+
+
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
@@ -4398,26 +4548,21 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
VisitForAccumulatorValue(sub_expr);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
- Heap::RootListIndex nil_value = nil == kNullValue ?
- Heap::kNullValueRootIndex :
- Heap::kUndefinedValueRootIndex;
+ EqualityKind kind = expr->op() == Token::EQ_STRICT
+ ? kStrictEquality : kNonStrictEquality;
__ mov(a0, result_register());
- __ LoadRoot(a1, nil_value);
- if (expr->op() == Token::EQ_STRICT) {
+ if (kind == kStrictEquality) {
+ Heap::RootListIndex nil_value = nil == kNullValue ?
+ Heap::kNullValueRootIndex :
+ Heap::kUndefinedValueRootIndex;
+ __ LoadRoot(a1, nil_value);
Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
} else {
- Heap::RootListIndex other_nil_value = nil == kNullValue ?
- Heap::kUndefinedValueRootIndex :
- Heap::kNullValueRootIndex;
- __ Branch(if_true, eq, a0, Operand(a1));
- __ LoadRoot(a1, other_nil_value);
- __ Branch(if_true, eq, a0, Operand(a1));
- __ JumpIfSmi(a0, if_false);
- // It can be an undetectable object.
- __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
- __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
- __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
- Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
+ Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(),
+ kNonStrictEquality,
+ nil);
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
+ Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
}
context()->Plug(if_true, if_false);
}
diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc
index 77949313e..0c2983f23 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.cc
+++ b/deps/v8/src/mips/lithium-codegen-mips.cc
@@ -257,38 +257,21 @@ bool LCodeGen::GenerateBody() {
!is_aborted() && current_instruction_ < instructions_->length();
current_instruction_++) {
LInstruction* instr = instructions_->at(current_instruction_);
+
+ // Don't emit code for basic blocks with a replacement.
if (instr->IsLabel()) {
- LLabel* label = LLabel::cast(instr);
- emit_instructions = !label->HasReplacement();
+ emit_instructions = !LLabel::cast(instr)->HasReplacement();
}
+ if (!emit_instructions) continue;
- if (emit_instructions) {
- if (FLAG_code_comments) {
- HValue* hydrogen = instr->hydrogen_value();
- if (hydrogen != NULL) {
- if (hydrogen->IsChange()) {
- HValue* changed_value = HChange::cast(hydrogen)->value();
- int use_id = 0;
- const char* use_mnemo = "dead";
- if (hydrogen->UseCount() >= 1) {
- HValue* use_value = hydrogen->uses().value();
- use_id = use_value->id();
- use_mnemo = use_value->Mnemonic();
- }
- Comment(";;; @%d: %s. <of #%d %s for #%d %s>",
- current_instruction_, instr->Mnemonic(),
- changed_value->id(), changed_value->Mnemonic(),
- use_id, use_mnemo);
- } else {
- Comment(";;; @%d: %s. <#%d>", current_instruction_,
- instr->Mnemonic(), hydrogen->id());
- }
- } else {
- Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
- }
- }
- instr->CompileToNative(this);
+ if (FLAG_code_comments && instr->HasInterestingComment(this)) {
+ Comment(";;; <@%d,#%d> %s",
+ current_instruction_,
+ instr->hydrogen_value()->id(),
+ instr->Mnemonic());
}
+
+ instr->CompileToNative(this);
}
return !is_aborted();
}
@@ -299,11 +282,14 @@ bool LCodeGen::GenerateDeferredCode() {
if (deferred_.length() > 0) {
for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
LDeferredCode* code = deferred_[i];
+ Comment(";;; <@%d,#%d> "
+ "-------------------- Deferred %s --------------------",
+ code->instruction_index(),
+ code->instr()->hydrogen_value()->id(),
+ code->instr()->Mnemonic());
__ bind(code->entry());
if (NeedsDeferredFrame()) {
- Comment(";;; Deferred build frame @%d: %s.",
- code->instruction_index(),
- code->instr()->Mnemonic());
+ Comment(";;; Build frame");
ASSERT(!frame_is_built_);
ASSERT(info()->IsStub());
frame_is_built_ = true;
@@ -311,15 +297,11 @@ bool LCodeGen::GenerateDeferredCode() {
__ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
__ push(scratch0());
__ Addu(fp, sp, Operand(2 * kPointerSize));
+ Comment(";;; Deferred code");
}
- Comment(";;; Deferred code @%d: %s.",
- code->instruction_index(),
- code->instr()->Mnemonic());
code->Generate();
if (NeedsDeferredFrame()) {
- Comment(";;; Deferred destroy frame @%d: %s.",
- code->instruction_index(),
- code->instr()->Mnemonic());
+ Comment(";;; Destroy frame");
ASSERT(frame_is_built_);
__ pop(at);
__ MultiPop(cp.bit() | fp.bit() | ra.bit());
@@ -346,8 +328,10 @@ bool LCodeGen::GenerateDeoptJumpTable() {
Abort("Generated code is too large");
}
+ if (deopt_jump_table_.length() > 0) {
+ Comment(";;; -------------------- Jump table --------------------");
+ }
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
- __ RecordComment("[ Deoptimization jump table");
Label table_start;
__ bind(&table_start);
Label needs_frame_not_call;
@@ -592,7 +576,7 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
pushed_arguments_index,
pushed_arguments_count);
bool has_closure_id = !info()->closure().is_null() &&
- *info()->closure() != *environment->closure();
+ !info()->closure().is_identical_to(environment->closure());
int closure_id = has_closure_id
? DefineDeoptimizationLiteral(environment->closure())
: Translation::kSelfLiteralId;
@@ -904,10 +888,13 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
Handle<FixedArray> literals =
factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
- for (int i = 0; i < deoptimization_literals_.length(); i++) {
- literals->set(i, *deoptimization_literals_[i]);
+ { ALLOW_HANDLE_DEREF(isolate(),
+ "copying a ZoneList of handles into a FixedArray");
+ for (int i = 0; i < deoptimization_literals_.length(); i++) {
+ literals->set(i, *deoptimization_literals_[i]);
+ }
+ data->SetLiteralArray(*literals);
}
- data->SetLiteralArray(*literals);
data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
@@ -1023,10 +1010,19 @@ void LCodeGen::RecordPosition(int position) {
}
+static const char* LabelType(LLabel* label) {
+ if (label->is_loop_header()) return " (loop header)";
+ if (label->is_osr_entry()) return " (OSR entry)";
+ return "";
+}
+
+
void LCodeGen::DoLabel(LLabel* label) {
- Comment(";;; -------------------- B%d%s --------------------",
+ Comment(";;; <@%d,#%d> -------------------- B%d%s --------------------",
+ current_instruction_,
+ label->hydrogen_value()->id(),
label->block_id(),
- label->is_loop_header() ? " (loop header)" : "");
+ LabelType(label));
__ bind(label->label());
current_block_ = label->block_id();
DoGap(label);
@@ -1480,6 +1476,7 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
void LCodeGen::DoConstantT(LConstantT* instr) {
Handle<Object> value = instr->value();
+ ALLOW_HANDLE_DEREF(isolate(), "smi check");
if (value->IsSmi()) {
__ li(ToRegister(instr->result()), Operand(value));
} else {
@@ -1756,10 +1753,9 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
}
-int LCodeGen::GetNextEmittedBlock(int block) {
- for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
- LLabel* label = chunk_->GetLabel(i);
- if (!label->HasReplacement()) return i;
+int LCodeGen::GetNextEmittedBlock() const {
+ for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) {
+ if (!chunk_->GetLabel(i)->HasReplacement()) return i;
}
return -1;
}
@@ -1767,7 +1763,7 @@ int LCodeGen::GetNextEmittedBlock(int block) {
void LCodeGen::EmitBranch(int left_block, int right_block,
Condition cc, Register src1, const Operand& src2) {
- int next_block = GetNextEmittedBlock(current_block_);
+ int next_block = GetNextEmittedBlock();
right_block = chunk_->LookupDestination(right_block);
left_block = chunk_->LookupDestination(left_block);
if (right_block == left_block) {
@@ -1786,7 +1782,7 @@ void LCodeGen::EmitBranch(int left_block, int right_block,
void LCodeGen::EmitBranchF(int left_block, int right_block,
Condition cc, FPURegister src1, FPURegister src2) {
- int next_block = GetNextEmittedBlock(current_block_);
+ int next_block = GetNextEmittedBlock();
right_block = chunk_->LookupDestination(right_block);
left_block = chunk_->LookupDestination(left_block);
if (right_block == left_block) {
@@ -1916,10 +1912,8 @@ void LCodeGen::DoBranch(LBranch* instr) {
void LCodeGen::EmitGoto(int block) {
- block = chunk_->LookupDestination(block);
- int next_block = GetNextEmittedBlock(current_block_);
- if (block != next_block) {
- __ jmp(chunk_->GetAssemblyLabel(block));
+ if (!IsNextEmittedBlock(block)) {
+ __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block)));
}
}
@@ -2552,20 +2546,21 @@ void LCodeGen::DoReturn(LReturn* instr) {
if (NeedsEagerFrame()) {
__ mov(sp, fp);
__ Pop(ra, fp);
-
- if (instr->has_constant_parameter_count()) {
- int parameter_count = ToInteger32(instr->constant_parameter_count());
- int32_t sp_delta = (parameter_count + 1) * kPointerSize;
- if (sp_delta != 0) {
- __ Addu(sp, sp, Operand(sp_delta));
- }
- } else {
- Register reg = ToRegister(instr->parameter_count());
- __ Addu(reg, reg, Operand(1));
- __ sll(at, reg, kPointerSizeLog2);
- __ Addu(sp, sp, at);
+ }
+ if (instr->has_constant_parameter_count()) {
+ int parameter_count = ToInteger32(instr->constant_parameter_count());
+ int32_t sp_delta = (parameter_count + 1) * kPointerSize;
+ if (sp_delta != 0) {
+ __ Addu(sp, sp, Operand(sp_delta));
}
+ } else {
+ Register reg = ToRegister(instr->parameter_count());
+ // The argument count parameter is a smi
+ __ SmiUntag(reg);
+ __ sll(at, reg, kPointerSizeLog2);
+ __ Addu(sp, sp, at);
}
+
__ Jump(ra);
}
@@ -2883,16 +2878,24 @@ void LCodeGen::DoLoadExternalArrayPointer(
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Register arguments = ToRegister(instr->arguments());
- Register length = ToRegister(instr->length());
- Register index = ToRegister(instr->index());
Register result = ToRegister(instr->result());
- // There are two words between the frame pointer and the last argument.
- // Subtracting from length accounts for one of them, add one more.
- __ subu(length, length, index);
- __ Addu(length, length, Operand(1));
- __ sll(length, length, kPointerSizeLog2);
- __ Addu(at, arguments, Operand(length));
- __ lw(result, MemOperand(at, 0));
+ if (instr->length()->IsConstantOperand() &&
+ instr->index()->IsConstantOperand()) {
+ int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
+ int const_length = ToInteger32(LConstantOperand::cast(instr->length()));
+ int index = (const_length - const_index) + 1;
+ __ lw(result, MemOperand(arguments, index * kPointerSize));
+ } else {
+ Register length = ToRegister(instr->length());
+ Register index = ToRegister(instr->index());
+ // There are two words between the frame pointer and the last argument.
+ // Subtracting from length accounts for one of them, add one more.
+ __ subu(length, length, index);
+ __ Addu(length, length, Operand(1));
+ __ sll(length, length, kPointerSizeLog2);
+ __ Addu(at, arguments, Operand(length));
+ __ lw(result, MemOperand(at, 0));
+ }
}
@@ -3323,12 +3326,15 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
+ int formal_parameter_count,
int arity,
LInstruction* instr,
CallKind call_kind,
A1State a1_state) {
- bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
- function->shared()->formal_parameter_count() == arity;
+ bool dont_adapt_arguments =
+ formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
+ bool can_invoke_directly =
+ dont_adapt_arguments || formal_parameter_count == arity;
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
@@ -3343,7 +3349,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
// Set a0 to arguments count if adaptation is not needed. Assumes that a0
// is available to write to at this point.
- if (!function->NeedsArgumentsAdaption()) {
+ if (dont_adapt_arguments) {
__ li(a0, Operand(arity));
}
@@ -3357,7 +3363,9 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
} else {
SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
ParameterCount count(arity);
- __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
+ ParameterCount expected(formal_parameter_count);
+ __ InvokeFunction(
+ function, expected, count, CALL_FUNCTION, generator, call_kind);
}
// Restore context.
@@ -3368,7 +3376,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
__ mov(a0, v0);
- CallKnownFunction(instr->function(),
+ CallKnownFunction(instr->hydrogen()->function(),
+ instr->hydrogen()->formal_parameter_count(),
instr->arity(),
instr,
CALL_AS_METHOD,
@@ -3780,7 +3789,8 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(ToRegister(instr->function()).is(a1));
ASSERT(instr->HasPointerMap());
- if (instr->known_function().is_null()) {
+ Handle<JSFunction> known_function = instr->hydrogen()->known_function();
+ if (known_function.is_null()) {
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
@@ -3788,7 +3798,8 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
__ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
- CallKnownFunction(instr->known_function(),
+ CallKnownFunction(known_function,
+ instr->hydrogen()->formal_parameter_count(),
instr->arity(),
instr,
CALL_AS_METHOD,
@@ -3848,7 +3859,8 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
- CallKnownFunction(instr->target(),
+ CallKnownFunction(instr->hydrogen()->target(),
+ instr->hydrogen()->formal_parameter_count(),
instr->arity(),
instr,
CALL_AS_FUNCTION,
@@ -3879,10 +3891,18 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
__ li(a0, Operand(instr->arity()));
__ li(a2, Operand(instr->hydrogen()->property_cell()));
- Handle<Code> array_construct_code =
- isolate()->builtins()->ArrayConstructCode();
-
- CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+ Object* cell_value = instr->hydrogen()->property_cell()->value();
+ ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+ if (instr->arity() == 0) {
+ ArrayNoArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else if (instr->arity() == 1) {
+ ArraySingleArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else {
+ ArrayNArgumentsConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ }
}
@@ -4890,6 +4910,7 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Register reg = ToRegister(instr->value());
Handle<JSFunction> target = instr->hydrogen()->target();
+ ALLOW_HANDLE_DEREF(isolate(), "smi check");
if (isolate()->heap()->InNewSpace(*target)) {
Register reg = ToRegister(instr->value());
Handle<JSGlobalPropertyCell> cell =
@@ -5029,16 +5050,12 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
Register scratch = ToRegister(instr->temp());
Register scratch2 = ToRegister(instr->temp2());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
- Handle<Map> initial_map(constructor->initial_map());
+ Handle<Map> initial_map = instr->hydrogen()->constructor_initial_map();
int instance_size = initial_map->instance_size();
ASSERT(initial_map->pre_allocated_property_fields() +
initial_map->unused_property_fields() -
initial_map->inobject_properties() == 0);
- // Allocate memory for the object. The initial map might change when
- // the constructor's prototype changes, but instance size and property
- // counts remain unchanged (if slack tracking finished).
- ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
__ Allocate(instance_size, result, scratch, scratch2, deferred->entry(),
TAG_OBJECT);
@@ -5073,8 +5090,7 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
Register result = ToRegister(instr->result());
- Handle<JSFunction> constructor = instr->hydrogen()->constructor();
- Handle<Map> initial_map(constructor->initial_map());
+ Handle<Map> initial_map = instr->hydrogen()->constructor_initial_map();
int instance_size = initial_map->instance_size();
// TODO(3095996): Get rid of this. For now, we need to make the
@@ -5157,7 +5173,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
- Handle<FixedArray> literals(instr->environment()->closure()->literals());
+ Handle<FixedArray> literals = instr->hydrogen()->literals();
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate_elements_kind();
AllocationSiteMode allocation_site_mode =
@@ -5215,7 +5231,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
- Handle<FixedArray> literals(instr->environment()->closure()->literals());
+ Handle<FixedArray> literals = instr->hydrogen()->literals();
Handle<FixedArray> constant_properties =
instr->hydrogen()->constant_properties();
@@ -5229,7 +5245,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
__ li(a0, Operand(Smi::FromInt(flags)));
// Pick the right runtime function or stub to call.
- int properties_count = constant_properties->length() / 2;
+ int properties_count = instr->hydrogen()->constant_properties_length() / 2;
if (instr->hydrogen()->depth() > 1) {
__ Push(a3, a2, a1, a0);
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
@@ -5307,19 +5323,17 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning.
- Handle<SharedFunctionInfo> shared_info = instr->shared_info();
bool pretenure = instr->hydrogen()->pretenure();
- if (!pretenure && shared_info->num_literals() == 0) {
- FastNewClosureStub stub(shared_info->language_mode(),
- shared_info->is_generator());
- __ li(a1, Operand(shared_info));
+ if (!pretenure && instr->hydrogen()->has_no_literals()) {
+ FastNewClosureStub stub(instr->hydrogen()->language_mode(),
+ instr->hydrogen()->is_generator());
+ __ li(a1, Operand(instr->hydrogen()->shared_info()));
__ push(a1);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
} else {
- __ li(a2, Operand(shared_info));
- __ li(a1, Operand(pretenure
- ? factory()->true_value()
- : factory()->false_value()));
+ __ li(a2, Operand(instr->hydrogen()->shared_info()));
+ __ li(a1, Operand(pretenure ? factory()->true_value()
+ : factory()->false_value()));
__ Push(cp, a2, a1);
CallRuntime(Runtime::kNewClosure, 3, instr);
}
diff --git a/deps/v8/src/mips/lithium-codegen-mips.h b/deps/v8/src/mips/lithium-codegen-mips.h
index 01d0ffcbe..f082c01dd 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.h
+++ b/deps/v8/src/mips/lithium-codegen-mips.h
@@ -79,10 +79,20 @@ class LCodeGen BASE_EMBEDDED {
Heap* heap() const { return isolate()->heap(); }
Zone* zone() const { return zone_; }
+ // TODO(svenpanne) Use this consistently.
+ int LookupDestination(int block_id) const {
+ return chunk()->LookupDestination(block_id);
+ }
+
+ bool IsNextEmittedBlock(int block_id) const {
+ return LookupDestination(block_id) == GetNextEmittedBlock();
+ }
+
bool NeedsEagerFrame() const {
return GetStackSlotCount() > 0 ||
info()->is_non_deferred_calling() ||
- !info()->IsStub();
+ !info()->IsStub() ||
+ info()->requires_frame();
}
bool NeedsDeferredFrame() const {
return !NeedsEagerFrame() && info()->is_deferred_calling();
@@ -189,13 +199,13 @@ class LCodeGen BASE_EMBEDDED {
LPlatformChunk* chunk() const { return chunk_; }
Scope* scope() const { return scope_; }
- HGraph* graph() const { return chunk_->graph(); }
+ HGraph* graph() const { return chunk()->graph(); }
Register scratch0() { return kLithiumScratchReg; }
Register scratch1() { return kLithiumScratchReg2; }
DoubleRegister double_scratch0() { return kLithiumScratchDouble; }
- int GetNextEmittedBlock(int block);
+ int GetNextEmittedBlock() const;
LInstruction* GetNextInstruction();
void EmitClassOfTest(Label* if_true,
@@ -257,6 +267,7 @@ class LCodeGen BASE_EMBEDDED {
// Generate a direct call to a known function. Expects the function
// to be in a1.
void CallKnownFunction(Handle<JSFunction> function,
+ int formal_parameter_count,
int arity,
LInstruction* instr,
CallKind call_kind,
diff --git a/deps/v8/src/mips/lithium-mips.cc b/deps/v8/src/mips/lithium-mips.cc
index afa806c4e..c2f89867d 100644
--- a/deps/v8/src/mips/lithium-mips.cc
+++ b/deps/v8/src/mips/lithium-mips.cc
@@ -192,6 +192,11 @@ const char* LArithmeticT::Mnemonic() const {
}
+bool LGoto::HasInterestingComment(LCodeGen* gen) const {
+ return !gen->IsNextEmittedBlock(block_id());
+}
+
+
void LGoto::PrintDataTo(StringStream* stream) {
stream->Add("B%d", block_id());
}
@@ -823,11 +828,15 @@ void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
HEnvironment* last_environment = pred->last_environment();
for (int i = 0; i < block->phis()->length(); ++i) {
HPhi* phi = block->phis()->at(i);
- last_environment->SetValueAt(phi->merged_index(), phi);
+ if (phi->merged_index() < last_environment->length()) {
+ last_environment->SetValueAt(phi->merged_index(), phi);
+ }
}
for (int i = 0; i < block->deleted_phis()->length(); ++i) {
- last_environment->SetValueAt(block->deleted_phis()->at(i),
- graph_->GetConstantUndefined());
+ if (block->deleted_phis()->at(i) < last_environment->length()) {
+ last_environment->SetValueAt(block->deleted_phis()->at(i),
+ graph_->GetConstantUndefined());
+ }
}
block->UpdateEnvironment(last_environment);
// Pick up the outgoing argument count of one of the predecessors.
@@ -984,12 +993,14 @@ LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
+ info()->MarkAsRequiresFrame();
return DefineAsRegister(
new(zone()) LArgumentsLength(UseRegister(length->value())));
}
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
+ info()->MarkAsRequiresFrame();
return DefineAsRegister(new(zone()) LArgumentsElements);
}
@@ -2295,7 +2306,8 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
ASSERT(info()->IsStub());
CodeStubInterfaceDescriptor* descriptor =
info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
- Register reg = descriptor->register_params_[instr->index()];
+ int index = static_cast<int>(instr->index());
+ Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index);
return DefineFixed(result, reg);
}
}
@@ -2327,9 +2339,17 @@ LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
+ info()->MarkAsRequiresFrame();
LOperand* args = UseRegister(instr->arguments());
- LOperand* length = UseTempRegister(instr->length());
- LOperand* index = UseRegister(instr->index());
+ LOperand* length;
+ LOperand* index;
+ if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
+ length = UseRegisterOrConstant(instr->length());
+ index = UseOrConstant(instr->index());
+ } else {
+ length = UseTempRegister(instr->length());
+ index = Use(instr->index());
+ }
return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
}
diff --git a/deps/v8/src/mips/lithium-mips.h b/deps/v8/src/mips/lithium-mips.h
index b0fc59a3b..cfca64452 100644
--- a/deps/v8/src/mips/lithium-mips.h
+++ b/deps/v8/src/mips/lithium-mips.h
@@ -279,6 +279,8 @@ class LInstruction: public ZoneObject {
LOperand* FirstInput() { return InputAt(0); }
LOperand* Output() { return HasResult() ? result() : NULL; }
+ virtual bool HasInterestingComment(LCodeGen* gen) const { return true; }
+
#ifdef DEBUG
void VerifyCall();
#endif
@@ -378,6 +380,10 @@ class LInstructionGap: public LGap {
public:
explicit LInstructionGap(HBasicBlock* block) : LGap(block) { }
+ virtual bool HasInterestingComment(LCodeGen* gen) const {
+ return !IsRedundant();
+ }
+
DECLARE_CONCRETE_INSTRUCTION(InstructionGap, "gap")
};
@@ -386,6 +392,7 @@ class LGoto: public LTemplateInstruction<0, 0, 0> {
public:
explicit LGoto(int block_id) : block_id_(block_id) { }
+ virtual bool HasInterestingComment(LCodeGen* gen) const;
DECLARE_CONCRETE_INSTRUCTION(Goto, "goto")
virtual void PrintDataTo(StringStream* stream);
virtual bool IsControl() const { return true; }
@@ -433,12 +440,14 @@ class LLabel: public LGap {
explicit LLabel(HBasicBlock* block)
: LGap(block), replacement_(NULL) { }
+ virtual bool HasInterestingComment(LCodeGen* gen) const { return false; }
DECLARE_CONCRETE_INSTRUCTION(Label, "label")
virtual void PrintDataTo(StringStream* stream);
int block_id() const { return block()->block_id(); }
bool is_loop_header() const { return block()->IsLoopHeader(); }
+ bool is_osr_entry() const { return block()->is_osr_entry(); }
Label* label() { return &label_; }
LLabel* replacement() const { return replacement_; }
void set_replacement(LLabel* label) { replacement_ = label; }
@@ -452,6 +461,7 @@ class LLabel: public LGap {
class LParameter: public LTemplateInstruction<1, 0, 0> {
public:
+ virtual bool HasInterestingComment(LCodeGen* gen) const { return false; }
DECLARE_CONCRETE_INSTRUCTION(Parameter, "parameter")
};
@@ -469,6 +479,7 @@ class LCallStub: public LTemplateInstruction<1, 0, 0> {
class LUnknownOSRValue: public LTemplateInstruction<1, 0, 0> {
public:
+ virtual bool HasInterestingComment(LCodeGen* gen) const { return false; }
DECLARE_CONCRETE_INSTRUCTION(UnknownOSRValue, "unknown-osr-value")
};
@@ -1790,7 +1801,6 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
- Handle<JSFunction> known_function() { return hydrogen()->known_function(); }
};
@@ -1858,7 +1868,6 @@ class LCallKnownGlobal: public LTemplateInstruction<1, 0, 0> {
virtual void PrintDataTo(StringStream* stream);
- Handle<JSFunction> target() const { return hydrogen()->target(); }
int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -2429,8 +2438,6 @@ class LFunctionLiteral: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(FunctionLiteral, "function-literal")
DECLARE_HYDROGEN_ACCESSOR(FunctionLiteral)
-
- Handle<SharedFunctionInfo> shared_info() { return hydrogen()->shared_info(); }
};
@@ -2507,6 +2514,7 @@ class LOsrEntry: public LTemplateInstruction<0, 0, 0> {
public:
LOsrEntry();
+ virtual bool HasInterestingComment(LCodeGen* gen) const { return false; }
DECLARE_CONCRETE_INSTRUCTION(OsrEntry, "osr-entry")
LOperand** SpilledRegisterArray() { return register_spills_; }
diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc
index 6f9891469..220d9fe0c 100644
--- a/deps/v8/src/mips/macro-assembler-mips.cc
+++ b/deps/v8/src/mips/macro-assembler-mips.cc
@@ -83,6 +83,7 @@ void MacroAssembler::StoreRoot(Register source,
void MacroAssembler::LoadHeapObject(Register result,
Handle<HeapObject> object) {
+ ALLOW_HANDLE_DEREF(isolate(), "using raw address");
if (isolate()->heap()->InNewSpace(*object)) {
Handle<JSGlobalPropertyCell> cell =
isolate()->factory()->NewJSGlobalPropertyCell(object);
@@ -2457,6 +2458,7 @@ void MacroAssembler::Jump(Handle<Code> code,
const Operand& rt,
BranchDelaySlot bd) {
ASSERT(RelocInfo::IsCodeTarget(rmode));
+ ALLOW_HANDLE_DEREF(isolate(), "embedding raw address");
Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
}
@@ -2544,6 +2546,7 @@ int MacroAssembler::CallSize(Handle<Code> code,
Register rs,
const Operand& rt,
BranchDelaySlot bd) {
+ ALLOW_HANDLE_DEREF(isolate(), "using raw address");
return CallSize(reinterpret_cast<Address>(code.location()),
rmode, cond, rs, rt, bd);
}
@@ -2564,6 +2567,7 @@ void MacroAssembler::Call(Handle<Code> code,
SetRecordedAstId(ast_id);
rmode = RelocInfo::CODE_TARGET_WITH_ID;
}
+ ALLOW_HANDLE_DEREF(isolate(), "embedding raw address");
Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
ASSERT_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd),
SizeOfCodeGeneratedSince(&start));
@@ -3743,6 +3747,7 @@ void MacroAssembler::InvokeFunction(Register function,
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
+ const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag,
const CallWrapper& call_wrapper,
@@ -3754,7 +3759,6 @@ void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
LoadHeapObject(a1, function);
lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
- ParameterCount expected(function->shared()->formal_parameter_count());
// We call indirectly through the code field in the function to
// allow recompilation to take effect without changing any of the
// call sites.
@@ -3921,8 +3925,9 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0, a0);
- CallCFunction(ExternalReference::log_enter_external_function(isolate()), 0);
+ PrepareCallCFunction(1, a0);
+ li(a0, Operand(ExternalReference::isolate_address(isolate())));
+ CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
@@ -3941,8 +3946,9 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
- PrepareCallCFunction(0, a0);
- CallCFunction(ExternalReference::log_leave_external_function(isolate()), 0);
+ PrepareCallCFunction(1, a0);
+ li(a0, Operand(ExternalReference::isolate_address(isolate())));
+ CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
@@ -3996,7 +4002,7 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
mov(s0, v0);
mov(a0, v0);
PrepareCallCFunction(1, s1);
- li(a0, Operand(ExternalReference::isolate_address()));
+ li(a0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate()),
1);
mov(v0, s0);
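
The ALLOW_HANDLE_DEREF annotations added throughout this file mark audited places where a handle's raw location may be read. A rough sketch of the mechanism they stand for, with illustrative names (the real macro is debug-only and compiles away in release builds):

    #include <cassert>

    struct MockIsolate {
      MockIsolate() : handle_deref_allowed(false) {}
      bool handle_deref_allowed;
    };

    class AllowHandleDerefScope {
     public:
      AllowHandleDerefScope(MockIsolate* isolate, const char* why)
          : isolate_(isolate), previous_(isolate->handle_deref_allowed) {
        (void)why;  // justification string, kept for auditing/grep-ability
        isolate_->handle_deref_allowed = true;
      }
      ~AllowHandleDerefScope() { isolate_->handle_deref_allowed = previous_; }

     private:
      MockIsolate* isolate_;
      bool previous_;
    };

    // Usage, mirroring LoadHeapObject above:
    //   AllowHandleDerefScope allow(isolate, "using raw address");
    //   ... dereference the handle inside the scope ...
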
diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h
index 125cc8aaf..e914f2402 100644
--- a/deps/v8/src/mips/macro-assembler-mips.h
+++ b/deps/v8/src/mips/macro-assembler-mips.h
@@ -188,10 +188,10 @@ class MacroAssembler: public Assembler {
void Call(Register target, COND_ARGS);
static int CallSize(Address target, RelocInfo::Mode rmode, COND_ARGS);
void Call(Address target, RelocInfo::Mode rmode, COND_ARGS);
- static int CallSize(Handle<Code> code,
- RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
- TypeFeedbackId ast_id = TypeFeedbackId::None(),
- COND_ARGS);
+ int CallSize(Handle<Code> code,
+ RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
+ TypeFeedbackId ast_id = TypeFeedbackId::None(),
+ COND_ARGS);
void Call(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
TypeFeedbackId ast_id = TypeFeedbackId::None(),
@@ -289,6 +289,7 @@ class MacroAssembler: public Assembler {
void LoadHeapObject(Register dst, Handle<HeapObject> object);
void LoadObject(Register result, Handle<Object> object) {
+ ALLOW_HANDLE_DEREF(isolate(), "heap object check");
if (object->IsHeapObject()) {
LoadHeapObject(result, Handle<HeapObject>::cast(object));
} else {
@@ -882,6 +883,7 @@ class MacroAssembler: public Assembler {
CallKind call_kind);
void InvokeFunction(Handle<JSFunction> function,
+ const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag,
const CallWrapper& call_wrapper,
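
The CallSize overload above also loses its `static` for the same reason: sizing a call to a Handle<Code> now dereferences the handle, and the deref annotation needs the assembler's isolate(), which only an instance member can reach. Schematically, with illustrative types and placeholder size logic:

    class AssemblerLike {
     public:
      // Before: static int CallSize(const void* code);  // no isolate access
      // After: a member, so the check can consult per-instance state.
      int CallSize(const void* code) const {
        CheckHandleDerefAllowed();  // would consult isolate() in real code
        (void)code;
        return 8;  // placeholder; the real size depends on the reloc mode
      }

     private:
      void CheckHandleDerefAllowed() const {}
    };
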
diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.cc b/deps/v8/src/mips/regexp-macro-assembler-mips.cc
index 2fbc0eaa5..7289296d5 100644
--- a/deps/v8/src/mips/regexp-macro-assembler-mips.cc
+++ b/deps/v8/src/mips/regexp-macro-assembler-mips.cc
@@ -388,7 +388,7 @@ void RegExpMacroAssemblerMIPS::CheckNotBackReferenceIgnoreCase(
// Address of current input position.
__ Addu(a1, current_input_offset(), Operand(end_of_input_address()));
// Isolate.
- __ li(a3, Operand(ExternalReference::isolate_address()));
+ __ li(a3, Operand(ExternalReference::isolate_address(masm_->isolate())));
{
AllowExternalCallThatCantCauseGC scope(masm_);
@@ -901,7 +901,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
__ PrepareCallCFunction(num_arguments, a0);
__ mov(a0, backtrack_stackpointer());
__ Addu(a1, frame_pointer(), Operand(kStackHighEnd));
- __ li(a2, Operand(ExternalReference::isolate_address()));
+ __ li(a2, Operand(ExternalReference::isolate_address(masm_->isolate())));
ExternalReference grow_stack =
ExternalReference::re_grow_stack(masm_->isolate());
__ CallCFunction(grow_stack, num_arguments);
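
Both __ li sites above pick up an API change threaded through this whole patch: ExternalReference::isolate_address() formerly resolved the isolate implicitly, and now takes it as an explicit argument, so the generated code embeds the isolate the caller already holds rather than whatever a thread-local lookup would find. The before/after shapes, sketched with a stand-in type:

    #include <cstdint>

    struct IsolateLike { int id; };

    // Old shape (sketch): implicit resolution of the "current" isolate.
    //   intptr_t isolate_address();

    // New shape (sketch): the caller passes the isolate it already has.
    inline intptr_t isolate_address(IsolateLike* isolate) {
      return reinterpret_cast<intptr_t>(isolate);
    }
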
diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.h b/deps/v8/src/mips/regexp-macro-assembler-mips.h
index 8dd52a484..3ad64f9ae 100644
--- a/deps/v8/src/mips/regexp-macro-assembler-mips.h
+++ b/deps/v8/src/mips/regexp-macro-assembler-mips.h
@@ -229,6 +229,7 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
inline void CallCFunctionUsingStub(ExternalReference function,
int num_arguments);
+ Isolate* isolate() const { return masm_->isolate(); }
MacroAssembler* masm_;
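
The new accessor is plain delegation, letting the .cc file write isolate() where it previously spelled out masm_->isolate(); the stub-cache changes below perform the same cleanup. A sketch with stand-in types:

    struct MasmLike {
      void* isolate_;
      void* isolate() const { return isolate_; }
    };

    class RegexpAssemblerLike {
     public:
      explicit RegexpAssemblerLike(MasmLike* masm) : masm_(masm) {}
      // One accessor instead of masm_->isolate() at every call site.
      void* isolate() const { return masm_->isolate(); }
     private:
      MasmLike* masm_;
    };
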
diff --git a/deps/v8/src/mips/simulator-mips.cc b/deps/v8/src/mips/simulator-mips.cc
index bc384357c..467345807 100644
--- a/deps/v8/src/mips/simulator-mips.cc
+++ b/deps/v8/src/mips/simulator-mips.cc
@@ -26,8 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <stdlib.h>
-#include <math.h>
#include <limits.h>
+#include <cmath>
#include <cstdarg>
#include "v8.h"
@@ -1155,7 +1155,7 @@ bool Simulator::test_fcsr_bit(uint32_t cc) {
bool Simulator::set_fcsr_round_error(double original, double rounded) {
bool ret = false;
- if (!isfinite(original) || !isfinite(rounded)) {
+ if (!std::isfinite(original) || !std::isfinite(rounded)) {
set_fcsr_bit(kFCSRInvalidOpFlagBit, true);
ret = true;
}
@@ -2067,25 +2067,28 @@ void Simulator::DecodeTypeRegister(Instruction* instr) {
set_fpu_register_double(fd_reg, sqrt(fs));
break;
case C_UN_D:
- set_fcsr_bit(fcsr_cc, isnan(fs) || isnan(ft));
+ set_fcsr_bit(fcsr_cc, std::isnan(fs) || std::isnan(ft));
break;
case C_EQ_D:
set_fcsr_bit(fcsr_cc, (fs == ft));
break;
case C_UEQ_D:
- set_fcsr_bit(fcsr_cc, (fs == ft) || (isnan(fs) || isnan(ft)));
+ set_fcsr_bit(fcsr_cc,
+ (fs == ft) || (std::isnan(fs) || std::isnan(ft)));
break;
case C_OLT_D:
set_fcsr_bit(fcsr_cc, (fs < ft));
break;
case C_ULT_D:
- set_fcsr_bit(fcsr_cc, (fs < ft) || (isnan(fs) || isnan(ft)));
+ set_fcsr_bit(fcsr_cc,
+ (fs < ft) || (std::isnan(fs) || std::isnan(ft)));
break;
case C_OLE_D:
set_fcsr_bit(fcsr_cc, (fs <= ft));
break;
case C_ULE_D:
- set_fcsr_bit(fcsr_cc, (fs <= ft) || (isnan(fs) || isnan(ft)));
+ set_fcsr_bit(fcsr_cc,
+ (fs <= ft) || (std::isnan(fs) || std::isnan(ft)));
break;
case CVT_W_D: // Convert double to word.
// Rounding modes are not yet supported.
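
The <math.h> to <cmath> swap is what forces the std:: qualifications in this hunk: C99's isnan/isfinite are macros that <cmath> is permitted to remove, while std::isnan and std::isfinite are overloaded functions with well-defined behavior for each floating type. A self-contained check of the unordered predicate used by C_UN_D:

    #include <cmath>
    #include <cstdio>
    #include <limits>

    int main() {
      double fs = std::numeric_limits<double>::quiet_NaN();
      double ft = 1.0;
      // Mirrors C_UN_D above: the compare is "unordered" if either side is NaN.
      bool unordered = std::isnan(fs) || std::isnan(ft);
      std::printf("unordered=%d, ft finite=%d\n",
                  unordered ? 1 : 0, std::isfinite(ft) ? 1 : 0);
      return 0;
    }
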
diff --git a/deps/v8/src/mips/stub-cache-mips.cc b/deps/v8/src/mips/stub-cache-mips.cc
index b9757fa13..e110c47c6 100644
--- a/deps/v8/src/mips/stub-cache-mips.cc
+++ b/deps/v8/src/mips/stub-cache-mips.cc
@@ -715,7 +715,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
__ Push(scratch, receiver, holder);
__ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
__ push(scratch);
- __ li(scratch, Operand(ExternalReference::isolate_address()));
+ __ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
__ push(scratch);
}
@@ -789,7 +789,7 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
__ li(t2, call_data);
}
- __ li(t3, Operand(ExternalReference::isolate_address()));
+ __ li(t3, Operand(ExternalReference::isolate_address(masm->isolate())));
// Store JS function, call data and isolate.
__ sw(t1, MemOperand(sp, 1 * kPointerSize));
__ sw(t2, MemOperand(sp, 2 * kPointerSize));
@@ -951,7 +951,9 @@ class CallInterceptorCompiler BASE_EMBEDDED {
CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
? CALL_AS_FUNCTION
: CALL_AS_METHOD;
- __ InvokeFunction(optimization.constant_function(), arguments_,
+ Handle<JSFunction> function = optimization.constant_function();
+ ParameterCount expected(function);
+ __ InvokeFunction(function, expected, arguments_,
JUMP_FUNCTION, NullCallWrapper(), call_kind);
}
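
This is the first of several call sites in this file moved to the new InvokeFunction signature: the expected argument count is computed from the function handle by the caller and passed in, instead of being derived inside the macro assembler where the handle dereference was harder to audit. The same three-line pattern repeats in the StringFromCharCode/MathFloor/MathAbs stubs and the getter/setter snippets below. A simplified model of the computation that moved, with stand-in types (V8's ParameterCount(Handle<JSFunction>) constructor performs a formal_parameter_count lookup along these lines):

    struct SharedFunctionInfoLike { int formal_parameter_count; };
    struct JSFunctionLike { SharedFunctionInfoLike* shared; };

    struct ParameterCountLike {
      explicit ParameterCountLike(const JSFunctionLike* f)
          : immediate(f->shared->formal_parameter_count) {}
      explicit ParameterCountLike(int count) : immediate(count) {}
      int immediate;
    };

    // The assembler receives both counts and never touches f->shared itself.
    void InvokeFunctionLike(JSFunctionLike* f,
                            const ParameterCountLike& expected,
                            const ParameterCountLike& actual) {
      (void)f; (void)expected; (void)actual;  // adaptor/call emission here
    }
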
@@ -1165,7 +1167,7 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
}
// Log the check depth.
- LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
+ LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
if (!holder.is_identical_to(first) || check == CHECK_ALL_MAPS) {
// Check the holder map.
@@ -1292,13 +1294,13 @@ void BaseLoadStubCompiler::GenerateLoadCallback(
__ lw(scratch3(), FieldMemOperand(scratch3(),
ExecutableAccessorInfo::kDataOffset));
} else {
- __ li(scratch3(), Handle<Object>(callback->data(),
- callback->GetIsolate()));
+ __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
}
__ Subu(sp, sp, 4 * kPointerSize);
__ sw(reg, MemOperand(sp, 3 * kPointerSize));
__ sw(scratch3(), MemOperand(sp, 2 * kPointerSize));
- __ li(scratch3(), Operand(ExternalReference::isolate_address()));
+ __ li(scratch3(),
+ Operand(ExternalReference::isolate_address(isolate())));
__ sw(scratch3(), MemOperand(sp, 1 * kPointerSize));
__ sw(name(), MemOperand(sp, 0 * kPointerSize));
@@ -1323,10 +1325,8 @@ void BaseLoadStubCompiler::GenerateLoadCallback(
const int kStackUnwindSpace = 5;
Address getter_address = v8::ToCData<Address>(callback->getter());
ApiFunction fun(getter_address);
- ExternalReference ref =
- ExternalReference(&fun,
- ExternalReference::DIRECT_GETTER_CALL,
- masm()->isolate());
+ ExternalReference ref = ExternalReference(
+ &fun, ExternalReference::DIRECT_GETTER_CALL, isolate());
__ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
@@ -1411,7 +1411,7 @@ void BaseLoadStubCompiler::GenerateLoadInterceptor(
this->name(), interceptor_holder);
ExternalReference ref = ExternalReference(
- IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
+ IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
__ TailCallExternalReference(ref, 6, 1);
}
}
@@ -1721,11 +1721,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ bind(&no_fast_elements_check);
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address(
- masm()->isolate());
+ ExternalReference::new_space_allocation_top_address(isolate());
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address(
- masm()->isolate());
+ ExternalReference::new_space_allocation_limit_address(isolate());
const int kAllocationDelta = 4;
// Load top and check if it is the end of elements.
@@ -1762,10 +1760,8 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Ret();
}
__ bind(&call_builtin);
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
- masm()->isolate()),
- argc + 1,
- 1);
+ __ TailCallExternalReference(
+ ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1);
}
// Handle call cache miss.
@@ -1849,10 +1845,8 @@ Handle<Code> CallStubCompiler::CompileArrayPopCall(
__ Ret();
__ bind(&call_builtin);
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
- masm()->isolate()),
- argc + 1,
- 1);
+ __ TailCallExternalReference(
+ ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
// Handle call cache miss.
__ bind(&miss);
@@ -2091,8 +2085,9 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
__ bind(&slow);
- __ InvokeFunction(
- function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+ ParameterCount expected(function);
+ __ InvokeFunction(function, expected, arguments(),
+ JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// a2: function name.
@@ -2221,8 +2216,9 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
__ bind(&slow);
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
- __ InvokeFunction(
- function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+ ParameterCount expected(function);
+ __ InvokeFunction(function, expected, arguments(),
+ JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// a2: function name.
@@ -2322,8 +2318,9 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
__ bind(&slow);
- __ InvokeFunction(
- function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+ ParameterCount expected(function);
+ __ InvokeFunction(function, expected, arguments(),
+ JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// a2: function name.
@@ -2413,8 +2410,7 @@ void CallStubCompiler::CompileHandlerFrontend(Handle<Object> object,
ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
switch (check) {
case RECEIVER_MAP_CHECK:
- __ IncrementCounter(masm()->isolate()->counters()->call_const(),
- 1, a0, a3);
+ __ IncrementCounter(isolate()->counters()->call_const(), 1, a0, a3);
// Check that the maps haven't changed.
CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
@@ -2498,8 +2494,9 @@ void CallStubCompiler::CompileHandlerBackend(Handle<JSFunction> function) {
CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
? CALL_AS_FUNCTION
: CALL_AS_METHOD;
- __ InvokeFunction(
- function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);
+ ParameterCount expected(function);
+ __ InvokeFunction(function, expected, arguments(),
+ JUMP_FUNCTION, NullCallWrapper(), call_kind);
}
@@ -2605,7 +2602,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
- Counters* counters = masm()->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
ParameterCount expected(function->shared()->formal_parameter_count());
CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
@@ -2649,8 +2646,7 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
- ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
- masm()->isolate());
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
__ TailCallExternalReference(store_callback_property, 4, 1);
// Handle store cache miss.
@@ -2686,8 +2682,9 @@ void StoreStubCompiler::GenerateStoreViaSetter(
__ push(a1);
__ push(a0);
ParameterCount actual(1);
- __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
- CALL_AS_METHOD);
+ ParameterCount expected(setter);
+ __ InvokeFunction(setter, expected, actual,
+ CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
} else {
// If we generate a global code snippet for deoptimization only, remember
// the place to continue after deoptimization.
@@ -2733,8 +2730,7 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
- ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
- masm()->isolate());
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
__ TailCallExternalReference(store_ic_property, 4, 1);
// Handle store cache miss.
@@ -2772,7 +2768,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
__ mov(v0, a0); // Stored value must be returned in v0.
// Cells are always rescanned, so no write barrier here.
- Counters* counters = masm()->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(
counters->named_store_global_inline(), 1, scratch1(), scratch2());
__ Ret();
@@ -2867,8 +2863,9 @@ void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
// Call the JavaScript getter with the receiver on the stack.
__ push(a0);
ParameterCount actual(0);
- __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
- CALL_AS_METHOD);
+ ParameterCount expected(getter);
+ __ InvokeFunction(getter, expected, actual,
+ CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
} else {
// If we generate a global code snippet for deoptimization only, remember
// the place to continue after deoptimization.
@@ -2912,7 +2909,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
HandlerFrontendFooter(&success, &miss);
__ bind(&success);
- Counters* counters = masm()->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
__ mov(v0, t0);
__ Ret();
@@ -3090,8 +3087,8 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
__ bind(&next);
} else {
// Set the property to the constant value.
- Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i),
- masm()->isolate());
+ Handle<Object> constant(
+ shared->GetThisPropertyAssignmentConstant(i), isolate());
__ li(a2, Operand(constant));
__ sw(a2, MemOperand(t5));
__ Addu(t5, t5, kPointerSize);
@@ -3119,7 +3116,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
__ sll(t0, a1, kPointerSizeLog2);
__ Addu(sp, sp, t0);
__ Addu(sp, sp, Operand(kPointerSize));
- Counters* counters = masm()->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
__ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);
__ Ret();
@@ -3128,7 +3125,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
// construction.
__ bind(&generic_stub_call);
Handle<Code> generic_construct_stub =
- masm()->isolate()->builtins()->JSConstructStubGeneric();
+ isolate()->builtins()->JSConstructStubGeneric();
__ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
// Return the generated code.