Diffstat (limited to 'chromium/v8/src/mips/lithium-codegen-mips.cc')
-rw-r--r--  chromium/v8/src/mips/lithium-codegen-mips.cc  285
1 file changed, 161 insertions(+), 124 deletions(-)
diff --git a/chromium/v8/src/mips/lithium-codegen-mips.cc b/chromium/v8/src/mips/lithium-codegen-mips.cc
index 2bc52e4f3ff..88e7eb8f1d4 100644
--- a/chromium/v8/src/mips/lithium-codegen-mips.cc
+++ b/chromium/v8/src/mips/lithium-codegen-mips.cc
@@ -91,7 +91,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LChunkBuilder::Abort(BailoutReason reason) {
+void LChunkBuilder::Abort(const char* reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -268,8 +268,6 @@ bool LCodeGen::GenerateBody() {
instr->Mnemonic());
}
- RecordAndUpdatePosition(instr->position());
-
instr->CompileToNative(this);
}
EnsureSpaceForLazyDeopt();
@@ -283,10 +281,6 @@ bool LCodeGen::GenerateDeferredCode() {
if (deferred_.length() > 0) {
for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
LDeferredCode* code = deferred_[i];
-
- int pos = instructions_->at(code->instruction_index())->position();
- RecordAndUpdatePosition(pos);
-
Comment(";;; <@%d,#%d> "
"-------------------- Deferred %s --------------------",
code->instruction_index(),
@@ -330,7 +324,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
// end of the jump table.
if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) +
deopt_jump_table_.length() * 12)) {
- Abort(kGeneratedCodeIsTooLarge);
+ Abort("Generated code is too large");
}
if (deopt_jump_table_.length() > 0) {
@@ -417,7 +411,7 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
ASSERT(constant->HasSmiValue());
__ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
} else if (r.IsDouble()) {
- Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
+ Abort("EmitLoadRegister: Unsupported double immediate.");
} else {
ASSERT(r.IsTagged());
__ LoadObject(scratch, literal);
@@ -455,9 +449,9 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
__ cvt_d_w(dbl_scratch, flt_scratch);
return dbl_scratch;
} else if (r.IsDouble()) {
- Abort(kUnsupportedDoubleImmediate);
+ Abort("unsupported double immediate");
} else if (r.IsTagged()) {
- Abort(kUnsupportedTaggedImmediate);
+ Abort("unsupported tagged immediate");
}
} else if (op->IsStackSlot() || op->IsArgument()) {
MemOperand mem_op = ToMemOperand(op);
@@ -526,14 +520,14 @@ Operand LCodeGen::ToOperand(LOperand* op) {
ASSERT(constant->HasInteger32Value());
return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
- Abort(kToOperandUnsupportedDoubleImmediate);
+ Abort("ToOperand Unsupported double immediate.");
}
ASSERT(r.IsTagged());
return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
- Abort(kToOperandIsDoubleRegisterUnimplemented);
+ Abort("ToOperand IsDoubleRegister unimplemented");
return Operand(0);
}
// Stack slots not implemented, use ToMemOperand instead.
@@ -597,57 +591,37 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
break;
}
- int object_index = 0;
- int dematerialized_index = 0;
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
- AddToTranslation(environment,
- translation,
+
+ // TODO(mstarzinger): Introduce marker operands to indicate that this value
+ // is not present and must be reconstructed from the deoptimizer. Currently
+ // this is only used for the arguments object.
+ if (value == NULL) {
+ int arguments_count = environment->values()->length() - translation_size;
+ translation->BeginArgumentsObject(arguments_count);
+ for (int i = 0; i < arguments_count; ++i) {
+ LOperand* value = environment->values()->at(translation_size + i);
+ AddToTranslation(translation,
+ value,
+ environment->HasTaggedValueAt(translation_size + i),
+ environment->HasUint32ValueAt(translation_size + i));
+ }
+ continue;
+ }
+
+ AddToTranslation(translation,
value,
environment->HasTaggedValueAt(i),
- environment->HasUint32ValueAt(i),
- &object_index,
- &dematerialized_index);
+ environment->HasUint32ValueAt(i));
}
}
-void LCodeGen::AddToTranslation(LEnvironment* environment,
- Translation* translation,
+void LCodeGen::AddToTranslation(Translation* translation,
LOperand* op,
bool is_tagged,
- bool is_uint32,
- int* object_index_pointer,
- int* dematerialized_index_pointer) {
- if (op == LEnvironment::materialization_marker()) {
- int object_index = (*object_index_pointer)++;
- if (environment->ObjectIsDuplicateAt(object_index)) {
- int dupe_of = environment->ObjectDuplicateOfAt(object_index);
- translation->DuplicateObject(dupe_of);
- return;
- }
- int object_length = environment->ObjectLengthAt(object_index);
- if (environment->ObjectIsArgumentsAt(object_index)) {
- translation->BeginArgumentsObject(object_length);
- } else {
- translation->BeginCapturedObject(object_length);
- }
- int dematerialized_index = *dematerialized_index_pointer;
- int env_offset = environment->translation_size() + dematerialized_index;
- *dematerialized_index_pointer += object_length;
- for (int i = 0; i < object_length; ++i) {
- LOperand* value = environment->values()->at(env_offset + i);
- AddToTranslation(environment,
- translation,
- value,
- environment->HasTaggedValueAt(env_offset + i),
- environment->HasUint32ValueAt(env_offset + i),
- object_index_pointer,
- dematerialized_index_pointer);
- }
- return;
- }
-
+ bool is_uint32) {
if (op->IsStackSlot()) {
if (is_tagged) {
translation->StoreStackSlot(op->index());
@@ -774,7 +748,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort(kBailoutWasNotPrepared);
+ Abort("bailout was not prepared");
return;
}
@@ -787,7 +761,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
return;
}
- if (info()->ShouldTrapOnDeopt()) {
+ if (FLAG_trap_on_deopt && info()->IsOptimizing()) {
Label skip;
if (cc != al) {
__ Branch(&skip, NegateCondition(cc), src1, src2);
@@ -986,14 +960,6 @@ void LCodeGen::RecordPosition(int position) {
}
-void LCodeGen::RecordAndUpdatePosition(int position) {
- if (position >= 0 && position != old_position_) {
- masm()->positions_recorder()->RecordPosition(position);
- old_position_ = position;
- }
-}
-
-
static const char* LabelType(LLabel* label) {
if (label->is_loop_header()) return " (loop header)";
if (label->is_osr_entry()) return " (OSR entry)";
@@ -1536,11 +1502,7 @@ void LCodeGen::DoBitI(LBitI* instr) {
__ Or(result, left, right);
break;
case Token::BIT_XOR:
- if (right_op->IsConstantOperand() && right.immediate() == int32_t(~0)) {
- __ Nor(result, zero_reg, left);
- } else {
- __ Xor(result, left, right);
- }
+ __ Xor(result, left, right);
break;
default:
UNREACHABLE();
@@ -1801,7 +1763,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
+ __ Check(eq, "Unexpected string type", at, Operand(zero_reg));
}
__ Addu(scratch,
@@ -1818,6 +1780,13 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
}
+void LCodeGen::DoBitNotI(LBitNotI* instr) {
+ Register input = ToRegister(instr->value());
+ Register result = ToRegister(instr->result());
+ __ Nor(result, zero_reg, Operand(input));
+}
+
+
void LCodeGen::DoThrow(LThrow* instr) {
Register input_reg = EmitLoadRegister(instr->value(), at);
__ push(input_reg);
@@ -2921,6 +2890,90 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
}
+void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
+ Register object,
+ Handle<Map> type,
+ Handle<String> name,
+ LEnvironment* env) {
+ LookupResult lookup(isolate());
+ type->LookupDescriptor(NULL, *name, &lookup);
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsField()) {
+ int index = lookup.GetLocalFieldIndexFromMap(*type);
+ int offset = index * kPointerSize;
+ if (index < 0) {
+ // Negative property indices are in-object properties, indexed
+ // from the end of the fixed part of the object.
+ __ lw(result, FieldMemOperand(object, offset + type->instance_size()));
+ } else {
+ // Non-negative property indices are in the properties array.
+ __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
+ __ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
+ }
+ } else if (lookup.IsConstant()) {
+ Handle<Object> constant(lookup.GetConstantFromMap(*type), isolate());
+ __ LoadObject(result, constant);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ Handle<HeapObject> current(HeapObject::cast((*type)->prototype()));
+ Heap* heap = type->GetHeap();
+ while (*current != heap->null_value()) {
+ __ LoadHeapObject(result, current);
+ __ lw(result, FieldMemOperand(result, HeapObject::kMapOffset));
+ DeoptimizeIf(ne, env, result, Operand(Handle<Map>(current->map())));
+ current =
+ Handle<HeapObject>(HeapObject::cast(current->map()->prototype()));
+ }
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
+ }
+}
+
+
+void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
+ Register object = ToRegister(instr->object());
+ Register result = ToRegister(instr->result());
+ Register object_map = scratch0();
+
+ int map_count = instr->hydrogen()->types()->length();
+ bool need_generic = instr->hydrogen()->need_generic();
+
+ if (map_count == 0 && !need_generic) {
+ DeoptimizeIf(al, instr->environment());
+ return;
+ }
+ Handle<String> name = instr->hydrogen()->name();
+ Label done;
+ __ lw(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
+ for (int i = 0; i < map_count; ++i) {
+ bool last = (i == map_count - 1);
+ Handle<Map> map = instr->hydrogen()->types()->at(i);
+ Label check_passed;
+ __ CompareMapAndBranch(object_map, map, &check_passed, eq, &check_passed);
+ if (last && !need_generic) {
+ DeoptimizeIf(al, instr->environment());
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ } else {
+ Label next;
+ __ Branch(&next);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ __ Branch(&done);
+ __ bind(&next);
+ }
+ }
+ if (need_generic) {
+ __ li(a2, Operand(name));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ }
+ __ bind(&done);
+}
+
+
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
ASSERT(ToRegister(instr->object()).is(a0));
ASSERT(ToRegister(instr->result()).is(v0));
@@ -3016,7 +3069,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -3102,7 +3155,7 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -3373,7 +3426,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
- Abort(kDoPushArgumentNotImplementedForDoubleType);
+ Abort("DoPushArgument not implemented for double type.");
} else {
Register argument_reg = EmitLoadRegister(argument, at);
__ push(argument_reg);
@@ -3592,7 +3645,7 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) {
FPURegister input = ToDoubleRegister(instr->value());
FPURegister result = ToDoubleRegister(instr->result());
__ abs_d(result, input);
- } else if (r.IsSmiOrInteger32()) {
+ } else if (r.IsInteger32()) {
EmitIntegerMathAbs(instr);
} else {
// Representation is tagged.
@@ -4198,7 +4251,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -4276,7 +4329,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort(kArrayIndexConstantValueTooBig);
+ Abort("array index constant value too big.");
}
} else {
key = ToRegister(instr->key());
@@ -4405,13 +4458,12 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
scratch, GetRAState(), kDontSaveFPRegs);
} else {
- PushSafepointRegistersScope scope(
- this, Safepoint::kWithRegistersAndDoubles);
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
__ mov(a0, object_reg);
__ li(a1, Operand(to_map));
TransitionElementsKindStub stub(from_kind, to_kind);
__ CallStub(&stub);
- RecordSafepointWithRegistersAndDoubles(
+ RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
}
__ bind(&not_applicable);
@@ -4717,7 +4769,7 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
__ Move(reg, scratch0(), input_reg);
Label canonicalize;
__ Branch(&canonicalize, ne, scratch0(), Operand(kHoleNanUpper32));
- __ li(reg, factory()->undefined_value());
+ __ li(reg, factory()->the_hole_value());
__ Branch(&done);
__ bind(&canonicalize);
__ Move(input_reg,
@@ -5122,7 +5174,7 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
AllowDeferredHandleDereference smi_check;
if (isolate()->heap()->InNewSpace(*target)) {
Register reg = ToRegister(instr->value());
- Handle<Cell> cell = isolate()->factory()->NewCell(target);
+ Handle<Cell> cell = isolate()->factory()->NewPropertyCell(target);
__ li(at, Operand(Handle<Object>(cell)));
__ lw(at, FieldMemOperand(at, Cell::kValueOffset));
DeoptimizeIf(ne, instr->environment(), reg,
@@ -5134,63 +5186,31 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
}
-void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
- {
- PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- __ push(object);
- CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
- __ StoreToSafepointRegisterSlot(v0, scratch0());
- }
- __ And(at, scratch0(), Operand(kSmiTagMask));
- DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
+void LCodeGen::DoCheckMapCommon(Register map_reg,
+ Handle<Map> map,
+ LEnvironment* env) {
+ Label success;
+ __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
+ DeoptimizeIf(al, env);
+ __ bind(&success);
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
- class DeferredCheckMaps: public LDeferredCode {
- public:
- DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
- : LDeferredCode(codegen), instr_(instr), object_(object) {
- SetExit(check_maps());
- }
- virtual void Generate() {
- codegen()->DoDeferredInstanceMigration(instr_, object_);
- }
- Label* check_maps() { return &check_maps_; }
- virtual LInstruction* instr() { return instr_; }
- private:
- LCheckMaps* instr_;
- Label check_maps_;
- Register object_;
- };
-
if (instr->hydrogen()->CanOmitMapChecks()) return;
Register map_reg = scratch0();
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
+ Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
__ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
-
- DeferredCheckMaps* deferred = NULL;
- if (instr->hydrogen()->has_migration_target()) {
- deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
- __ bind(deferred->check_maps());
- }
-
- Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMapAndBranch(map_reg, map, &success, eq, &success);
}
Handle<Map> map = map_set->last();
- // Do the CompareMap() directly within the Branch() and DeoptimizeIf().
- if (instr->hydrogen()->has_migration_target()) {
- __ Branch(deferred->entry(), ne, map_reg, Operand(map));
- } else {
- DeoptimizeIf(ne, instr->environment(), map_reg, Operand(map));
- }
-
+ DoCheckMapCommon(map_reg, map, instr->environment());
__ bind(&success);
}
@@ -5245,6 +5265,25 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
}
+void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
+ if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
+
+ Register prototype_reg = ToRegister(instr->temp());
+ Register map_reg = ToRegister(instr->temp2());
+
+ ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
+ ZoneList<Handle<Map> >* maps = instr->maps();
+
+ ASSERT(prototypes->length() == maps->length());
+
+ for (int i = 0; i < prototypes->length(); i++) {
+ __ LoadHeapObject(prototype_reg, prototypes->at(i));
+ __ lw(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset));
+ DoCheckMapCommon(map_reg, maps->at(i), instr->environment());
+ }
+}
+
+
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
@@ -5625,8 +5664,6 @@ void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
if (info()->IsStub() && type == Deoptimizer::EAGER) {
type = Deoptimizer::LAZY;
}
-
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
DeoptimizeIf(al, instr->environment(), type, zero_reg, Operand(zero_reg));
}