Diffstat (limited to 'deps/v8/src/crankshaft/hydrogen.cc')
-rw-r--r--  deps/v8/src/crankshaft/hydrogen.cc  373
1 file changed, 31 insertions(+), 342 deletions(-)
diff --git a/deps/v8/src/crankshaft/hydrogen.cc b/deps/v8/src/crankshaft/hydrogen.cc
index af3207e859..d9dc41221e 100644
--- a/deps/v8/src/crankshaft/hydrogen.cc
+++ b/deps/v8/src/crankshaft/hydrogen.cc
@@ -4008,7 +4008,7 @@ bool HOptimizedGraphBuilder::BuildGraph() {
   // Set this predicate early to avoid handle deref during graph optimization.
   graph()->set_allow_code_motion(
       current_info()->IsStub() ||
-      current_info()->shared_info()->opt_count() + 1 < FLAG_max_opt_count);
+      current_info()->shared_info()->deopt_count() + 1 < FLAG_max_deopt_count);

   // Perform any necessary OSR-specific cleanups or changes to the graph.
   osr()->FinishGraph();
@@ -6573,13 +6573,15 @@ void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
     HValue* name = Add<HConstant>(var->name());
     HValue* vector_value = Add<HConstant>(vector);
     HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
+    DCHECK(vector->IsStoreGlobalIC(slot));
     DCHECK_EQ(vector->GetLanguageMode(slot), function_language_mode());
-    Callable callable = CodeFactory::StoreICInOptimizedCode(
+    Callable callable = CodeFactory::StoreGlobalICInOptimizedCode(
         isolate(), function_language_mode());
     HValue* stub = Add<HConstant>(callable.code());
     HValue* values[] = {global_object, name, value, slot_value, vector_value};
-    HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
-        Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values));
+    HCallWithDescriptor* instr =
+        Add<HCallWithDescriptor>(Code::STORE_GLOBAL_IC, stub, 0,
+                                 callable.descriptor(), ArrayVector(values));
     USE(instr);
     DCHECK(instr->HasObservableSideEffects());
     Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
@@ -6806,8 +6808,7 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
   }
 }

-
-void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
+void HOptimizedGraphBuilder::VisitSuspend(Suspend* expr) {
   // Generators are not optimized, so we should never get here.
   UNREACHABLE();
 }
@@ -7166,7 +7167,7 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
         elements_kind != GetInitialFastElementsKind()) {
       possible_transitioned_maps.Add(map);
     }
-    if (IsSloppyArgumentsElements(elements_kind)) {
+    if (IsSloppyArgumentsElementsKind(elements_kind)) {
       HInstruction* result = BuildKeyedGeneric(access_type, expr, slot, object,
                                                key, val);
       *has_side_effects = result->HasObservableSideEffects();
@@ -7179,6 +7180,7 @@ HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
     Map* transitioned_map =
         map->FindElementsKindTransitionedMap(&possible_transitioned_maps);
     if (transitioned_map != nullptr) {
+      DCHECK(!map->is_stable());
       transition_target.Add(handle(transitioned_map));
     } else {
       transition_target.Add(Handle<Map>());
@@ -8100,7 +8102,8 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
                             top_info()->parse_info()->ast_value_factory());
   parse_info.set_ast_value_factory_owned(false);

-  CompilationInfo target_info(parse_info.zone(), &parse_info, target);
+  CompilationInfo target_info(parse_info.zone(), &parse_info,
+                              target->GetIsolate(), target);

   if (inlining_kind != CONSTRUCT_CALL_RETURN &&
       IsClassConstructor(target_shared->kind())) {
@@ -8112,7 +8115,7 @@ bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
     TraceInline(target, caller, "target is being debugged");
     return false;
   }
-  if (!Compiler::ParseAndAnalyze(target_info.parse_info())) {
+  if (!Compiler::ParseAndAnalyze(&target_info)) {
     if (target_info.isolate()->has_pending_exception()) {
       // Parse or scope error, never optimize this function.
       SetStackOverflow();
@@ -8818,7 +8821,7 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
             Handle<JSObject>::null(), true);

       // Threshold for fast inlined Array.shift().
-      HConstant* inline_threshold = Add<HConstant>(JSArray::kMaxCopyElements);
+      HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));

       Drop(args_count_no_receiver);
       HValue* result;
@@ -9744,6 +9747,7 @@ bool HOptimizedGraphBuilder::TryInlineArrayCall(Expression* expression,
 static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
   return constructor->has_initial_map() &&
          !IsDerivedConstructor(constructor->shared()->kind()) &&
+         !constructor->initial_map()->is_dictionary_map() &&
          constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
          constructor->initial_map()->instance_size() <
              HAllocate::kMaxInlineSize;
@@ -9869,328 +9873,6 @@ void HOptimizedGraphBuilder::BuildInitializeInobjectProperties(
   }
 }

-
-HValue* HGraphBuilder::BuildAllocateEmptyArrayBuffer(HValue* byte_length) {
-  // We HForceRepresentation here to avoid allocations during an *-to-tagged
-  // HChange that could cause GC while the array buffer object is not fully
-  // initialized.
-  HObjectAccess byte_length_access(HObjectAccess::ForJSArrayBufferByteLength());
-  byte_length = AddUncasted<HForceRepresentation>(
-      byte_length, byte_length_access.representation());
-  HAllocate* result =
-      BuildAllocate(Add<HConstant>(JSArrayBuffer::kSizeWithInternalFields),
-                    HType::JSObject(), JS_ARRAY_BUFFER_TYPE, HAllocationMode());
-
-  HValue* native_context = BuildGetNativeContext();
-  Add<HStoreNamedField>(
-      result, HObjectAccess::ForMap(),
-      Add<HLoadNamedField>(
-          native_context, nullptr,
-          HObjectAccess::ForContextSlot(Context::ARRAY_BUFFER_MAP_INDEX)));
-
-  HConstant* empty_fixed_array =
-      Add<HConstant>(isolate()->factory()->empty_fixed_array());
-  Add<HStoreNamedField>(
-      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
-      empty_fixed_array);
-  Add<HStoreNamedField>(
-      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
-      empty_fixed_array);
-  Add<HStoreNamedField>(
-      result, HObjectAccess::ForJSArrayBufferBackingStore().WithRepresentation(
-                  Representation::Smi()),
-      graph()->GetConstant0());
-  Add<HStoreNamedField>(result, byte_length_access, byte_length);
-  Add<HStoreNamedField>(result, HObjectAccess::ForJSArrayBufferBitFieldSlot(),
-                        graph()->GetConstant0());
-  Add<HStoreNamedField>(
-      result, HObjectAccess::ForJSArrayBufferBitField(),
-      Add<HConstant>((1 << JSArrayBuffer::IsExternal::kShift) |
-                     (1 << JSArrayBuffer::IsNeuterable::kShift)));
-
-  for (int field = 0; field < v8::ArrayBuffer::kInternalFieldCount; ++field) {
-    Add<HStoreNamedField>(
-        result,
-        HObjectAccess::ForObservableJSObjectOffset(
-            JSArrayBuffer::kSize + field * kPointerSize, Representation::Smi()),
-        graph()->GetConstant0());
-  }
-
-  return result;
-}
-
-
-template <class ViewClass>
-void HGraphBuilder::BuildArrayBufferViewInitialization(
-    HValue* obj,
-    HValue* buffer,
-    HValue* byte_offset,
-    HValue* byte_length) {
-
-  for (int offset = ViewClass::kSize;
-       offset < ViewClass::kSizeWithInternalFields;
-       offset += kPointerSize) {
-    Add<HStoreNamedField>(obj,
-        HObjectAccess::ForObservableJSObjectOffset(offset),
-        graph()->GetConstant0());
-  }
-
-  Add<HStoreNamedField>(
-      obj,
-      HObjectAccess::ForJSArrayBufferViewByteOffset(),
-      byte_offset);
-  Add<HStoreNamedField>(
-      obj,
-      HObjectAccess::ForJSArrayBufferViewByteLength(),
-      byte_length);
-  Add<HStoreNamedField>(obj, HObjectAccess::ForJSArrayBufferViewBuffer(),
-                        buffer);
-}
-
-
-HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
-    ExternalArrayType array_type,
-    bool is_zero_byte_offset,
-    HValue* buffer, HValue* byte_offset, HValue* length) {
-  Handle<Map> external_array_map(
-      isolate()->heap()->MapForFixedTypedArray(array_type));
-
-  // The HForceRepresentation is to prevent possible deopt on int-smi
-  // conversion after allocation but before the new object fields are set.
-  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
-  HValue* elements = Add<HAllocate>(
-      Add<HConstant>(FixedTypedArrayBase::kHeaderSize), HType::HeapObject(),
-      NOT_TENURED, external_array_map->instance_type(),
-      graph()->GetConstant0());
-
-  AddStoreMapConstant(elements, external_array_map);
-  Add<HStoreNamedField>(elements,
-      HObjectAccess::ForFixedArrayLength(), length);
-
-  HValue* backing_store = Add<HLoadNamedField>(
-      buffer, nullptr, HObjectAccess::ForJSArrayBufferBackingStore());
-
-  HValue* typed_array_start;
-  if (is_zero_byte_offset) {
-    typed_array_start = backing_store;
-  } else {
-    HInstruction* external_pointer =
-        AddUncasted<HAdd>(backing_store, byte_offset);
-    // Arguments are checked prior to call to TypedArrayInitialize,
-    // including byte_offset.
-    external_pointer->ClearFlag(HValue::kCanOverflow);
-    typed_array_start = external_pointer;
-  }
-
-  Add<HStoreNamedField>(elements,
-      HObjectAccess::ForFixedTypedArrayBaseBasePointer(),
-      graph()->GetConstant0());
-  Add<HStoreNamedField>(elements,
-      HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
-      typed_array_start);
-
-  return elements;
-}
-
-
-HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
-    ExternalArrayType array_type, size_t element_size,
-    ElementsKind fixed_elements_kind, HValue* byte_length, HValue* length,
-    bool initialize) {
-  STATIC_ASSERT(
-      (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
-  HValue* total_size;
-
-  // if fixed array's elements are not aligned to object's alignment,
-  // we need to align the whole array to object alignment.
-  if (element_size % kObjectAlignment != 0) {
-    total_size = BuildObjectSizeAlignment(
-        byte_length, FixedTypedArrayBase::kHeaderSize);
-  } else {
-    total_size = AddUncasted<HAdd>(byte_length,
-        Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
-    total_size->ClearFlag(HValue::kCanOverflow);
-  }
-
-  // The HForceRepresentation is to prevent possible deopt on int-smi
-  // conversion after allocation but before the new object fields are set.
-  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
-  Handle<Map> fixed_typed_array_map(
-      isolate()->heap()->MapForFixedTypedArray(array_type));
-  HAllocate* elements = Add<HAllocate>(
-      total_size, HType::HeapObject(), NOT_TENURED,
-      fixed_typed_array_map->instance_type(), graph()->GetConstant0());
-
-#ifndef V8_HOST_ARCH_64_BIT
-  if (array_type == kExternalFloat64Array) {
-    elements->MakeDoubleAligned();
-  }
-#endif
-
-  AddStoreMapConstant(elements, fixed_typed_array_map);
-
-  Add<HStoreNamedField>(elements,
-      HObjectAccess::ForFixedArrayLength(),
-      length);
-  Add<HStoreNamedField>(
-      elements, HObjectAccess::ForFixedTypedArrayBaseBasePointer(), elements);
-
-  Add<HStoreNamedField>(
-      elements, HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
-      Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()));
-
-  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
-
-  if (initialize) {
-    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
-
-    HValue* backing_store = AddUncasted<HAdd>(
-        Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()),
-        elements, AddOfExternalAndTagged);
-
-    HValue* key = builder.BeginBody(
-        Add<HConstant>(static_cast<int32_t>(0)),
-        length, Token::LT);
-    Add<HStoreKeyed>(backing_store, key, filler, elements, fixed_elements_kind);
-
-    builder.EndBody();
-  }
-  return elements;
-}
-
-
-void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
-    CallRuntime* expr) {
-  ZoneList<Expression*>* arguments = expr->arguments();
-
-  static const int kObjectArg = 0;
-  static const int kArrayIdArg = 1;
-  static const int kBufferArg = 2;
-  static const int kByteOffsetArg = 3;
-  static const int kByteLengthArg = 4;
-  static const int kInitializeArg = 5;
-  static const int kArgsLength = 6;
-  DCHECK(arguments->length() == kArgsLength);
-
-
-  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
-  HValue* obj = Pop();
-
-  if (!arguments->at(kArrayIdArg)->IsLiteral()) {
-    // This should never happen in real use, but can happen when fuzzing.
-    // Just bail out.
-    Bailout(kNeedSmiLiteral);
-    return;
-  }
-  Handle<Object> value =
-      static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
-  if (!value->IsSmi()) {
-    // This should never happen in real use, but can happen when fuzzing.
-    // Just bail out.
-    Bailout(kNeedSmiLiteral);
-    return;
-  }
-  int array_id = Smi::cast(*value)->value();
-
-  HValue* buffer;
-  if (!arguments->at(kBufferArg)->IsNullLiteral()) {
-    CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
-    buffer = Pop();
-  } else {
-    buffer = NULL;
-  }
-
-  HValue* byte_offset;
-  bool is_zero_byte_offset;
-
-  if (arguments->at(kByteOffsetArg)->IsLiteral() &&
-      Smi::kZero ==
-          *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
-    byte_offset = Add<HConstant>(static_cast<int32_t>(0));
-    is_zero_byte_offset = true;
-  } else {
-    CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
-    byte_offset = Pop();
-    is_zero_byte_offset = false;
-    DCHECK(buffer != NULL);
-  }
-
-  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
-  HValue* byte_length = Pop();
-
-  CHECK(arguments->at(kInitializeArg)->IsLiteral());
-  bool initialize = static_cast<Literal*>(arguments->at(kInitializeArg))
-                        ->value()
-                        ->BooleanValue();
-
-  NoObservableSideEffectsScope scope(this);
-  IfBuilder byte_offset_smi(this);
-
-  if (!is_zero_byte_offset) {
-    byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
-    byte_offset_smi.Then();
-  }
-
-  ExternalArrayType array_type =
-      kExternalInt8Array;  // Bogus initialization.
-  size_t element_size = 1;  // Bogus initialization.
-  ElementsKind fixed_elements_kind =  // Bogus initialization.
-      INT8_ELEMENTS;
-  Runtime::ArrayIdToTypeAndSize(array_id,
-      &array_type,
-      &fixed_elements_kind,
-      &element_size);
-
-
-  {  // byte_offset is Smi.
-    HValue* allocated_buffer = buffer;
-    if (buffer == NULL) {
-      allocated_buffer = BuildAllocateEmptyArrayBuffer(byte_length);
-    }
-    BuildArrayBufferViewInitialization<JSTypedArray>(obj, allocated_buffer,
-                                                     byte_offset, byte_length);
-
-
-    HInstruction* length = AddUncasted<HDiv>(byte_length,
-        Add<HConstant>(static_cast<int32_t>(element_size)));
-    // Callers (in typedarray.js) ensure that length <= %_MaxSmi().
-    length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
-
-    Add<HStoreNamedField>(obj,
-        HObjectAccess::ForJSTypedArrayLength(),
-        length);
-
-    HValue* elements;
-    if (buffer != NULL) {
-      elements = BuildAllocateExternalElements(
-          array_type, is_zero_byte_offset, buffer, byte_offset, length);
-    } else {
-      DCHECK(is_zero_byte_offset);
-      elements = BuildAllocateFixedTypedArray(array_type, element_size,
-                                              fixed_elements_kind, byte_length,
-                                              length, initialize);
-    }
-    Add<HStoreNamedField>(
-        obj, HObjectAccess::ForElementsPointer(), elements);
-  }
-
-  if (!is_zero_byte_offset) {
-    byte_offset_smi.Else();
-    {  // byte_offset is not Smi.
-      Push(obj);
-      CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
-      Push(buffer);
-      Push(byte_offset);
-      Push(byte_length);
-      CHECK_ALIVE(VisitForValue(arguments->at(kInitializeArg)));
-      PushArgumentsFromEnvironment(kArgsLength);
-      Add<HCallRuntime>(expr->function(), kArgsLength);
-    }
-  }
-  byte_offset_smi.End();
-}
-
-
 void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
   DCHECK(expr->arguments()->length() == 0);
   HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
@@ -11021,7 +10703,6 @@ HValue* HGraphBuilder::BuildBinaryOperation(
   return instr;
 }

-
 // Check for the form (%_ClassOf(foo) === 'BarClass').
 static bool IsClassOfTest(CompareOperation* expr) {
   if (expr->op() != Token::EQ_STRICT) return false;
@@ -11213,9 +10894,10 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
   // with the full codegen: We don't push both left and right values onto
   // the expression stack when one side is a special-case literal.
   Expression* sub_expr = NULL;
-  Handle<String> check;
-  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
-    return HandleLiteralCompareTypeof(expr, sub_expr, check);
+  Literal* literal;
+  if (expr->IsLiteralCompareTypeof(&sub_expr, &literal)) {
+    return HandleLiteralCompareTypeof(expr, sub_expr,
+                                      Handle<String>::cast(literal->value()));
   }
   if (expr->IsLiteralCompareUndefined(&sub_expr)) {
     return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
@@ -11559,6 +11241,11 @@ void HOptimizedGraphBuilder::VisitGetIterator(GetIterator* expr) {
   UNREACHABLE();
 }

+void HOptimizedGraphBuilder::VisitImportCallExpression(
+    ImportCallExpression* expr) {
+  UNREACHABLE();
+}
+
 HValue* HOptimizedGraphBuilder::AddThisFunction() {
   return AddInstruction(BuildThisFunction());
 }
@@ -11621,6 +11308,14 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
   // properties to a safe value.
   BuildInitializeInobjectProperties(object, initial_map);

+  // Copy in-object properties.
+  if (initial_map->NumberOfFields() != 0 ||
+      initial_map->unused_property_fields() > 0) {
+    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
+                                pretenure_flag);
+  }
+
+  // Copy elements.
   Handle<FixedArrayBase> elements(boilerplate_object->elements());
   int elements_size = (elements->length() > 0 &&
       elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
@@ -11658,12 +11353,6 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
         object_elements_cow);
   }

-  // Copy in-object properties.
-  if (initial_map->NumberOfFields() != 0 ||
-      initial_map->unused_property_fields() > 0) {
-    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
-                                pretenure_flag);
-  }
   return object;
 }