// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif  // !V8_ENABLE_WEBASSEMBLY

#ifndef V8_WASM_WASM_OBJECTS_INL_H_
#define V8_WASM_WASM_OBJECTS_INL_H_

#include <type_traits>

#include "src/base/memory.h"
#include "src/common/ptr-compr.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/contexts-inl.h"
#include "src/objects/foreign.h"
#include "src/objects/heap-number.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/js-objects-inl.h"
#include "src/objects/managed.h"
#include "src/objects/oddball-inl.h"
#include "src/objects/script-inl.h"
#include "src/roots/roots.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects.h"

// Has to be the last include (doesn't have include guards)
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/wasm/wasm-objects-tq-inl.inc"

TQ_OBJECT_CONSTRUCTORS_IMPL(WasmTagObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmExceptionTag)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmCapiFunctionData)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmExportedFunctionData)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmGlobalObject)
OBJECT_CONSTRUCTORS_IMPL(WasmInstanceObject, JSObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmMemoryObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmModuleObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmTableObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(AsmWasmData)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmFunctionData)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmApiFunctionRef)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmInternalFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmTypeInfo)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmStruct)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmArray)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmContinuationObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmSuspenderObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmResumeData)

CAST_ACCESSOR(WasmInstanceObject)

#define OPTIONAL_ACCESSORS(holder, name, type, offset)                  \
  DEF_GETTER(holder, has_##name, bool) {                                \
    Object value = TaggedField<Object, offset>::load(cage_base, *this); \
    return !value.IsUndefined(GetReadOnlyRoots(cage_base));             \
  }                                                                     \
  ACCESSORS_CHECKED2(holder, name, type, offset,                        \
                     !value.IsUndefined(GetReadOnlyRoots(cage_base)), true)

#define PRIMITIVE_ACCESSORS(holder, name, type, offset)               \
  type holder::name() const {                                         \
    return ReadMaybeUnalignedValue<type>(FIELD_ADDR(*this, offset));  \
  }                                                                   \
  void holder::set_##name(type value) {                               \
    WriteMaybeUnalignedValue<type>(FIELD_ADDR(*this, offset), value); \
  }

#define SANDBOXED_POINTER_ACCESSORS(holder, name, type, offset)      \
  type holder::name() const {                                        \
    PtrComprCageBase sandbox_base = GetPtrComprCageBase(*this);      \
    Address value = ReadSandboxedPointerField(offset, sandbox_base); \
    return reinterpret_cast<type>(value);                            \
  }                                                                  \
  void holder::set_##name(type value) {                              \
    PtrComprCageBase sandbox_base = GetPtrComprCageBase(*this);      \
    Address addr = reinterpret_cast<Address>(value);                 \
    WriteSandboxedPointerField(offset, sandbox_base, addr);          \
  }
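
// Illustration (sketch, not exhaustive): the accessor macros above expand
// into plain getter/setter pairs on the holder class. For example,
//   PRIMITIVE_ACCESSORS(WasmInstanceObject, memory_size, size_t,
//                       kMemorySizeOffset)
// expands to roughly
//   size_t WasmInstanceObject::memory_size() const {
//     return ReadMaybeUnalignedValue<size_t>(
//         FIELD_ADDR(*this, kMemorySizeOffset));
//   }
//   void WasmInstanceObject::set_memory_size(size_t value) {
//     WriteMaybeUnalignedValue<size_t>(FIELD_ADDR(*this, kMemorySizeOffset),
//                                      value);
//   }
// OPTIONAL_ACCESSORS additionally emits a has_##name() predicate that
// reports whether the field currently holds something other than undefined.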

// WasmModuleObject
wasm::NativeModule* WasmModuleObject::native_module() const {
  return managed_native_module().raw();
}
const std::shared_ptr<wasm::NativeModule>&
WasmModuleObject::shared_native_module() const {
  return managed_native_module().get();
}
const wasm::WasmModule* WasmModuleObject::module() const {
  // TODO(clemensb): Remove this helper (inline in callers).
  return native_module()->module();
}
bool WasmModuleObject::is_asm_js() {
  bool asm_js = is_asmjs_module(module());
  DCHECK_EQ(asm_js, script().IsUserJavaScript());
  return asm_js;
}

// WasmMemoryObject
OPTIONAL_ACCESSORS(WasmMemoryObject, instances, WeakArrayList, kInstancesOffset)

// WasmGlobalObject
ACCESSORS(WasmGlobalObject, untagged_buffer, JSArrayBuffer,
          kUntaggedBufferOffset)
ACCESSORS(WasmGlobalObject, tagged_buffer, FixedArray, kTaggedBufferOffset)

wasm::ValueType WasmGlobalObject::type() const {
  return wasm::ValueType::FromRawBitField(static_cast<uint32_t>(raw_type()));
}
void WasmGlobalObject::set_type(wasm::ValueType value) {
  set_raw_type(static_cast<int>(value.raw_bit_field()));
}

int WasmGlobalObject::type_size() const { return type().value_kind_size(); }

Address WasmGlobalObject::address() const {
  DCHECK_NE(type(), wasm::kWasmAnyRef);
  DCHECK_LE(offset() + type_size(), untagged_buffer().byte_length());
  return Address(untagged_buffer().backing_store()) + offset();
}

int32_t WasmGlobalObject::GetI32() {
  return base::ReadUnalignedValue<int32_t>(address());
}

int64_t WasmGlobalObject::GetI64() {
  return base::ReadUnalignedValue<int64_t>(address());
}

float WasmGlobalObject::GetF32() {
  return base::ReadUnalignedValue<float>(address());
}

double WasmGlobalObject::GetF64() {
  return base::ReadUnalignedValue<double>(address());
}

Handle<Object> WasmGlobalObject::GetRef() {
  // We use this getter for externref, funcref, and stringref.
  DCHECK(type().is_reference());
  return handle(tagged_buffer().get(offset()), GetIsolate());
}

void WasmGlobalObject::SetI32(int32_t value) {
  base::WriteUnalignedValue(address(), value);
}

void WasmGlobalObject::SetI64(int64_t value) {
  base::WriteUnalignedValue(address(), value);
}

void WasmGlobalObject::SetF32(float value) {
  base::WriteUnalignedValue(address(), value);
}

void WasmGlobalObject::SetF64(double value) {
  base::WriteUnalignedValue(address(), value);
}

void WasmGlobalObject::SetRef(Handle<Object> value) {
  DCHECK(type().is_object_reference());
  tagged_buffer().set(offset(), *value);
}
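
// Illustration (sketch): a global of numeric type lives at offset() bytes
// into the untagged buffer and is accessed as raw, possibly unaligned memory:
//   Address addr = global->address();  // backing_store() + offset()
//   int32_t v = global->GetI32();      // 4-byte read at addr
//   global->SetI32(v + 1);             // 4-byte write at addr
// Globals of reference type are instead stored in the tagged_buffer()
// FixedArray and accessed via GetRef()/SetRef(), so the GC can trace them.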

// WasmInstanceObject
SANDBOXED_POINTER_ACCESSORS(WasmInstanceObject, memory_start, byte*,
                            kMemoryStartOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, memory_size, size_t, kMemorySizeOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, isolate_root, Address,
                    kIsolateRootOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, stack_limit_address, Address,
                    kStackLimitAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, real_stack_limit_address, Address,
                    kRealStackLimitAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, new_allocation_limit_address, Address*,
                    kNewAllocationLimitAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, new_allocation_top_address, Address*,
                    kNewAllocationTopAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, old_allocation_limit_address, Address*,
                    kOldAllocationLimitAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, old_allocation_top_address, Address*,
                    kOldAllocationTopAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, isorecursive_canonical_types,
                    const uint32_t*, kIsorecursiveCanonicalTypesOffset)
SANDBOXED_POINTER_ACCESSORS(WasmInstanceObject, globals_start, byte*,
                            kGlobalsStartOffset)
ACCESSORS(WasmInstanceObject, imported_mutable_globals, ByteArray,
          kImportedMutableGlobalsOffset)
ACCESSORS(WasmInstanceObject, imported_function_targets, FixedAddressArray,
          kImportedFunctionTargetsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_size, uint32_t,
                    kIndirectFunctionTableSizeOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_sig_ids,
                    uint32_t*, kIndirectFunctionTableSigIdsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_targets,
                    Address*, kIndirectFunctionTableTargetsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, jump_table_start, Address,
                    kJumpTableStartOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, hook_on_function_call_address, Address,
                    kHookOnFunctionCallAddressOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, tiering_budget_array, uint32_t*,
                    kTieringBudgetArrayOffset)
ACCESSORS(WasmInstanceObject, data_segment_starts, FixedAddressArray,
          kDataSegmentStartsOffset)
ACCESSORS(WasmInstanceObject, data_segment_sizes, FixedUInt32Array,
          kDataSegmentSizesOffset)
ACCESSORS(WasmInstanceObject, dropped_elem_segments, FixedUInt8Array,
          kDroppedElemSegmentsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, break_on_entry, uint8_t,
                    kBreakOnEntryOffset)

ACCESSORS(WasmInstanceObject, module_object, WasmModuleObject,
          kModuleObjectOffset)
ACCESSORS(WasmInstanceObject, exports_object, JSObject, kExportsObjectOffset)
ACCESSORS(WasmInstanceObject, native_context, Context, kNativeContextOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, memory_object, WasmMemoryObject,
                   kMemoryObjectOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, untagged_globals_buffer, JSArrayBuffer,
                   kUntaggedGlobalsBufferOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, tagged_globals_buffer, FixedArray,
                   kTaggedGlobalsBufferOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, imported_mutable_globals_buffers,
                   FixedArray, kImportedMutableGlobalsBuffersOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, tables, FixedArray, kTablesOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, indirect_function_tables, FixedArray,
                   kIndirectFunctionTablesOffset)
ACCESSORS(WasmInstanceObject, imported_function_refs, FixedArray,
          kImportedFunctionRefsOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, indirect_function_table_refs, FixedArray,
                   kIndirectFunctionTableRefsOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, tags_table, FixedArray, kTagsTableOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, wasm_internal_functions, FixedArray,
                   kWasmInternalFunctionsOffset)
ACCESSORS(WasmInstanceObject, managed_object_maps, FixedArray,
          kManagedObjectMapsOffset)
ACCESSORS(WasmInstanceObject, feedback_vectors, FixedArray,
          kFeedbackVectorsOffset)

void WasmInstanceObject::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
    DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
    memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
           FIELD_SIZE(kOptionalPaddingOffset));
  }
}

ImportedFunctionEntry::ImportedFunctionEntry(
    Handle<WasmInstanceObject> instance, int index)
    : instance_(instance), index_(index) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, instance->module()->num_imported_functions);
}

// WasmExceptionPackage
OBJECT_CONSTRUCTORS_IMPL(WasmExceptionPackage, JSObject)
CAST_ACCESSOR(WasmExceptionPackage)

// WasmExportedFunction
WasmExportedFunction::WasmExportedFunction(Address ptr) : JSFunction(ptr) {
  SLOW_DCHECK(IsWasmExportedFunction(*this));
}
CAST_ACCESSOR(WasmExportedFunction)

// WasmInternalFunction
EXTERNAL_POINTER_ACCESSORS(WasmInternalFunction, call_target, Address,
                           kCallTargetOffset,
                           kWasmInternalFunctionCallTargetTag)

// WasmFunctionData
ACCESSORS(WasmFunctionData, internal, WasmInternalFunction, kInternalOffset)

EXTERNAL_POINTER_ACCESSORS(WasmExportedFunctionData, sig, wasm::FunctionSig*,
                           kSigOffset, kWasmExportedFunctionDataSignatureTag)

// WasmJSFunction
WasmJSFunction::WasmJSFunction(Address ptr) : JSFunction(ptr) {
  SLOW_DCHECK(IsWasmJSFunction(*this));
}
CAST_ACCESSOR(WasmJSFunction)

// WasmJSFunctionData
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmJSFunctionData)

// WasmCapiFunction
WasmCapiFunction::WasmCapiFunction(Address ptr) : JSFunction(ptr) {
  SLOW_DCHECK(IsWasmCapiFunction(*this));
}
CAST_ACCESSOR(WasmCapiFunction)

// WasmExternalFunction
WasmExternalFunction::WasmExternalFunction(Address ptr) : JSFunction(ptr) {
  SLOW_DCHECK(IsWasmExternalFunction(*this));
}
CAST_ACCESSOR(WasmExternalFunction)

// WasmIndirectFunctionTable
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmIndirectFunctionTable)
PRIMITIVE_ACCESSORS(WasmIndirectFunctionTable, sig_ids, uint32_t*,
                    kSigIdsOffset)
PRIMITIVE_ACCESSORS(WasmIndirectFunctionTable, targets, Address*,
                    kTargetsOffset)
OPTIONAL_ACCESSORS(WasmIndirectFunctionTable, managed_native_allocations,
                   Foreign, kManagedNativeAllocationsOffset)

// WasmTypeInfo
EXTERNAL_POINTER_ACCESSORS(WasmTypeInfo, native_type, Address,
                           kNativeTypeOffset, kWasmTypeInfoNativeTypeTag)

#undef OPTIONAL_ACCESSORS
#undef READ_PRIMITIVE_FIELD
#undef WRITE_PRIMITIVE_FIELD
#undef PRIMITIVE_ACCESSORS
#undef SANDBOXED_POINTER_ACCESSORS

wasm::ValueType WasmTableObject::type() {
  return wasm::ValueType::FromRawBitField(raw_type());
}

bool WasmMemoryObject::has_maximum_pages() { return maximum_pages() >= 0; }

// static
Handle<Object> WasmObject::ReadValueAt(Isolate* isolate, Handle<HeapObject> obj,
                                       wasm::ValueType type, uint32_t offset) {
  Address field_address = obj->GetFieldAddress(offset);
  switch (type.kind()) {
    case wasm::kI8: {
      int8_t value = base::Memory<int8_t>(field_address);
      return handle(Smi::FromInt(value), isolate);
    }
    case wasm::kI16: {
      int16_t value = base::Memory<int16_t>(field_address);
      return handle(Smi::FromInt(value), isolate);
    }
    case wasm::kI32: {
      int32_t value = base::Memory<int32_t>(field_address);
      return isolate->factory()->NewNumberFromInt(value);
    }
    case wasm::kI64: {
      int64_t value = base::ReadUnalignedValue<int64_t>(field_address);
      return BigInt::FromInt64(isolate, value);
    }
    case wasm::kF32: {
      float value = base::Memory<float>(field_address);
      return isolate->factory()->NewNumber(value);
    }
    case wasm::kF64: {
      double value = base::ReadUnalignedValue<double>(field_address);
      return isolate->factory()->NewNumber(value);
    }
    case wasm::kS128:
      // TODO(v8:11804): implement
      UNREACHABLE();
    case wasm::kRef:
    case wasm::kRefNull: {
      ObjectSlot slot(field_address);
      return handle(slot.load(isolate), isolate);
    }
    case wasm::kRtt:
      // Rtt values are not supposed to be made available to JavaScript side.
      UNREACHABLE();
    case wasm::kVoid:
    case wasm::kBottom:
      UNREACHABLE();
  }
}

// static
MaybeHandle<Object> WasmObject::ToWasmValue(Isolate* isolate,
                                            wasm::ValueType type,
                                            Handle<Object> value) {
  switch (type.kind()) {
    case wasm::kI8:
    case wasm::kI16:
    case wasm::kI32:
    case wasm::kF32:
    case wasm::kF64:
      return Object::ToNumber(isolate, value);
    case wasm::kI64:
      return BigInt::FromObject(isolate, value);
    case wasm::kRef:
    case wasm::kRefNull: {
      // TODO(v8:11804): implement ref type check
      UNREACHABLE();
    }
    case wasm::kS128:
      // TODO(v8:11804): implement
      UNREACHABLE();
    case wasm::kRtt:
      // Rtt values are not supposed to be made available to JavaScript side.
      UNREACHABLE();
    case wasm::kVoid:
    case wasm::kBottom:
      UNREACHABLE();
  }
}

// Conversions from Numeric objects.
// static
template <typename ElementType>
ElementType WasmObject::FromNumber(Object value) {
  // The value must already be prepared for storing to numeric fields.
  DCHECK(value.IsNumber());
  if (value.IsSmi()) {
    return static_cast<ElementType>(Smi::ToInt(value));
  } else if (value.IsHeapNumber()) {
    double double_value = HeapNumber::cast(value).value();
    if (std::is_same<ElementType, double>::value ||
        std::is_same<ElementType, float>::value) {
      return static_cast<ElementType>(double_value);
    } else {
      CHECK(std::is_integral<ElementType>::value);
      return static_cast<ElementType>(DoubleToInt32(double_value));
    }
  }
  UNREACHABLE();
}
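
// Illustration (sketch): storing a JS value into a numeric wasm field
// combines ToWasmValue() above with WriteValueAt() below. For an i8 field
// and the JS value 3.7:
//   ToWasmValue()         converts via Object::ToNumber()   -> HeapNumber 3.7
//   FromNumber<int8_t>()  applies DoubleToInt32() and casts  -> int8_t 3
//   WriteValueAt()        stores the byte via base::Memory<int8_t>()
// Out-of-range values are truncated rather than range-checked; per the
// DCHECK in FromNumber(), callers must already have prepared the value for
// numeric storage.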

// static
void WasmObject::WriteValueAt(Isolate* isolate, Handle<HeapObject> obj,
                              wasm::ValueType type, uint32_t offset,
                              Handle<Object> value) {
  Address field_address = obj->GetFieldAddress(offset);
  switch (type.kind()) {
    case wasm::kI8: {
      auto scalar_value = FromNumber<int8_t>(*value);
      base::Memory<int8_t>(field_address) = scalar_value;
      break;
    }
    case wasm::kI16: {
      auto scalar_value = FromNumber<int16_t>(*value);
      base::Memory<int16_t>(field_address) = scalar_value;
      break;
    }
    case wasm::kI32: {
      auto scalar_value = FromNumber<int32_t>(*value);
      base::Memory<int32_t>(field_address) = scalar_value;
      break;
    }
    case wasm::kI64: {
      int64_t scalar_value = BigInt::cast(*value).AsInt64();
      base::WriteUnalignedValue<int64_t>(field_address, scalar_value);
      break;
    }
    case wasm::kF32: {
      auto scalar_value = FromNumber<float>(*value);
      base::Memory<float>(field_address) = scalar_value;
      break;
    }
    case wasm::kF64: {
      auto scalar_value = FromNumber<double>(*value);
      base::WriteUnalignedValue<double>(field_address, scalar_value);
      break;
    }
    case wasm::kRef:
    case wasm::kRefNull:
      // TODO(v8:11804): implement
      UNREACHABLE();
    case wasm::kS128:
      // TODO(v8:11804): implement
      UNREACHABLE();
    case wasm::kRtt:
      // Rtt values are not supposed to be made available to JavaScript side.
      UNREACHABLE();
    case wasm::kVoid:
    case wasm::kBottom:
      UNREACHABLE();
  }
}

wasm::StructType* WasmStruct::type(Map map) {
  WasmTypeInfo type_info = map.wasm_type_info();
  return reinterpret_cast<wasm::StructType*>(type_info.native_type());
}

wasm::StructType* WasmStruct::GcSafeType(Map map) {
  DCHECK_EQ(WASM_STRUCT_TYPE, map.instance_type());
  HeapObject raw = HeapObject::cast(map.constructor_or_back_pointer());
  // The {WasmTypeInfo} might be in the middle of being moved, which is why we
  // can't read its map for a checked cast. But we can rely on its native type
  // pointer being intact in the old location.
  WasmTypeInfo type_info = WasmTypeInfo::unchecked_cast(raw);
  return reinterpret_cast<wasm::StructType*>(type_info.native_type());
}

int WasmStruct::Size(const wasm::StructType* type) {
  // Object size must fit into a Smi (because of filler objects), and its
  // computation must not overflow.
  static_assert(Smi::kMaxValue <= kMaxInt);
  DCHECK_LE(type->total_fields_size(), Smi::kMaxValue - kHeaderSize);
  return std::max(kHeaderSize + static_cast<int>(type->total_fields_size()),
                  Heap::kMinObjectSizeInTaggedWords * kTaggedSize);
}

// static
void WasmStruct::EncodeInstanceSizeInMap(int instance_size, Map map) {
  // WasmStructs can be bigger than the {map.instance_size_in_words} field
  // can describe; yet we have to store the instance size somewhere on the
  // map so that the GC can read it without relying on any other objects
  // still being around. To solve this problem, we store the instance size
  // in two other fields that are otherwise unused for WasmStructs.
  static_assert(0xFFFF - kHeaderSize >
                wasm::kMaxValueTypeSize * wasm::kV8MaxWasmStructFields);
  map.SetWasmByte1(instance_size & 0xFF);
  map.SetWasmByte2(instance_size >> 8);
}

// static
int WasmStruct::DecodeInstanceSizeFromMap(Map map) {
  return (map.WasmByte2() << 8) | map.WasmByte1();
}
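
// Illustration (sketch): the instance size is split into a low byte
// (WasmByte1) and a high byte (WasmByte2). For a struct with
// instance_size == 0x0214 (532 bytes):
//   map.SetWasmByte1(0x14);  // 0x0214 & 0xFF
//   map.SetWasmByte2(0x02);  // 0x0214 >> 8
// DecodeInstanceSizeFromMap() then reassembles (0x02 << 8) | 0x14 == 0x0214.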

int WasmStruct::GcSafeSize(Map map) { return DecodeInstanceSizeFromMap(map); }

wasm::StructType* WasmStruct::type() const { return type(map()); }

Address WasmStruct::RawFieldAddress(int raw_offset) {
  int offset = WasmStruct::kHeaderSize + raw_offset;
  return FIELD_ADDR(*this, offset);
}

ObjectSlot WasmStruct::RawField(int raw_offset) {
  return ObjectSlot(RawFieldAddress(raw_offset));
}

// static
Handle<Object> WasmStruct::GetField(Isolate* isolate, Handle<WasmStruct> obj,
                                    uint32_t field_index) {
  wasm::StructType* type = obj->type();
  CHECK_LT(field_index, type->field_count());
  wasm::ValueType field_type = type->field(field_index);
  int offset = WasmStruct::kHeaderSize + type->field_offset(field_index);
  return ReadValueAt(isolate, obj, field_type, offset);
}

// static
void WasmStruct::SetField(Isolate* isolate, Handle<WasmStruct> obj,
                          uint32_t field_index, Handle<Object> value) {
  wasm::StructType* type = obj->type();
  CHECK_LT(field_index, type->field_count());
  wasm::ValueType field_type = type->field(field_index);
  int offset = WasmStruct::kHeaderSize + type->field_offset(field_index);
  WriteValueAt(isolate, obj, field_type, offset, value);
}

wasm::ArrayType* WasmArray::type(Map map) {
  DCHECK_EQ(WASM_ARRAY_TYPE, map.instance_type());
  WasmTypeInfo type_info = map.wasm_type_info();
  return reinterpret_cast<wasm::ArrayType*>(type_info.native_type());
}

wasm::ArrayType* WasmArray::GcSafeType(Map map) {
  DCHECK_EQ(WASM_ARRAY_TYPE, map.instance_type());
  HeapObject raw = HeapObject::cast(map.constructor_or_back_pointer());
  // The {WasmTypeInfo} might be in the middle of being moved, which is why we
  // can't read its map for a checked cast. But we can rely on its native type
  // pointer being intact in the old location.
  WasmTypeInfo type_info = WasmTypeInfo::unchecked_cast(raw);
  return reinterpret_cast<wasm::ArrayType*>(type_info.native_type());
}

wasm::ArrayType* WasmArray::type() const { return type(map()); }

int WasmArray::SizeFor(Map map, int length) {
  int element_size = DecodeElementSizeFromMap(map);
  return kHeaderSize + RoundUp(element_size * length, kTaggedSize);
}

uint32_t WasmArray::element_offset(uint32_t index) {
  DCHECK_LE(index, length());
  return WasmArray::kHeaderSize +
         index * type()->element_type().value_kind_size();
}

Address WasmArray::ElementAddress(uint32_t index) {
  return ptr() + element_offset(index) - kHeapObjectTag;
}

ObjectSlot WasmArray::ElementSlot(uint32_t index) {
  DCHECK_LE(index, length());
  DCHECK(type()->element_type().is_reference());
  return RawField(kHeaderSize + kTaggedSize * index);
}

// static
Handle<Object> WasmArray::GetElement(Isolate* isolate, Handle<WasmArray> array,
                                     uint32_t index) {
  if (index >= array->length()) {
    return isolate->factory()->undefined_value();
  }
  wasm::ValueType element_type = array->type()->element_type();
  return ReadValueAt(isolate, array, element_type,
                     array->element_offset(index));
}

// static
void WasmArray::EncodeElementSizeInMap(int element_size, Map map) {
  map.SetWasmByte1(element_size);
}

// static
int WasmArray::DecodeElementSizeFromMap(Map map) { return map.WasmByte1(); }

EXTERNAL_POINTER_ACCESSORS(WasmContinuationObject, jmpbuf, Address,
                           kJmpbufOffset, kWasmContinuationJmpbufTag)

#include "src/objects/object-macros-undef.h"

}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_WASM_OBJECTS_INL_H_