author     Allan Sandfeld Jensen <allan.jensen@qt.io>  2018-08-24 12:15:48 +0200
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>  2018-08-28 13:30:04 +0000
commit     b014812705fc80bff0a5c120dfcef88f349816dc (patch)
tree       25a2e2d9fa285f1add86aa333389a839f81a39ae /chromium/v8/src/snapshot
parent     9f4560b1027ae06fdb497023cdcaf91b8511fa74 (diff)
download   qtwebengine-chromium-b014812705fc80bff0a5c120dfcef88f349816dc.tar.gz

BASELINE: Update Chromium to 68.0.3440.125
Change-Id: I23f19369e01f688e496f5bf179abb521ad73874f
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/v8/src/snapshot')
-rw-r--r--  chromium/v8/src/snapshot/builtin-deserializer-allocator.cc |  12
-rw-r--r--  chromium/v8/src/snapshot/builtin-deserializer-allocator.h  |   2
-rw-r--r--  chromium/v8/src/snapshot/code-serializer.cc                |  16
-rw-r--r--  chromium/v8/src/snapshot/code-serializer.h                 |   5
-rw-r--r--  chromium/v8/src/snapshot/default-deserializer-allocator.cc |   5
-rw-r--r--  chromium/v8/src/snapshot/deserializer.cc                   |  73
-rw-r--r--  chromium/v8/src/snapshot/partial-deserializer.cc           |   2
-rw-r--r--  chromium/v8/src/snapshot/serializer-common.cc              |  16
-rw-r--r--  chromium/v8/src/snapshot/serializer-common.h               |   7
-rw-r--r--  chromium/v8/src/snapshot/serializer.cc                     | 180
-rw-r--r--  chromium/v8/src/snapshot/serializer.h                      |  19
-rw-r--r--  chromium/v8/src/snapshot/snapshot-common.cc                |  38
-rw-r--r--  chromium/v8/src/snapshot/snapshot.h                        |  29
-rw-r--r--  chromium/v8/src/snapshot/startup-serializer.cc             |   9
14 files changed, 265 insertions, 148 deletions
diff --git a/chromium/v8/src/snapshot/builtin-deserializer-allocator.cc b/chromium/v8/src/snapshot/builtin-deserializer-allocator.cc
index 59cab6d40a5..4e3d546fa0a 100644
--- a/chromium/v8/src/snapshot/builtin-deserializer-allocator.cc
+++ b/chromium/v8/src/snapshot/builtin-deserializer-allocator.cc
@@ -44,16 +44,16 @@ Address BuiltinDeserializerAllocator::Allocate(AllocationSpace space,
DCHECK(Internals::HasHeapObjectTag(obj));
return HeapObject::cast(obj)->address();
} else if (BSU::IsHandlerIndex(code_object_id)) {
- if (handler_allocation_ != nullptr) {
+ if (handler_allocation_ != kNullAddress) {
// Lazy deserialization.
DCHECK_NULL(handler_allocations_);
return handler_allocation_;
} else {
// Eager deserialization.
- DCHECK_NULL(handler_allocation_);
+ DCHECK_EQ(kNullAddress, handler_allocation_);
DCHECK_NOT_NULL(handler_allocations_);
int index = HandlerAllocationIndex(code_object_id);
- DCHECK_NOT_NULL(handler_allocations_->at(index));
+ DCHECK_NE(kNullAddress, handler_allocations_->at(index));
return handler_allocations_->at(index);
}
}
@@ -74,7 +74,7 @@ BuiltinDeserializerAllocator::CreateReservationsForEagerBuiltinsAndHandlers() {
uint32_t builtin_size =
deserializer()->ExtractCodeObjectSize(Builtins::kDeserializeLazy);
DCHECK_LE(builtin_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
- result.push_back({builtin_size, nullptr, nullptr});
+ result.push_back({builtin_size, kNullAddress, kNullAddress});
}
for (int i = 0; i < BSU::kNumberOfBuiltins; i++) {
@@ -88,7 +88,7 @@ BuiltinDeserializerAllocator::CreateReservationsForEagerBuiltinsAndHandlers() {
uint32_t builtin_size = deserializer()->ExtractCodeObjectSize(i);
DCHECK_LE(builtin_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
- result.push_back({builtin_size, nullptr, nullptr});
+ result.push_back({builtin_size, kNullAddress, kNullAddress});
}
// Reservations for bytecode handlers.
@@ -110,7 +110,7 @@ BuiltinDeserializerAllocator::CreateReservationsForEagerBuiltinsAndHandlers() {
const int index = BSU::BytecodeToIndex(bytecode, operand_scale);
uint32_t handler_size = deserializer()->ExtractCodeObjectSize(index);
DCHECK_LE(handler_size, MemoryAllocator::PageAreaSize(CODE_SPACE));
- result.push_back({handler_size, nullptr, nullptr});
+ result.push_back({handler_size, kNullAddress, kNullAddress});
});
return result;
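
Note: the nullptr-to-kNullAddress substitutions in this file (and in default-deserializer-allocator.cc below) follow from V8's Address changing from a byte pointer to an unsigned integral type, so "null" becomes the integer constant kNullAddress and DCHECK_NULL becomes DCHECK_EQ(kNullAddress, ...). A minimal stand-alone sketch of that idea; the alias and struct below are illustrative, not V8's real declarations:

#include <cassert>
#include <cstdint>

using Address = uintptr_t;            // assumption: integral address type
constexpr Address kNullAddress = 0;   // the integer "null" for Address fields

struct Reservation {
  uint32_t size;
  Address start;   // previously raw pointers initialized with nullptr,
  Address end;     // now plain integers initialized with kNullAddress
};

int main() {
  Reservation r{128, kNullAddress, kNullAddress};
  assert(r.start == kNullAddress);    // DCHECK_EQ(kNullAddress, ...) style
  return 0;
}
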
diff --git a/chromium/v8/src/snapshot/builtin-deserializer-allocator.h b/chromium/v8/src/snapshot/builtin-deserializer-allocator.h
index 207f02811af..65c5872d7a1 100644
--- a/chromium/v8/src/snapshot/builtin-deserializer-allocator.h
+++ b/chromium/v8/src/snapshot/builtin-deserializer-allocator.h
@@ -135,7 +135,7 @@ class BuiltinDeserializerAllocator final {
// Stores the allocated space for a single handler during lazy
// deserialization.
- Address handler_allocation_ = nullptr;
+ Address handler_allocation_ = kNullAddress;
bool next_reference_is_weak_ = false;
diff --git a/chromium/v8/src/snapshot/code-serializer.cc b/chromium/v8/src/snapshot/code-serializer.cc
index 2697e9dce42..8dc98d836ba 100644
--- a/chromium/v8/src/snapshot/code-serializer.cc
+++ b/chromium/v8/src/snapshot/code-serializer.cc
@@ -8,6 +8,7 @@
#include "src/code-stubs.h"
#include "src/counters.h"
+#include "src/debug/debug.h"
#include "src/log.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
@@ -32,7 +33,7 @@ ScriptData::ScriptData(const byte* data, int length)
// static
ScriptCompiler::CachedData* CodeSerializer::Serialize(
- Handle<SharedFunctionInfo> info, Handle<String> source) {
+ Handle<SharedFunctionInfo> info) {
Isolate* isolate = info->GetIsolate();
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
HistogramTimerScope histogram_timer(isolate->counters()->compile_serialize());
@@ -45,8 +46,7 @@ ScriptCompiler::CachedData* CodeSerializer::Serialize(
Handle<Script> script(Script::cast(info->script()), isolate);
if (FLAG_trace_serializer) {
PrintF("[Serializing from");
- Object* script = info->script();
- Script::cast(script)->name()->ShortPrint();
+ script->name()->ShortPrint();
PrintF("]\n");
}
// TODO(7110): Enable serialization of Asm modules once the AsmWasmData is
@@ -54,11 +54,14 @@ ScriptCompiler::CachedData* CodeSerializer::Serialize(
if (script->ContainsAsmModule()) return nullptr;
if (isolate->debug()->is_loaded()) return nullptr;
+ isolate->heap()->read_only_space()->ClearStringPaddingIfNeeded();
+
// Serialize code object.
+ Handle<String> source(String::cast(script->source()), isolate);
CodeSerializer cs(isolate, SerializedCodeData::SourceHash(source));
DisallowHeapAllocation no_gc;
cs.reference_map()->AddAttachedReference(*source);
- ScriptData* script_data = cs.Serialize(info);
+ ScriptData* script_data = cs.SerializeSharedFunctionInfo(info);
if (FLAG_profile_deserialization) {
double ms = timer.Elapsed().InMillisecondsF();
@@ -75,11 +78,12 @@ ScriptCompiler::CachedData* CodeSerializer::Serialize(
return result;
}
-ScriptData* CodeSerializer::Serialize(Handle<HeapObject> obj) {
+ScriptData* CodeSerializer::SerializeSharedFunctionInfo(
+ Handle<SharedFunctionInfo> info) {
DisallowHeapAllocation no_gc;
VisitRootPointer(Root::kHandleScope, nullptr,
- Handle<Object>::cast(obj).location());
+ Handle<Object>::cast(info).location());
SerializeDeferredObjects();
Pad();
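
Net effect of the code-serializer change: callers no longer pass the source string next to the SharedFunctionInfo; CodeSerializer::Serialize re-derives it from the function's Script, so the two can never disagree. A rough sketch of that API shape, with placeholder types standing in for V8's Script and SharedFunctionInfo:

#include <string>

struct Script { std::string source; };                // placeholder, not V8's
struct SharedFunctionInfo { const Script* script; };  // placeholder, not V8's

// Before: Serialize(info, source) -- the caller supplied both.
// After:  Serialize(info) -- the source is looked up from info's script,
// mirroring String::cast(script->source()) in the hunk above.
std::string SourceFor(const SharedFunctionInfo& info) {
  return info.script->source;
}

int main() {
  Script script{"function f() { return 1; }"};
  SharedFunctionInfo info{&script};
  return SourceFor(info).empty() ? 1 : 0;
}
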
diff --git a/chromium/v8/src/snapshot/code-serializer.h b/chromium/v8/src/snapshot/code-serializer.h
index 8e97f47f2f7..f6b51bf9b1a 100644
--- a/chromium/v8/src/snapshot/code-serializer.h
+++ b/chromium/v8/src/snapshot/code-serializer.h
@@ -45,10 +45,9 @@ class ScriptData {
class CodeSerializer : public Serializer<> {
public:
- static ScriptCompiler::CachedData* Serialize(Handle<SharedFunctionInfo> info,
- Handle<String> source);
+ static ScriptCompiler::CachedData* Serialize(Handle<SharedFunctionInfo> info);
- ScriptData* Serialize(Handle<HeapObject> obj);
+ ScriptData* SerializeSharedFunctionInfo(Handle<SharedFunctionInfo> info);
V8_WARN_UNUSED_RESULT static MaybeHandle<SharedFunctionInfo> Deserialize(
Isolate* isolate, ScriptData* cached_data, Handle<String> source);
diff --git a/chromium/v8/src/snapshot/default-deserializer-allocator.cc b/chromium/v8/src/snapshot/default-deserializer-allocator.cc
index 37d57286bc4..4704ae66430 100644
--- a/chromium/v8/src/snapshot/default-deserializer-allocator.cc
+++ b/chromium/v8/src/snapshot/default-deserializer-allocator.cc
@@ -45,7 +45,7 @@ Address DefaultDeserializerAllocator::AllocateRaw(AllocationSpace space,
} else {
DCHECK_LT(space, kNumberOfPreallocatedSpaces);
Address address = high_water_[space];
- DCHECK_NOT_NULL(address);
+ DCHECK_NE(address, kNullAddress);
high_water_[space] += size;
#ifdef DEBUG
// Assert that the current reserved chunk is still big enough.
@@ -125,7 +125,8 @@ void DefaultDeserializerAllocator::DecodeReservation(
DCHECK_EQ(0, reservations_[FIRST_SPACE].size());
int current_space = FIRST_SPACE;
for (auto& r : res) {
- reservations_[current_space].push_back({r.chunk_size(), NULL, NULL});
+ reservations_[current_space].push_back(
+ {r.chunk_size(), kNullAddress, kNullAddress});
if (r.is_last()) current_space++;
}
DCHECK_EQ(kNumberOfSpaces, current_space);
diff --git a/chromium/v8/src/snapshot/deserializer.cc b/chromium/v8/src/snapshot/deserializer.cc
index 6436228b20b..2090443c813 100644
--- a/chromium/v8/src/snapshot/deserializer.cc
+++ b/chromium/v8/src/snapshot/deserializer.cc
@@ -6,6 +6,7 @@
#include "src/assembler-inl.h"
#include "src/isolate.h"
+#include "src/objects/api-callbacks.h"
#include "src/objects/hash-table.h"
#include "src/objects/maybe-object.h"
#include "src/objects/string.h"
@@ -72,7 +73,7 @@ void Deserializer<AllocatorT>::VisitRootPointers(Root root,
// The space must be new space. Any other space would cause ReadChunk to try
// to update the remembered using nullptr as the address.
ReadData(reinterpret_cast<MaybeObject**>(start),
- reinterpret_cast<MaybeObject**>(end), NEW_SPACE, nullptr);
+ reinterpret_cast<MaybeObject**>(end), NEW_SPACE, kNullAddress);
}
template <class AllocatorT>
@@ -169,6 +170,7 @@ HeapObject* Deserializer<AllocatorT>::PostProcessNewObject(HeapObject* obj,
DCHECK(CanBeDeferred(obj));
}
}
+
if (obj->IsAllocationSite()) {
// Allocation sites are present in the snapshot, and must be linked into
// a list at deserialization time.
@@ -190,13 +192,13 @@ HeapObject* Deserializer<AllocatorT>::PostProcessNewObject(HeapObject* obj,
new_code_objects_.push_back(Code::cast(obj));
}
} else if (obj->IsAccessorInfo()) {
- if (isolate_->external_reference_redirector()) {
- accessor_infos_.push_back(AccessorInfo::cast(obj));
- }
+#ifdef USE_SIMULATOR
+ accessor_infos_.push_back(AccessorInfo::cast(obj));
+#endif
} else if (obj->IsCallHandlerInfo()) {
- if (isolate_->external_reference_redirector()) {
- call_handler_infos_.push_back(CallHandlerInfo::cast(obj));
- }
+#ifdef USE_SIMULATOR
+ call_handler_infos_.push_back(CallHandlerInfo::cast(obj));
+#endif
} else if (obj->IsExternalString()) {
if (obj->map() == isolate_->heap()->native_source_string_map()) {
ExternalOneByteString* string = ExternalOneByteString::cast(obj);
@@ -208,7 +210,7 @@ HeapObject* Deserializer<AllocatorT>::PostProcessNewObject(HeapObject* obj,
ExternalString* string = ExternalString::cast(obj);
uint32_t index = string->resource_as_uint32();
Address address =
- reinterpret_cast<Address>(isolate_->api_external_references()[index]);
+ static_cast<Address>(isolate_->api_external_references()[index]);
string->set_address_as_resource(address);
}
isolate_->heap()->RegisterExternalString(String::cast(obj));
@@ -353,7 +355,7 @@ Object* Deserializer<AllocatorT>::ReadDataSingle() {
MaybeObject** start = &o;
MaybeObject** end = start + 1;
int source_space = NEW_SPACE;
- Address current_object = nullptr;
+ Address current_object = kNullAddress;
CHECK(ReadData(start, end, source_space, current_object));
HeapObject* heap_object;
@@ -380,7 +382,7 @@ bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
// are no new space objects in current boot snapshots, so it's not needed,
// but that may change.
bool write_barrier_needed =
- (current_object_address != nullptr && source_space != NEW_SPACE &&
+ (current_object_address != kNullAddress && source_space != NEW_SPACE &&
source_space != CODE_SPACE);
while (current < limit) {
byte data = source_.Get();
@@ -488,7 +490,7 @@ bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
case kSkip: {
int size = source_.GetInt();
current = reinterpret_cast<MaybeObject**>(
- reinterpret_cast<intptr_t>(current) + size);
+ reinterpret_cast<Address>(current) + size);
break;
}
@@ -538,20 +540,21 @@ bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
CHECK_NOT_NULL(isolate->embedded_blob());
EmbeddedData d = EmbeddedData::FromBlob();
- const uint8_t* address = d.InstructionStartOfBuiltin(builtin_index);
- CHECK_NOT_NULL(address);
+ Address address = d.InstructionStartOfBuiltin(builtin_index);
+ CHECK_NE(kNullAddress, address);
if (RelocInfo::OffHeapTargetIsCodedSpecially()) {
Address location_of_branch_data = reinterpret_cast<Address>(current);
+ int skip = Assembler::deserialization_special_target_size(
+ location_of_branch_data);
Assembler::deserialization_set_special_target_at(
location_of_branch_data,
Code::cast(HeapObject::FromAddress(current_object_address)),
- const_cast<Address>(address));
- location_of_branch_data += Assembler::kSpecialTargetSize;
+ address);
+ location_of_branch_data += skip;
current = reinterpret_cast<MaybeObject**>(location_of_branch_data);
} else {
- MaybeObject* o =
- reinterpret_cast<MaybeObject*>(const_cast<uint8_t*>(address));
+ MaybeObject* o = reinterpret_cast<MaybeObject*>(address);
UnalignedCopy(current, &o);
current++;
}
@@ -601,8 +604,9 @@ bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
// Do not move current.
case kVariableRawCode: {
int size_in_bytes = source_.GetInt();
- source_.CopyRaw(current_object_address + Code::kDataStart,
- size_in_bytes);
+ source_.CopyRaw(
+ reinterpret_cast<byte*>(current_object_address + Code::kDataStart),
+ size_in_bytes);
break;
}
@@ -636,7 +640,7 @@ bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
DCHECK_WITH_MSG(
reference_id < num_api_references_,
"too few external references provided through the API");
- address = reinterpret_cast<Address>(
+ address = static_cast<Address>(
isolate->api_external_references()[reference_id]);
} else {
address = reinterpret_cast<Address>(NoExternalReferencesCallback);
@@ -750,24 +754,6 @@ bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
return true;
}
-namespace {
-
-int FixupJSConstructStub(Isolate* isolate, int builtin_id) {
- if (isolate->serializer_enabled()) return builtin_id;
-
- if (FLAG_harmony_restrict_constructor_return &&
- builtin_id == Builtins::kJSConstructStubGenericUnrestrictedReturn) {
- return Builtins::kJSConstructStubGenericRestrictedReturn;
- } else if (!FLAG_harmony_restrict_constructor_return &&
- builtin_id == Builtins::kJSConstructStubGenericRestrictedReturn) {
- return Builtins::kJSConstructStubGenericUnrestrictedReturn;
- } else {
- return builtin_id;
- }
-}
-
-} // namespace
-
template <class AllocatorT>
void** Deserializer<AllocatorT>::ReadExternalReferenceCase(
HowToCode how, Isolate* isolate, void** current,
@@ -779,10 +765,12 @@ void** Deserializer<AllocatorT>::ReadExternalReferenceCase(
if (how == kFromCode) {
Address location_of_branch_data = reinterpret_cast<Address>(current);
+ int skip =
+ Assembler::deserialization_special_target_size(location_of_branch_data);
Assembler::deserialization_set_special_target_at(
location_of_branch_data,
Code::cast(HeapObject::FromAddress(current_object_address)), address);
- location_of_branch_data += Assembler::kSpecialTargetSize;
+ location_of_branch_data += skip;
current = reinterpret_cast<void**>(location_of_branch_data);
} else {
void* new_current = reinterpret_cast<void**>(address);
@@ -838,8 +826,7 @@ MaybeObject** Deserializer<AllocatorT>::ReadDataCase(
emit_write_barrier = isolate->heap()->InNewSpace(new_object);
} else {
DCHECK_EQ(where, kBuiltin);
- int raw_id = MaybeReplaceWithDeserializeLazy(source_.GetInt());
- int builtin_id = FixupJSConstructStub(isolate, raw_id);
+ int builtin_id = MaybeReplaceWithDeserializeLazy(source_.GetInt());
new_object = isolate->builtins()->builtin(builtin_id);
emit_write_barrier = false;
}
@@ -861,11 +848,13 @@ MaybeObject** Deserializer<AllocatorT>::ReadDataCase(
if (how == kFromCode) {
DCHECK(!allocator()->next_reference_is_weak());
Address location_of_branch_data = reinterpret_cast<Address>(current);
+ int skip = Assembler::deserialization_special_target_size(
+ location_of_branch_data);
Assembler::deserialization_set_special_target_at(
location_of_branch_data,
Code::cast(HeapObject::FromAddress(current_object_address)),
reinterpret_cast<Address>(new_object));
- location_of_branch_data += Assembler::kSpecialTargetSize;
+ location_of_branch_data += skip;
current = reinterpret_cast<MaybeObject**>(location_of_branch_data);
current_was_incremented = true;
} else {
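
Several deserializer.cc hunks above replace the fixed Assembler::kSpecialTargetSize with a per-location Assembler::deserialization_special_target_size() query before patching a branch target. A hedged sketch of that patch-then-advance pattern; the two helpers are hypothetical stand-ins for the real assembler calls:

#include <cstdint>

using Address = uintptr_t;

// Stand-in for Assembler::deserialization_special_target_size(): the width of
// the encoded target may depend on the instruction at `location`.
int SpecialTargetSize(Address /*location*/) { return 4; }  // assumed 4 bytes

// Stand-in for Assembler::deserialization_set_special_target_at().
void SetSpecialTargetAt(Address /*location*/, Address /*target*/) {}

Address PatchAndAdvance(Address location, Address target) {
  int skip = SpecialTargetSize(location);  // ask first: the size can vary
  SetSpecialTargetAt(location, target);    // patch the encoded target
  return location + skip;                  // advance past what was written
}

int main() { return PatchAndAdvance(0x1000, 0x2000) == 0x1004 ? 0 : 1; }
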
diff --git a/chromium/v8/src/snapshot/partial-deserializer.cc b/chromium/v8/src/snapshot/partial-deserializer.cc
index afa6aa5fc5e..626106a3532 100644
--- a/chromium/v8/src/snapshot/partial-deserializer.cc
+++ b/chromium/v8/src/snapshot/partial-deserializer.cc
@@ -39,7 +39,7 @@ MaybeHandle<Object> PartialDeserializer::Deserialize(
DisallowHeapAllocation no_gc;
// Keep track of the code space start and end pointers in case new
// code objects were unserialized
- OldSpace* code_space = isolate->heap()->code_space();
+ CodeSpace* code_space = isolate->heap()->code_space();
Address start_address = code_space->top();
Object* root;
VisitRootPointer(Root::kPartialSnapshotCache, nullptr, &root);
diff --git a/chromium/v8/src/snapshot/serializer-common.cc b/chromium/v8/src/snapshot/serializer-common.cc
index d4f0c9eff72..d5a8cf1273b 100644
--- a/chromium/v8/src/snapshot/serializer-common.cc
+++ b/chromium/v8/src/snapshot/serializer-common.cc
@@ -34,7 +34,7 @@ ExternalReferenceEncoder::ExternalReferenceEncoder(Isolate* isolate) {
const intptr_t* api_references = isolate->api_external_references();
if (api_references == nullptr) return;
for (uint32_t i = 0; api_references[i] != 0; ++i) {
- Address addr = reinterpret_cast<Address>(api_references[i]);
+ Address addr = static_cast<Address>(api_references[i]);
// Ignore duplicate references.
// This can happen due to ICF. See http://crbug.com/726896.
if (map_->Get(addr).IsNothing()) map_->Set(addr, Value::Encode(i, true));
@@ -47,10 +47,11 @@ ExternalReferenceEncoder::~ExternalReferenceEncoder() {
if (!i::FLAG_external_reference_stats) return;
if (api_references_ == nullptr) return;
for (uint32_t i = 0; api_references_[i] != 0; ++i) {
- Address addr = reinterpret_cast<Address>(api_references_[i]);
+ Address addr = static_cast<Address>(api_references_[i]);
DCHECK(map_->Get(addr).IsJust());
- v8::base::OS::Print("index=%5d count=%5d %-60s\n", i, count_[i],
- ExternalReferenceTable::ResolveSymbol(addr));
+ v8::base::OS::Print(
+ "index=%5d count=%5d %-60s\n", i, count_[i],
+ ExternalReferenceTable::ResolveSymbol(reinterpret_cast<void*>(addr)));
}
#endif // DEBUG
}
@@ -70,7 +71,7 @@ ExternalReferenceEncoder::Value ExternalReferenceEncoder::Encode(
Address address) {
Maybe<uint32_t> maybe_index = map_->Get(address);
if (maybe_index.IsNothing()) {
- void* addr = address;
+ void* addr = reinterpret_cast<void*>(address);
v8::base::OS::PrintError("Unknown external reference %p.\n", addr);
v8::base::OS::PrintError("%s", ExternalReferenceTable::ResolveSymbol(addr));
v8::base::OS::Abort();
@@ -86,8 +87,9 @@ const char* ExternalReferenceEncoder::NameOfAddress(Isolate* isolate,
Address address) const {
Maybe<uint32_t> maybe_index = map_->Get(address);
if (maybe_index.IsNothing()) return "<unknown>";
- return isolate->heap()->external_reference_table()->name(
- maybe_index.FromJust());
+ Value value(maybe_index.FromJust());
+ if (value.is_from_api()) return "<from api>";
+ return isolate->heap()->external_reference_table()->name(value.index());
}
void SerializedData::AllocateData(uint32_t size) {
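
NameOfAddress now reports "<from api>" for references registered through the API instead of indexing the internal external-reference table with an API index. The encoder's Value packs a 31-bit table index plus a 1-bit is_from_api flag into a uint32_t; a small sketch of an equivalent layout (names illustrative, not V8's BitField helpers):

#include <cassert>
#include <cstdint>

struct Value {
  uint32_t bits;
  static Value Encode(uint32_t index, bool from_api) {
    return Value{(index & 0x7fffffffu) | (from_api ? 1u << 31 : 0u)};
  }
  uint32_t index() const { return bits & 0x7fffffffu; }   // low 31 bits
  bool is_from_api() const { return (bits >> 31) != 0; }  // top bit
};

int main() {
  Value v = Value::Encode(42, /*from_api=*/true);
  assert(v.index() == 42 && v.is_from_api());
  return 0;
}
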
diff --git a/chromium/v8/src/snapshot/serializer-common.h b/chromium/v8/src/snapshot/serializer-common.h
index 26c2da2247a..566046abd21 100644
--- a/chromium/v8/src/snapshot/serializer-common.h
+++ b/chromium/v8/src/snapshot/serializer-common.h
@@ -15,6 +15,7 @@
namespace v8 {
namespace internal {
+class CallHandlerInfo;
class Isolate;
class ExternalReferenceEncoder {
@@ -29,7 +30,6 @@ class ExternalReferenceEncoder {
bool is_from_api() const { return IsFromAPI::decode(value_); }
uint32_t index() const { return Index::decode(value_); }
- uint32_t raw() const { return value_; }
private:
class Index : public BitField<uint32_t, 0, 31> {};
@@ -319,11 +319,12 @@ class SerializedData {
protected:
void SetHeaderValue(uint32_t offset, uint32_t value) {
- WriteLittleEndianValue(data_ + offset, value);
+ WriteLittleEndianValue(reinterpret_cast<Address>(data_) + offset, value);
}
uint32_t GetHeaderValue(uint32_t offset) const {
- return ReadLittleEndianValue<uint32_t>(data_ + offset);
+ return ReadLittleEndianValue<uint32_t>(reinterpret_cast<Address>(data_) +
+ offset);
}
void AllocateData(uint32_t size);
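
SetHeaderValue/GetHeaderValue now hand the little-endian helpers an Address (reinterpret_cast<Address>(data_) + offset) instead of a byte pointer. A self-contained sketch of little-endian header access at an integral address; WriteLE32/ReadLE32 are illustrative stand-ins for V8's WriteLittleEndianValue/ReadLittleEndianValue:

#include <cassert>
#include <cstdint>

using Address = uintptr_t;

void WriteLE32(Address addr, uint32_t value) {
  uint8_t* p = reinterpret_cast<uint8_t*>(addr);
  for (int i = 0; i < 4; i++) p[i] = static_cast<uint8_t>(value >> (8 * i));
}

uint32_t ReadLE32(Address addr) {
  const uint8_t* p = reinterpret_cast<const uint8_t*>(addr);
  uint32_t v = 0;
  for (int i = 0; i < 4; i++) v |= static_cast<uint32_t>(p[i]) << (8 * i);
  return v;
}

int main() {
  uint8_t header[16] = {};
  Address base = reinterpret_cast<Address>(header);
  WriteLE32(base + 4, 0xdeadbeef);           // SetHeaderValue(4, value)
  assert(ReadLE32(base + 4) == 0xdeadbeef);  // GetHeaderValue(4)
  return 0;
}
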
diff --git a/chromium/v8/src/snapshot/serializer.cc b/chromium/v8/src/snapshot/serializer.cc
index c093707e5e3..5a0828c7d75 100644
--- a/chromium/v8/src/snapshot/serializer.cc
+++ b/chromium/v8/src/snapshot/serializer.cc
@@ -25,13 +25,19 @@ Serializer<AllocatorT>::Serializer(Isolate* isolate)
if (FLAG_serialization_statistics) {
instance_type_count_ = NewArray<int>(kInstanceTypes);
instance_type_size_ = NewArray<size_t>(kInstanceTypes);
+ read_only_instance_type_count_ = NewArray<int>(kInstanceTypes);
+ read_only_instance_type_size_ = NewArray<size_t>(kInstanceTypes);
for (int i = 0; i < kInstanceTypes; i++) {
instance_type_count_[i] = 0;
instance_type_size_[i] = 0;
+ read_only_instance_type_count_[i] = 0;
+ read_only_instance_type_size_[i] = 0;
}
} else {
instance_type_count_ = nullptr;
instance_type_size_ = nullptr;
+ read_only_instance_type_count_ = nullptr;
+ read_only_instance_type_size_ = nullptr;
}
#endif // OBJECT_PRINT
}
@@ -43,16 +49,24 @@ Serializer<AllocatorT>::~Serializer() {
if (instance_type_count_ != nullptr) {
DeleteArray(instance_type_count_);
DeleteArray(instance_type_size_);
+ DeleteArray(read_only_instance_type_count_);
+ DeleteArray(read_only_instance_type_size_);
}
#endif // OBJECT_PRINT
}
#ifdef OBJECT_PRINT
template <class AllocatorT>
-void Serializer<AllocatorT>::CountInstanceType(Map* map, int size) {
+void Serializer<AllocatorT>::CountInstanceType(Map* map, int size,
+ AllocationSpace space) {
int instance_type = map->instance_type();
- instance_type_count_[instance_type]++;
- instance_type_size_[instance_type] += size;
+ if (space != RO_SPACE) {
+ instance_type_count_[instance_type]++;
+ instance_type_size_[instance_type] += size;
+ } else {
+ read_only_instance_type_count_[instance_type]++;
+ read_only_instance_type_size_[instance_type] += size;
+ }
}
#endif // OBJECT_PRINT
@@ -72,6 +86,21 @@ void Serializer<AllocatorT>::OutputStatistics(const char* name) {
}
INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
#undef PRINT_INSTANCE_TYPE
+ size_t read_only_total = 0;
+#define UPDATE_TOTAL(Name) \
+ read_only_total += read_only_instance_type_size_[Name];
+ INSTANCE_TYPE_LIST(UPDATE_TOTAL)
+#undef UPDATE_TOTAL
+ if (read_only_total > 0) {
+ PrintF("\n Read Only Instance types (count and bytes):\n");
+#define PRINT_INSTANCE_TYPE(Name) \
+ if (read_only_instance_type_count_[Name]) { \
+ PrintF("%10d %10" PRIuS " %s\n", read_only_instance_type_count_[Name], \
+ read_only_instance_type_size_[Name], #Name); \
+ }
+ INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
+#undef PRINT_INSTANCE_TYPE
+ }
PrintF("\n");
#endif // OBJECT_PRINT
}
@@ -319,9 +348,11 @@ template <class AllocatorT>
Code* Serializer<AllocatorT>::CopyCode(Code* code) {
code_buffer_.clear(); // Clear buffer without deleting backing store.
int size = code->CodeSize();
- code_buffer_.insert(code_buffer_.end(), code->address(),
- code->address() + size);
- return Code::cast(HeapObject::FromAddress(&code_buffer_.front()));
+ code_buffer_.insert(code_buffer_.end(),
+ reinterpret_cast<byte*>(code->address()),
+ reinterpret_cast<byte*>(code->address() + size));
+ return Code::cast(HeapObject::FromAddress(
+ reinterpret_cast<Address>(&code_buffer_.front())));
}
template <class AllocatorT>
@@ -360,7 +391,7 @@ void Serializer<AllocatorT>::ObjectSerializer::SerializePrologue(
#ifdef OBJECT_PRINT
if (FLAG_serialization_statistics) {
- serializer_->CountInstanceType(map, size);
+ serializer_->CountInstanceType(map, size, space);
}
#endif // OBJECT_PRINT
@@ -526,7 +557,7 @@ void Serializer<
sink_->PutInt(bytes_to_output, "length");
// Serialize string header (except for map).
- Address string_start = string->address();
+ uint8_t* string_start = reinterpret_cast<uint8_t*>(string->address());
for (int i = HeapObject::kHeaderSize; i < SeqString::kHeaderSize; i++) {
sink_->PutSection(string_start[i], "StringHeader");
}
@@ -578,12 +609,16 @@ void Serializer<AllocatorT>::ObjectSerializer::Serialize() {
if (object_->IsExternalString()) {
SerializeExternalString();
return;
- } else if (object_->IsSeqOneByteString()) {
- // Clear padding bytes at the end. Done here to avoid having to do this
- // at allocation sites in generated code.
- SeqOneByteString::cast(object_)->clear_padding();
- } else if (object_->IsSeqTwoByteString()) {
- SeqTwoByteString::cast(object_)->clear_padding();
+ } else if (!serializer_->isolate()->heap()->InReadOnlySpace(object_)) {
+ // Only clear padding for strings outside RO_SPACE. RO_SPACE should have
+ // been cleared elsewhere.
+ if (object_->IsSeqOneByteString()) {
+ // Clear padding bytes at the end. Done here to avoid having to do this
+ // at allocation sites in generated code.
+ SeqOneByteString::cast(object_)->clear_padding();
+ } else if (object_->IsSeqTwoByteString()) {
+ SeqTwoByteString::cast(object_)->clear_padding();
+ }
}
if (object_->IsJSTypedArray()) {
SerializeJSTypedArray();
@@ -776,7 +811,7 @@ void Serializer<AllocatorT>::ObjectSerializer::VisitExternalReference(
"ExternalRef");
}
sink_->PutInt(skip, "SkipB4ExternalRef");
- DCHECK_NOT_NULL(target); // Code does not reference null.
+ DCHECK_NE(target, kNullAddress); // Code does not reference null.
sink_->PutInt(encoded_reference.index(), "reference index");
bytes_processed_so_far_ += rinfo->target_address_size();
}
@@ -791,18 +826,18 @@ void Serializer<AllocatorT>::ObjectSerializer::VisitInternalReference(
// inline. That would cause the skip to be negative. Instead, we store the
// offset from code entry.
Address entry = Code::cast(object_)->entry();
- intptr_t pc_offset = rinfo->target_internal_reference_address() - entry;
- intptr_t target_offset = rinfo->target_internal_reference() - entry;
- DCHECK(0 <= pc_offset &&
- pc_offset <= Code::cast(object_)->raw_instruction_size());
- DCHECK(0 <= target_offset &&
- target_offset <= Code::cast(object_)->raw_instruction_size());
+ DCHECK_GE(rinfo->target_internal_reference_address(), entry);
+ uintptr_t pc_offset = rinfo->target_internal_reference_address() - entry;
+ DCHECK_LE(pc_offset, Code::cast(object_)->raw_instruction_size());
+ DCHECK_GE(rinfo->target_internal_reference(), entry);
+ uintptr_t target_offset = rinfo->target_internal_reference() - entry;
+ DCHECK_LE(target_offset, Code::cast(object_)->raw_instruction_size());
sink_->Put(rinfo->rmode() == RelocInfo::INTERNAL_REFERENCE
? kInternalReference
: kInternalReferenceEncoded,
"InternalRef");
- sink_->PutInt(static_cast<uintptr_t>(pc_offset), "internal ref address");
- sink_->PutInt(static_cast<uintptr_t>(target_offset), "internal ref value");
+ sink_->PutInt(pc_offset, "internal ref address");
+ sink_->PutInt(target_offset, "internal ref value");
}
template <class AllocatorT>
@@ -827,7 +862,7 @@ void Serializer<AllocatorT>::ObjectSerializer::VisitOffHeapTarget(
STATIC_ASSERT(EmbeddedData::kTableSize == Builtins::builtin_count);
CHECK(Builtins::IsEmbeddedBuiltin(host));
Address addr = rinfo->target_off_heap_target();
- CHECK_NOT_NULL(addr);
+ CHECK_NE(kNullAddress, addr);
CHECK_NOT_NULL(
InstructionStream::TryLookupCode(serializer_->isolate(), addr));
}
@@ -842,6 +877,46 @@ void Serializer<AllocatorT>::ObjectSerializer::VisitOffHeapTarget(
#endif
}
+namespace {
+class CompareRelocInfo {
+ public:
+ bool operator()(RelocInfo x, RelocInfo y) {
+ // Everything that does not use target_address_address will compare equal.
+ Address x_num = 0;
+ Address y_num = 0;
+ if (HasTargetAddressAddress(x.rmode())) {
+ x_num = x.target_address_address();
+ }
+ if (HasTargetAddressAddress(y.rmode())) {
+ y_num = y.target_address_address();
+ }
+ return x_num > y_num;
+ }
+
+ private:
+ static bool HasTargetAddressAddress(RelocInfo::Mode mode) {
+ return RelocInfo::IsEmbeddedObject(mode) || RelocInfo::IsCodeTarget(mode) ||
+ RelocInfo::IsExternalReference(mode) ||
+ RelocInfo::IsRuntimeEntry(mode);
+ }
+};
+} // namespace
+
+template <class AllocatorT>
+void Serializer<AllocatorT>::ObjectSerializer::VisitRelocInfo(
+ RelocIterator* it) {
+ std::priority_queue<RelocInfo, std::vector<RelocInfo>, CompareRelocInfo>
+ reloc_queue;
+ for (; !it->done(); it->next()) {
+ reloc_queue.push(*it->rinfo());
+ }
+ while (!reloc_queue.empty()) {
+ RelocInfo rinfo = reloc_queue.top();
+ reloc_queue.pop();
+ rinfo.Visit(this);
+ }
+}
+
template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitCodeTarget(
Code* host, RelocInfo* rinfo) {
@@ -872,23 +947,28 @@ void Serializer<AllocatorT>::ObjectSerializer::OutputRawData(Address up_to) {
}
#ifdef MEMORY_SANITIZER
// Check that we do not serialize uninitialized memory.
- __msan_check_mem_is_initialized(object_start + base, bytes_to_output);
+ __msan_check_mem_is_initialized(
+ reinterpret_cast<void*>(object_start + base), bytes_to_output);
#endif // MEMORY_SANITIZER
if (object_->IsBytecodeArray()) {
// The code age byte can be changed concurrently by GC.
const int bytes_to_age_byte = BytecodeArray::kBytecodeAgeOffset - base;
if (0 <= bytes_to_age_byte && bytes_to_age_byte < bytes_to_output) {
- sink_->PutRaw(object_start + base, bytes_to_age_byte, "Bytes");
+ sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
+ bytes_to_age_byte, "Bytes");
byte bytecode_age = BytecodeArray::kNoAgeBytecodeAge;
sink_->PutRaw(&bytecode_age, 1, "Bytes");
const int bytes_written = bytes_to_age_byte + 1;
- sink_->PutRaw(object_start + base + bytes_written,
- bytes_to_output - bytes_written, "Bytes");
+ sink_->PutRaw(
+ reinterpret_cast<byte*>(object_start + base + bytes_written),
+ bytes_to_output - bytes_written, "Bytes");
} else {
- sink_->PutRaw(object_start + base, bytes_to_output, "Bytes");
+ sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
+ bytes_to_output, "Bytes");
}
} else {
- sink_->PutRaw(object_start + base, bytes_to_output, "Bytes");
+ sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
+ bytes_to_output, "Bytes");
}
}
}
@@ -900,7 +980,8 @@ int Serializer<AllocatorT>::ObjectSerializer::SkipTo(Address to) {
int to_skip = up_to_offset - bytes_processed_so_far_;
bytes_processed_so_far_ += to_skip;
// This assert will fail if the reloc info gives us the target_address_address
- // locations in a non-ascending order. Luckily that doesn't happen.
+ // locations in a non-ascending order. We make sure this doesn't happen by
+ // sorting the relocation info.
DCHECK_GE(to_skip, 0);
return to_skip;
}
@@ -909,24 +990,22 @@ template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::OutputCode(int size) {
DCHECK_EQ(kPointerSize, bytes_processed_so_far_);
Code* code = Code::cast(object_);
- if (FLAG_predictable) {
- // To make snapshots reproducible, we make a copy of the code object
- // and wipe all pointers in the copy, which we then serialize.
- code = serializer_->CopyCode(code);
- int mode_mask = RelocInfo::kCodeTargetMask |
- RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
- RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
- RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
- RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
- RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED);
- for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
- RelocInfo* rinfo = it.rinfo();
- rinfo->WipeOut();
- }
- // We need to wipe out the header fields *after* wiping out the
- // relocations, because some of these fields are needed for the latter.
- code->WipeOutHeader();
+ // To make snapshots reproducible, we make a copy of the code object
+ // and wipe all pointers in the copy, which we then serialize.
+ code = serializer_->CopyCode(code);
+ int mode_mask = RelocInfo::kCodeTargetMask |
+ RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
+ RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
+ RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
+ RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
+ RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED);
+ for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
+ RelocInfo* rinfo = it.rinfo();
+ rinfo->WipeOut();
}
+ // We need to wipe out the header fields *after* wiping out the
+ // relocations, because some of these fields are needed for the latter.
+ code->WipeOutHeader();
Address start = code->address() + Code::kDataStart;
int bytes_to_output = size - Code::kDataStart;
@@ -936,9 +1015,10 @@ void Serializer<AllocatorT>::ObjectSerializer::OutputCode(int size) {
#ifdef MEMORY_SANITIZER
// Check that we do not serialize uninitialized memory.
- __msan_check_mem_is_initialized(start, bytes_to_output);
+ __msan_check_mem_is_initialized(reinterpret_cast<void*>(start),
+ bytes_to_output);
#endif // MEMORY_SANITIZER
- sink_->PutRaw(start, bytes_to_output, "Code");
+ sink_->PutRaw(reinterpret_cast<byte*>(start), bytes_to_output, "Code");
}
// Explicit instantiation.
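
The new VisitRelocInfo drains relocation entries through a std::priority_queue whose comparator returns x_num > y_num, turning the (by default max-heap) queue into a min-heap: entries pop in ascending target_address_address order, which is the ordering the updated SkipTo comment relies on. A tiny demonstration of that comparator trick on plain integers:

#include <cassert>
#include <queue>
#include <vector>

// A greater-than comparator makes std::priority_queue a min-heap, so popping
// yields ascending order -- the same trick CompareRelocInfo plays with
// target_address_address.
struct Ascending {
  bool operator()(int x, int y) const { return x > y; }
};

int main() {
  std::priority_queue<int, std::vector<int>, Ascending> q;
  for (int v : {30, 10, 20}) q.push(v);
  int prev = -1;
  while (!q.empty()) {
    assert(q.top() > prev);  // strictly ascending pop order
    prev = q.top();
    q.pop();
  }
  return 0;
}
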
diff --git a/chromium/v8/src/snapshot/serializer.h b/chromium/v8/src/snapshot/serializer.h
index 9f8db6ccfe2..c387bc046a1 100644
--- a/chromium/v8/src/snapshot/serializer.h
+++ b/chromium/v8/src/snapshot/serializer.h
@@ -21,11 +21,11 @@ namespace internal {
class CodeAddressMap : public CodeEventLogger {
public:
explicit CodeAddressMap(Isolate* isolate) : isolate_(isolate) {
- isolate->logger()->addCodeEventListener(this);
+ isolate->logger()->AddCodeEventListener(this);
}
~CodeAddressMap() override {
- isolate_->logger()->removeCodeEventListener(this);
+ isolate_->logger()->RemoveCodeEventListener(this);
}
void CodeMoveEvent(AbstractCode* from, Address to) override {
@@ -96,12 +96,13 @@ class CodeAddressMap : public CodeEventLogger {
}
base::HashMap::Entry* FindOrCreateEntry(Address code_address) {
- return impl_.LookupOrInsert(code_address,
- ComputePointerHash(code_address));
+ return impl_.LookupOrInsert(reinterpret_cast<void*>(code_address),
+ ComputeAddressHash(code_address));
}
base::HashMap::Entry* FindEntry(Address code_address) {
- return impl_.Lookup(code_address, ComputePointerHash(code_address));
+ return impl_.Lookup(reinterpret_cast<void*>(code_address),
+ ComputeAddressHash(code_address));
}
void RemoveEntry(base::HashMap::Entry* entry) {
@@ -225,7 +226,7 @@ class Serializer : public SerializerDeserializer {
void OutputStatistics(const char* name);
#ifdef OBJECT_PRINT
- void CountInstanceType(Map* map, int size);
+ void CountInstanceType(Map* map, int size, AllocationSpace space);
#endif // OBJECT_PRINT
#ifdef DEBUG
@@ -255,6 +256,8 @@ class Serializer : public SerializerDeserializer {
static const int kInstanceTypes = LAST_TYPE + 1;
int* instance_type_count_;
size_t* instance_type_size_;
+ int* read_only_instance_type_count_;
+ size_t* read_only_instance_type_size_;
#endif // OBJECT_PRINT
#ifdef DEBUG
@@ -266,6 +269,8 @@ class Serializer : public SerializerDeserializer {
DISALLOW_COPY_AND_ASSIGN(Serializer);
};
+class RelocInfoIterator;
+
template <class AllocatorT>
class Serializer<AllocatorT>::ObjectSerializer : public ObjectVisitor {
public:
@@ -299,6 +304,8 @@ class Serializer<AllocatorT>::ObjectSerializer : public ObjectVisitor {
void VisitCodeTarget(Code* host, RelocInfo* target) override;
void VisitRuntimeEntry(Code* host, RelocInfo* reloc) override;
void VisitOffHeapTarget(Code* host, RelocInfo* target) override;
+ // Relocation info needs to be visited sorted by target_address_address.
+ void VisitRelocInfo(RelocIterator* it) override;
private:
void SerializePrologue(AllocationSpace space, int size, Map* map);
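
CodeAddressMap keeps using base::HashMap, so the now-integral code_address is cast back to void* for the key and hashed with ComputeAddressHash rather than ComputePointerHash. With standard containers the same idea is just an address-keyed map; a sketch, not V8's implementation:

#include <cstdint>
#include <string>
#include <unordered_map>

using Address = uintptr_t;

int main() {
  // Map from code-object address to recorded name, keyed on the integer
  // address (std::hash<uintptr_t> plays the ComputeAddressHash role here).
  std::unordered_map<Address, std::string> names;
  names[0x40000] = "example_builtin";  // hypothetical entry
  return names.count(0x40000) == 1 ? 0 : 1;
}
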
diff --git a/chromium/v8/src/snapshot/snapshot-common.cc b/chromium/v8/src/snapshot/snapshot-common.cc
index a33c468bb89..902bda4a0f2 100644
--- a/chromium/v8/src/snapshot/snapshot-common.cc
+++ b/chromium/v8/src/snapshot/snapshot-common.cc
@@ -343,11 +343,11 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
if (Builtins::IsIsolateIndependent(i)) {
DCHECK(!Builtins::IsLazy(i));
- // Sanity-check that the given builtin is process-independent and does not
+ // Sanity-check that the given builtin is isolate-independent and does not
// use the trampoline register in its calling convention.
if (!code->IsProcessIndependent()) {
saw_unsafe_builtin = true;
- fprintf(stderr, "%s is not process-independent.\n", Builtins::name(i));
+ fprintf(stderr, "%s is not isolate-independent.\n", Builtins::name(i));
}
if (BuiltinAliasesOffHeapTrampolineRegister(isolate, code)) {
saw_unsafe_builtin = true;
@@ -368,7 +368,11 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
lengths[i] = 0;
}
}
- CHECK(!saw_unsafe_builtin);
+ CHECK_WITH_MSG(
+ !saw_unsafe_builtin,
+ "One or more builtins marked as isolate-independent either contains "
+ "isolate-dependent code or aliases the off-heap trampoline register. "
+ "If in doubt, ask jgruber@");
const uint32_t blob_size = RawDataOffset() + raw_data_size;
uint8_t* blob = new uint8_t[blob_size];
@@ -389,11 +393,21 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
uint8_t* dst = blob + RawDataOffset() + offset;
DCHECK_LE(RawDataOffset() + offset + code->raw_instruction_size(),
blob_size);
- std::memcpy(dst, code->raw_instruction_start(),
+ std::memcpy(dst, reinterpret_cast<uint8_t*>(code->raw_instruction_start()),
code->raw_instruction_size());
}
- return {blob, blob_size};
+ EmbeddedData d(blob, blob_size);
+
+ // Hash the blob and store the result.
+ STATIC_ASSERT(HashSize() == kSizetSize);
+ const size_t hash = d.CreateHash();
+ std::memcpy(blob + HashOffset(), &hash, HashSize());
+
+ DCHECK_EQ(hash, d.CreateHash());
+ DCHECK_EQ(hash, d.Hash());
+
+ return d;
}
EmbeddedData EmbeddedData::FromBlob() {
@@ -404,13 +418,13 @@ EmbeddedData EmbeddedData::FromBlob() {
return {data, size};
}
-const uint8_t* EmbeddedData::InstructionStartOfBuiltin(int i) const {
+Address EmbeddedData::InstructionStartOfBuiltin(int i) const {
DCHECK(Builtins::IsBuiltinId(i));
-
const uint32_t* offsets = Offsets();
const uint8_t* result = RawData() + offsets[i];
- DCHECK_LT(result, data_ + size_);
- return result;
+ DCHECK_LE(result, data_ + size_);
+ DCHECK_IMPLIES(result == data_ + size_, InstructionSizeOfBuiltin(i) == 0);
+ return reinterpret_cast<Address>(result);
}
uint32_t EmbeddedData::InstructionSizeOfBuiltin(int i) const {
@@ -418,6 +432,12 @@ uint32_t EmbeddedData::InstructionSizeOfBuiltin(int i) const {
const uint32_t* lengths = Lengths();
return lengths[i];
}
+
+size_t EmbeddedData::CreateHash() const {
+ STATIC_ASSERT(HashOffset() == 0);
+ STATIC_ASSERT(HashSize() == kSizetSize);
+ return base::hash_range(data_ + HashSize(), data_ + size_);
+}
#endif
uint32_t Snapshot::ExtractNumContexts(const v8::StartupData* data) {
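
EmbeddedData::FromIsolate now hashes everything in the blob after the hash slot and stores the result at HashOffset() == 0, so Hash() can later be checked against CreateHash(). A hedged sketch of that store-and-verify layout; the FNV-1a helper below stands in for base::hash_range and the sizes are illustrative:

#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

// FNV-1a over a byte range, standing in for base::hash_range.
uint64_t HashRange(const uint8_t* begin, const uint8_t* end) {
  uint64_t h = 0xcbf29ce484222325ull;              // FNV-1a offset basis
  for (const uint8_t* p = begin; p < end; ++p)
    h = (h ^ *p) * 0x100000001b3ull;               // FNV-1a prime
  return h;
}

constexpr size_t kHashOffset = 0;
constexpr size_t kHashSize = sizeof(uint64_t);

int main() {
  std::vector<uint8_t> blob(64, 0xab);  // pretend payload after the hash slot
  // Hash everything past the hash slot and store the result at offset 0.
  uint64_t hash = HashRange(blob.data() + kHashSize, blob.data() + blob.size());
  std::memcpy(blob.data() + kHashOffset, &hash, kHashSize);
  // Verification mirrors DCHECK_EQ(hash, d.Hash()).
  uint64_t stored;
  std::memcpy(&stored, blob.data() + kHashOffset, kHashSize);
  assert(stored ==
         HashRange(blob.data() + kHashSize, blob.data() + blob.size()));
  return 0;
}
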
diff --git a/chromium/v8/src/snapshot/snapshot.h b/chromium/v8/src/snapshot/snapshot.h
index b86a4ac9f90..bbf5cd92e93 100644
--- a/chromium/v8/src/snapshot/snapshot.h
+++ b/chromium/v8/src/snapshot/snapshot.h
@@ -90,7 +90,7 @@ class EmbeddedData final {
void Dispose() { delete[] data_; }
- const uint8_t* InstructionStartOfBuiltin(int i) const;
+ Address InstructionStartOfBuiltin(int i) const;
uint32_t InstructionSizeOfBuiltin(int i) const;
bool ContainsBuiltin(int i) const { return InstructionSizeOfBuiltin(i) > 0; }
@@ -100,16 +100,26 @@ class EmbeddedData final {
return RoundUp<kCodeAlignment>(InstructionSizeOfBuiltin(i));
}
+ size_t CreateHash() const;
+ size_t Hash() const {
+ return *reinterpret_cast<const size_t*>(data_ + HashOffset());
+ }
+
// The layout of the blob is as follows:
//
- // [0] offset of instruction stream 0
- // ... offsets
- // [N] length of instruction stream 0
- // ... lengths
- // ... instruction streams
+ // [0] hash of the remaining blob
+ // [1] offset of instruction stream 0
+ // ... offsets
+ // [N + 1] length of instruction stream 0
+ // ... lengths
+ // ... instruction streams
static constexpr uint32_t kTableSize = Builtins::builtin_count;
- static constexpr uint32_t OffsetsOffset() { return 0; }
+ static constexpr uint32_t HashOffset() { return 0; }
+ static constexpr uint32_t HashSize() { return kSizetSize; }
+ static constexpr uint32_t OffsetsOffset() {
+ return HashOffset() + HashSize();
+ }
static constexpr uint32_t OffsetsSize() { return kUInt32Size * kTableSize; }
static constexpr uint32_t LengthsOffset() {
return OffsetsOffset() + OffsetsSize();
@@ -194,10 +204,11 @@ class Snapshot : public AllStatic {
uint32_t index);
static uint32_t GetHeaderValue(const v8::StartupData* data, uint32_t offset) {
- return ReadLittleEndianValue<uint32_t>(data->data + offset);
+ return ReadLittleEndianValue<uint32_t>(
+ reinterpret_cast<Address>(data->data) + offset);
}
static void SetHeaderValue(char* data, uint32_t offset, uint32_t value) {
- WriteLittleEndianValue(data + offset, value);
+ WriteLittleEndianValue(reinterpret_cast<Address>(data) + offset, value);
}
static void CheckVersion(const v8::StartupData* data);
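
The snapshot.h layout is expressed as chained constexpr offset helpers, with the new hash field at offset 0 pushing the offsets and lengths tables back by HashSize(). A compile-time sketch of that chaining; the table size and the trailing helpers are illustrative rather than V8's builtin_count and exact definitions:

#include <cstdint>

// Layout: [hash][offsets table][lengths table][instruction streams ...]
constexpr uint32_t kTableSize = 8;  // illustrative; V8 uses builtin_count
constexpr uint32_t HashOffset() { return 0; }
constexpr uint32_t HashSize() { return sizeof(uint64_t); }
constexpr uint32_t OffsetsOffset() { return HashOffset() + HashSize(); }
constexpr uint32_t OffsetsSize() { return sizeof(uint32_t) * kTableSize; }
constexpr uint32_t LengthsOffset() { return OffsetsOffset() + OffsetsSize(); }
constexpr uint32_t LengthsSize() { return sizeof(uint32_t) * kTableSize; }
constexpr uint32_t RawDataOffset() { return LengthsOffset() + LengthsSize(); }

static_assert(RawDataOffset() == HashSize() + 2 * sizeof(uint32_t) * kTableSize,
              "hash slot plus two kTableSize-entry uint32_t tables");

int main() { return 0; }
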
diff --git a/chromium/v8/src/snapshot/startup-serializer.cc b/chromium/v8/src/snapshot/startup-serializer.cc
index dc85a57e11a..15835c0bdd1 100644
--- a/chromium/v8/src/snapshot/startup-serializer.cc
+++ b/chromium/v8/src/snapshot/startup-serializer.cc
@@ -46,15 +46,18 @@ void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
if (SerializeBackReference(obj, how_to_code, where_to_point, skip)) return;
FlushSkip(skip);
+ bool use_simulator = false;
+#ifdef USE_SIMULATOR
+ use_simulator = true;
+#endif
- if (isolate()->external_reference_redirector() && obj->IsAccessorInfo()) {
+ if (use_simulator && obj->IsAccessorInfo()) {
// Wipe external reference redirects in the accessor info.
AccessorInfo* info = AccessorInfo::cast(obj);
Address original_address = Foreign::cast(info->getter())->foreign_address();
Foreign::cast(info->js_getter())->set_foreign_address(original_address);
accessor_infos_.push_back(info);
- } else if (isolate()->external_reference_redirector() &&
- obj->IsCallHandlerInfo()) {
+ } else if (use_simulator && obj->IsCallHandlerInfo()) {
CallHandlerInfo* info = CallHandlerInfo::cast(obj);
Address original_address =
Foreign::cast(info->callback())->foreign_address();