Diffstat (limited to 'chromium/v8/src')
-rw-r--r--  chromium/v8/src/ast/scopes.cc                                   1
-rw-r--r--  chromium/v8/src/codegen/arm/assembler-arm.cc                   19
-rw-r--r--  chromium/v8/src/compiler/backend/x64/code-generator-x64.cc     17
-rw-r--r--  chromium/v8/src/sandbox/external-pointer-table-inl.h            8
-rw-r--r--  chromium/v8/src/sandbox/external-pointer-table.cc              12
-rw-r--r--  chromium/v8/src/wasm/graph-builder-interface.cc                 2
6 files changed, 41 insertions, 18 deletions
diff --git a/chromium/v8/src/ast/scopes.cc b/chromium/v8/src/ast/scopes.cc
index 7bf3bcc7796..0f4d8d44ebb 100644
--- a/chromium/v8/src/ast/scopes.cc
+++ b/chromium/v8/src/ast/scopes.cc
@@ -929,6 +929,7 @@ void Scope::Snapshot::Reparent(DeclarationScope* new_parent) {
   // Move eval calls since Snapshot's creation into new_parent.
   if (outer_scope_->calls_eval_) {
     new_parent->RecordEvalCall();
+    outer_scope_->calls_eval_ = false;
     declaration_scope_->sloppy_eval_can_extend_vars_ = false;
   }
 }
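Note (ast/scopes.cc): the hunk above moves the pending eval-call record from the snapshot's outer scope into new_parent and then clears the flag on the outer scope, so the same eval call cannot be attributed to both scopes on a later pass. A minimal standalone sketch of that transfer pattern, using an invented ToyScope type rather than the real Scope/DeclarationScope classes:

#include <cassert>

// Hypothetical, heavily simplified stand-in for the real Scope types.
struct ToyScope {
  bool calls_eval = false;
  void RecordEvalCall() { calls_eval = true; }
};

// Transfer the "calls eval" fact from `outer` to `new_parent`, clearing it on
// `outer` so the eval call is not counted twice.
void ReparentEvalCall(ToyScope& outer, ToyScope& new_parent) {
  if (outer.calls_eval) {
    new_parent.RecordEvalCall();
    outer.calls_eval = false;  // mirrors the line added in this hunk
  }
}

int main() {
  ToyScope outer, parent;
  outer.RecordEvalCall();
  ReparentEvalCall(outer, parent);
  assert(parent.calls_eval && !outer.calls_eval);
}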
diff --git a/chromium/v8/src/codegen/arm/assembler-arm.cc b/chromium/v8/src/codegen/arm/assembler-arm.cc
index b2d7cad0963..3fe769a0ecc 100644
--- a/chromium/v8/src/codegen/arm/assembler-arm.cc
+++ b/chromium/v8/src/codegen/arm/assembler-arm.cc
@@ -1444,10 +1444,6 @@ int Assembler::branch_offset(Label* L) {
     L->link_to(pc_offset());
   }
-  // Block the emission of the constant pool, since the branch instruction must
-  // be emitted at the pc offset recorded by the label.
-  if (!is_const_pool_blocked()) BlockConstPoolFor(1);
-
   return target_pos - (pc_offset() + Instruction::kPcLoadDelta);
 }
@@ -1458,6 +1454,11 @@ void Assembler::b(int branch_offset, Condition cond, RelocInfo::Mode rmode) {
   int imm24 = branch_offset >> 2;
   const bool b_imm_check = is_int24(imm24);
   CHECK(b_imm_check);
+
+  // Block the emission of the constant pool before the next instruction.
+  // Otherwise the passed-in branch offset would be off.
+  BlockConstPoolFor(1);
+
   emit(cond | B27 | B25 | (imm24 & kImm24Mask));
   if (cond == al) {
@@ -1472,6 +1473,11 @@ void Assembler::bl(int branch_offset, Condition cond, RelocInfo::Mode rmode) {
   int imm24 = branch_offset >> 2;
   const bool bl_imm_check = is_int24(imm24);
   CHECK(bl_imm_check);
+
+  // Block the emission of the constant pool before the next instruction.
+  // Otherwise the passed-in branch offset would be off.
+  BlockConstPoolFor(1);
+
   emit(cond | B27 | B25 | B24 | (imm24 & kImm24Mask));
 }
@@ -1481,6 +1487,11 @@ void Assembler::blx(int branch_offset) {
   int imm24 = branch_offset >> 2;
   const bool blx_imm_check = is_int24(imm24);
   CHECK(blx_imm_check);
+
+  // Block the emission of the constant pool before the next instruction.
+  // Otherwise the passed-in branch offset would be off.
+  BlockConstPoolFor(1);
+
   emit(kSpecialCondition | B27 | B25 | h | (imm24 & kImm24Mask));
 }
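Note (codegen/arm/assembler-arm.cc): the offset returned by branch_offset() is relative to the pc at which the branch will be emitted, so any constant-pool emission between computing the offset and emitting the instruction would advance the pc and leave the encoded offset stale. The hunks therefore move the BlockConstPoolFor(1) call out of branch_offset() and into b()/bl()/blx(), directly before emit(). A standalone toy sketch of the invariant being protected (ToyAssembler and its fields are invented for illustration; only the pc-relative arithmetic mirrors the real code):

#include <cassert>

// Toy model of pc-relative branch encoding; names and constants are invented.
constexpr int kInstrSize = 4;
constexpr int kPcLoadDelta = 8;  // ARM reads pc as "address of instruction + 8"

struct ToyAssembler {
  int pc = 0;
  int pool_block_end = 0;  // no constant pool may be emitted before this pc

  // Mirrors the patched emitters: block the pool, then emit the branch.
  void BlockConstPoolFor(int instructions) {
    pool_block_end = pc + instructions * kInstrSize;
  }
  bool pool_allowed() const { return pc >= pool_block_end; }

  int branch_offset(int target) { return target - (pc + kPcLoadDelta); }

  // Returns the address the encoded branch actually resolves to.
  int emit_branch(int encoded_offset) {
    int resolved = pc + kPcLoadDelta + encoded_offset;
    pc += kInstrSize;
    return resolved;
  }
};

int main() {
  ToyAssembler masm;
  int target = 64;
  int off = masm.branch_offset(target);

  // The patched b()/bl()/blx() call BlockConstPoolFor(1) here, so no constant
  // pool (which would advance pc) can slip in before the branch instruction.
  masm.BlockConstPoolFor(1);
  assert(!masm.pool_allowed());

  // Because pc has not moved since branch_offset(), the branch still lands on
  // the intended target; any pc advance in between would break this equality.
  assert(masm.emit_branch(off) == target);
}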
diff --git a/chromium/v8/src/compiler/backend/x64/code-generator-x64.cc b/chromium/v8/src/compiler/backend/x64/code-generator-x64.cc
index 6a29cb308e5..e3f759f5700 100644
--- a/chromium/v8/src/compiler/backend/x64/code-generator-x64.cc
+++ b/chromium/v8/src/compiler/backend/x64/code-generator-x64.cc
@@ -5295,7 +5295,22 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
     case MoveType::kStackToRegister: {
       Operand src = g.ToOperand(source);
       if (source->IsStackSlot()) {
-        __ movq(g.ToRegister(destination), src);
+        MachineRepresentation mr =
+            LocationOperand::cast(source)->representation();
+        const bool is_32_bit = mr == MachineRepresentation::kWord32 ||
+                               mr == MachineRepresentation::kCompressed ||
+                               mr == MachineRepresentation::kCompressedPointer;
+        // TODO(13581): Fix this for other code kinds (see
+        // https://crbug.com/1356461).
+        if (code_kind() == CodeKind::WASM_FUNCTION && is_32_bit) {
+          // When we need only 32 bits, move only 32 bits. Benefits:
+          // - Save a byte here and there (depending on the destination
+          //   register; "movl eax, ..." is smaller than "movq rax, ...").
+          // - Safeguard against accidental decompression of compressed slots.
+          __ movl(g.ToRegister(destination), src);
+        } else {
+          __ movq(g.ToRegister(destination), src);
+        }
       } else {
         DCHECK(source->IsFPStackSlot());
         XMMRegister dst = g.ToDoubleRegister(destination);
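Note (compiler/backend/x64/code-generator-x64.cc): the new movl path relies on two x86-64 facts. A 32-bit mov into one of the low eight registers needs no REX.W prefix, so its encoding is a byte shorter than the movq form, and a 32-bit register write zero-extends into the full 64-bit register, so a compressed 32-bit value loaded this way can never carry stray upper bits that look like a full, decompressed pointer. A hedged standalone sketch of the zero-extension point, checked on plain integers rather than through the macro assembler (the slot value is illustrative):

#include <cassert>
#include <cstdint>

int main() {
  // Simulate a 64-bit spill slot holding a 32-bit compressed value in its low
  // half and stale bits in its high half.
  uint64_t stack_slot = 0xDEADBEEF00000000ull | 0x12345678u;

  // movq-like load: copies all 64 bits, including the stale upper half.
  uint64_t reg_movq = stack_slot;

  // movl-like load: a 32-bit write to a 64-bit register zero-extends, so the
  // register holds exactly the compressed value and nothing else.
  uint64_t reg_movl = static_cast<uint32_t>(stack_slot);

  assert(reg_movq != reg_movl);     // stale upper bits survive a movq
  assert(reg_movl == 0x12345678u);  // movl leaves a clean 32-bit value
}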
diff --git a/chromium/v8/src/sandbox/external-pointer-table-inl.h b/chromium/v8/src/sandbox/external-pointer-table-inl.h
index 1e4ff34e614..9295ddd3a3f 100644
--- a/chromium/v8/src/sandbox/external-pointer-table-inl.h
+++ b/chromium/v8/src/sandbox/external-pointer-table-inl.h
@@ -6,6 +6,7 @@
 #define V8_SANDBOX_EXTERNAL_POINTER_TABLE_INL_H_
 #include "src/base/atomicops.h"
+#include "src/common/assert-scope.h"
 #include "src/sandbox/external-pointer-table.h"
 #include "src/sandbox/external-pointer.h"
 #include "src/utils/allocation.h"
@@ -75,6 +76,13 @@ ExternalPointerHandle ExternalPointerTable::AllocateAndInitializeEntry(
     Isolate* isolate, Address initial_value, ExternalPointerTag tag) {
   DCHECK(is_initialized());
+  // We currently don't want entry allocation to trigger garbage collection as
+  // this may cause seemingly harmless pointer field assignments to trigger
+  // garbage collection. This is especially true for lazily-initialized
+  // external pointer slots which will typically only allocate the external
+  // pointer table entry when the pointer is first set to a non-null value.
+  DisallowGarbageCollection no_gc;
+
   Freelist freelist;
   bool success = false;
   while (!success) {
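Note (sandbox/external-pointer-table-inl.h): DisallowGarbageCollection comes from the newly included src/common/assert-scope.h; it is an RAII scope under which GC-triggering paths are expected to fail a debug check rather than silently collect. A simplified standalone sketch of that assert-scope pattern (the toy namespace, thread-local counter, and functions below are invented stand-ins, not the real V8 machinery):

#include <cassert>

namespace toy {

thread_local int no_gc_depth = 0;

// RAII guard: GC counts as forbidden while at least one guard is alive.
class DisallowGarbageCollection {
 public:
  DisallowGarbageCollection() { ++no_gc_depth; }
  ~DisallowGarbageCollection() { --no_gc_depth; }
};

void CollectGarbage() {
  // The real heap checks an allow-scope; here we simply assert.
  assert(no_gc_depth == 0 && "GC triggered inside a no-GC scope");
}

int AllocateTableEntry() {
  // Mirrors AllocateAndInitializeEntry(): nothing on this path may enter GC.
  DisallowGarbageCollection no_gc;
  // ... grow the table with raw pages, never via the managed heap ...
  return 1;
}

}  // namespace toy

int main() {
  (void)toy::AllocateTableEntry();  // fine: no GC attempted inside the scope
  toy::CollectGarbage();            // fine: outside any no-GC scope
}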
diff --git a/chromium/v8/src/sandbox/external-pointer-table.cc b/chromium/v8/src/sandbox/external-pointer-table.cc
index 95d8819dc5d..6a3d8aeb196 100644
--- a/chromium/v8/src/sandbox/external-pointer-table.cc
+++ b/chromium/v8/src/sandbox/external-pointer-table.cc
@@ -315,18 +315,6 @@ ExternalPointerTable::Freelist ExternalPointerTable::Grow(Isolate* isolate) {
   set_capacity(new_capacity);
-  // Schedule GC when the table's utilization crosses one of these thresholds.
-  constexpr double kGCThresholds[] = {0.5, 0.75, 0.9, 0.95, 0.99};
-  constexpr double kMaxCapacity = static_cast<double>(kMaxExternalPointers);
-  double old_utilization = static_cast<double>(old_capacity) / kMaxCapacity;
-  double new_utilization = static_cast<double>(new_capacity) / kMaxCapacity;
-  for (double threshold : kGCThresholds) {
-    if (old_utilization < threshold && new_utilization >= threshold) {
-      isolate->heap()->ReportExternalMemoryPressure();
-      break;
-    }
-  }
-
   // Build freelist bottom to top, which might be more cache friendly.
   uint32_t start = std::max<uint32_t>(old_capacity, 1);  // Skip entry zero
   uint32_t last = new_capacity - 1;
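Note (sandbox/external-pointer-table.cc): the surviving context builds the freelist for the newly grown range bottom to top, on the theory that an ascending walk is friendlier to the cache. A hedged standalone sketch of that construction over a plain std::vector (the real table packs the next-free index into the entry payload itself; the layout below is a toy):

#include <cassert>
#include <cstdint>
#include <vector>

// Each slot either holds a payload or, while free, the index of the next free
// slot (0 terminates the list; entry zero is never handed out).
using Entry = uint32_t;

// Thread the slots [start, last] into a freelist, bottom to top.
uint32_t BuildFreelist(std::vector<Entry>& table, uint32_t start,
                       uint32_t last) {
  for (uint32_t i = start; i < last; ++i) {
    table[i] = i + 1;  // free slot i points at the next free slot
  }
  table[last] = 0;  // end of the list
  return start;     // head of the freelist
}

int main() {
  std::vector<Entry> table(8, 0);
  uint32_t head = BuildFreelist(table, /*start=*/1, /*last=*/7);
  assert(head == 1);
  // Walking the list visits slots 1 through 7 and then terminates at 0.
  uint32_t count = 0;
  for (uint32_t i = head; i != 0; i = table[i]) ++count;
  assert(count == 7);
}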
diff --git a/chromium/v8/src/wasm/graph-builder-interface.cc b/chromium/v8/src/wasm/graph-builder-interface.cc
index fb1f19c2b13..abf68f80dd7 100644
--- a/chromium/v8/src/wasm/graph-builder-interface.cc
+++ b/chromium/v8/src/wasm/graph-builder-interface.cc
@@ -2106,7 +2106,7 @@ class WasmGraphBuildingInterface {
     }
     if (exception_value != nullptr) {
       *exception_value = builder_->LoopExitValue(
-          *exception_value, MachineRepresentation::kWord32);
+          *exception_value, MachineRepresentation::kTaggedPointer);
     }
     if (wrap_exit_values) {
       WrapLocalsAtLoopExit(decoder, control);
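Note (wasm/graph-builder-interface.cc): the one-line fix changes the loop-exit representation of the caught exception value from a raw 32-bit word to a tagged pointer, since the exception is a heap object the garbage collector must be able to find and relocate. A toy sketch of that representation choice (ToyRepresentation, ValueKind, and LoopExitRepresentation are invented; only the decision they encode mirrors the fix):

#include <cassert>

// Toy stand-ins for the real MachineRepresentation values.
enum class ToyRepresentation { kWord32, kTaggedPointer };

// Toy kinds of values flowing out of a wasm loop.
enum class ValueKind { kI32, kExceptionRef };

// A raw i32 lives in a register as untagged bits; an exception reference is a
// heap object, so it needs a tagged representation the GC can track.
ToyRepresentation LoopExitRepresentation(ValueKind kind) {
  return kind == ValueKind::kExceptionRef ? ToyRepresentation::kTaggedPointer
                                          : ToyRepresentation::kWord32;
}

int main() {
  assert(LoopExitRepresentation(ValueKind::kExceptionRef) ==
         ToyRepresentation::kTaggedPointer);  // the corrected choice
  assert(LoopExitRepresentation(ValueKind::kI32) ==
         ToyRepresentation::kWord32);
}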