author     Simon Hausmann <simon.hausmann@digia.com>    2012-11-07 11:22:47 +0100
committer  Simon Hausmann <simon.hausmann@digia.com>    2012-11-07 11:22:47 +0100
commit     cfd86b747d32ac22246a1aa908eaa720c63a88c1 (patch)
tree       24d68c6f61c464ecba1e05670b80390ea3b0e50c /Source/JavaScriptCore/jit
parent     69d7c744c9de19d152dbe2d8e46eb7dfd4511d1a (diff)
download   qtwebkit-cfd86b747d32ac22246a1aa908eaa720c63a88c1.tar.gz
Imported WebKit commit 20271caf2e2c016d5cef40184cddeefeac4f1876 (http://svn.webkit.org/repository/webkit/trunk@133733)
New snapshot that contains all previous fixes as well as build fix for latest QtMultimedia API changes.
Diffstat (limited to 'Source/JavaScriptCore/jit')
-rw-r--r--  Source/JavaScriptCore/jit/JIT.cpp                    |  21
-rw-r--r--  Source/JavaScriptCore/jit/JIT.h                      |  52
-rw-r--r--  Source/JavaScriptCore/jit/JITDriver.h                |   3
-rw-r--r--  Source/JavaScriptCore/jit/JITOpcodes.cpp             |   5
-rw-r--r--  Source/JavaScriptCore/jit/JITOpcodes32_64.cpp        |   5
-rw-r--r--  Source/JavaScriptCore/jit/JITPropertyAccess.cpp      | 155
-rw-r--r--  Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp | 101
-rw-r--r--  Source/JavaScriptCore/jit/JITStubs.cpp               | 201
-rw-r--r--  Source/JavaScriptCore/jit/JITStubs.h                 |   4
9 files changed, 115 insertions, 432 deletions
diff --git a/Source/JavaScriptCore/jit/JIT.cpp b/Source/JavaScriptCore/jit/JIT.cpp
index 2d2991b5f..3102c7693 100644
--- a/Source/JavaScriptCore/jit/JIT.cpp
+++ b/Source/JavaScriptCore/jit/JIT.cpp
@@ -301,7 +301,6 @@ void JIT::privateCompileMainPass()
DEFINE_OP(op_loop_if_true)
DEFINE_OP(op_loop_if_false)
DEFINE_OP(op_lshift)
- DEFINE_OP(op_method_check)
DEFINE_OP(op_mod)
DEFINE_OP(op_mov)
DEFINE_OP(op_mul)
@@ -336,6 +335,8 @@ void JIT::privateCompileMainPass()
DEFINE_OP(op_put_by_index)
DEFINE_OP(op_put_by_val)
DEFINE_OP(op_put_getter_setter)
+ case op_init_global_const_nop:
+ NEXT_OPCODE(op_init_global_const_nop);
DEFINE_OP(op_init_global_const)
DEFINE_OP(op_init_global_const_check)
@@ -372,7 +373,7 @@ void JIT::privateCompileMainPass()
DEFINE_OP(op_tear_off_activation)
DEFINE_OP(op_tear_off_arguments)
DEFINE_OP(op_throw)
- DEFINE_OP(op_throw_reference_error)
+ DEFINE_OP(op_throw_static_error)
DEFINE_OP(op_to_jsnumber)
DEFINE_OP(op_to_primitive)
@@ -489,7 +490,6 @@ void JIT::privateCompileSlowCases()
DEFINE_SLOWCASE_OP(op_loop_if_true)
DEFINE_SLOWCASE_OP(op_loop_if_false)
DEFINE_SLOWCASE_OP(op_lshift)
- DEFINE_SLOWCASE_OP(op_method_check)
DEFINE_SLOWCASE_OP(op_mod)
DEFINE_SLOWCASE_OP(op_mul)
DEFINE_SLOWCASE_OP(op_negate)
@@ -569,13 +569,6 @@ ALWAYS_INLINE void PropertyStubCompilationInfo::copyToStubInfo(StructureStubInfo
info.hotPathBegin = linkBuffer.locationOf(hotPathBegin);
switch (m_type) {
- case MethodCheck: {
- CodeLocationDataLabelPtr structureToCompareLocation = linkBuffer.locationOf(methodCheckStructureToCompare);
- info.patch.baseline.methodCheckProtoObj = MacroAssembler::differenceBetweenCodePtr(structureToCompareLocation, linkBuffer.locationOf(methodCheckProtoObj));
- info.patch.baseline.methodCheckProtoStructureToCompare = MacroAssembler::differenceBetweenCodePtr(structureToCompareLocation, linkBuffer.locationOf(methodCheckProtoStructureToCompare));
- info.patch.baseline.methodCheckPutFunction = MacroAssembler::differenceBetweenCodePtr(structureToCompareLocation, linkBuffer.locationOf(methodCheckPutFunction));
- // No break - fall through to GetById.
- }
case GetById: {
CodeLocationLabel hotPathBeginLocation = linkBuffer.locationOf(hotPathBegin);
info.patch.baseline.u.get.structureToCompare = MacroAssembler::differenceBetweenCodePtr(hotPathBeginLocation, linkBuffer.locationOf(getStructureToCompare));
@@ -792,14 +785,6 @@ JITCode JIT::privateCompile(CodePtr* functionEntryArityCheck, JITCompilationEffo
info.hotPathBegin = patchBuffer.locationOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
info.hotPathOther = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].hotPathOther);
}
- unsigned methodCallCount = m_methodCallCompilationInfo.size();
- m_codeBlock->addMethodCallLinkInfos(methodCallCount);
- for (unsigned i = 0; i < methodCallCount; ++i) {
- MethodCallLinkInfo& info = m_codeBlock->methodCallLinkInfo(i);
- info.bytecodeIndex = m_methodCallCompilationInfo[i].bytecodeIndex;
- info.cachedStructure.setLocation(patchBuffer.locationOf(m_methodCallCompilationInfo[i].structureToCompare));
- info.callReturnLocation = m_codeBlock->structureStubInfo(m_methodCallCompilationInfo[i].propertyAccessIndex).callReturnLocation;
- }
#if ENABLE(DFG_JIT) || ENABLE(LLINT)
if (canBeOptimized()
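
[Editor's note] For readers not steeped in the DEFINE_OP/NEXT_OPCODE macros: the new op_init_global_const_nop case added above compiles to nothing; its bare case body only advances the bytecode cursor. A minimal standalone sketch of that dispatch pattern (hypothetical, simplified opcode set and fixed-length instructions, not the real JSC instruction stream or macros):

    #include <cstddef>
    #include <vector>

    // Hypothetical, simplified opcode set for illustration only.
    enum Opcode { op_mov, op_init_global_const_nop, op_init_global_const };

    void compileMainPass(const std::vector<Opcode>& bytecode) {
        for (size_t bytecodeOffset = 0; bytecodeOffset < bytecode.size(); ++bytecodeOffset) {
            switch (bytecode[bytecodeOffset]) {
            case op_mov:
                /* emit machine code for op_mov */
                break;
            case op_init_global_const_nop:
                // Nothing to emit: the case body is empty, so the loop simply moves on
                // to the next instruction -- the effect of the bare NEXT_OPCODE in the diff.
                break;
            case op_init_global_const:
                /* emit machine code for op_init_global_const */
                break;
            }
        }
    }
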
diff --git a/Source/JavaScriptCore/jit/JIT.h b/Source/JavaScriptCore/jit/JIT.h
index cd832c21f..dcf87d352 100644
--- a/Source/JavaScriptCore/jit/JIT.h
+++ b/Source/JavaScriptCore/jit/JIT.h
@@ -150,7 +150,7 @@ namespace JSC {
enum PropertyStubPutById_T { PropertyStubPutById };
struct PropertyStubCompilationInfo {
- enum Type { GetById, PutById, MethodCheck } m_type;
+ enum Type { GetById, PutById } m_type;
unsigned bytecodeIndex;
MacroAssembler::Call callReturnLocation;
@@ -173,10 +173,6 @@ namespace JSC {
MacroAssembler::DataLabel32 putDisplacementLabel1;
MacroAssembler::DataLabel32 putDisplacementLabel2;
#endif
- MacroAssembler::DataLabelPtr methodCheckStructureToCompare;
- MacroAssembler::DataLabelPtr methodCheckProtoObj;
- MacroAssembler::DataLabelPtr methodCheckProtoStructureToCompare;
- MacroAssembler::DataLabelPtr methodCheckPutFunction;
#if !ASSERT_DISABLED
PropertyStubCompilationInfo()
@@ -241,7 +237,7 @@ namespace JSC {
void slowCaseInfo(PropertyStubGetById_T, MacroAssembler::Label coldPathBegin, MacroAssembler::Call call)
{
- ASSERT(m_type == GetById || m_type == MethodCheck);
+ ASSERT(m_type == GetById);
callReturnLocation = call;
getColdPathBegin = coldPathBegin;
}
@@ -252,15 +248,6 @@ namespace JSC {
callReturnLocation = call;
}
- void addMethodCheckInfo(MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::DataLabelPtr protoObj, MacroAssembler::DataLabelPtr protoStructureToCompare, MacroAssembler::DataLabelPtr putFunction)
- {
- m_type = MethodCheck;
- methodCheckStructureToCompare = structureToCompare;
- methodCheckProtoObj = protoObj;
- methodCheckProtoStructureToCompare = protoStructureToCompare;
- methodCheckPutFunction = putFunction;
- }
-
void copyToStubInfo(StructureStubInfo& info, LinkBuffer &patchBuffer);
};
@@ -291,18 +278,6 @@ namespace JSC {
unsigned bytecodeIndex;
};
- struct MethodCallCompilationInfo {
- MethodCallCompilationInfo(unsigned bytecodeIndex, unsigned propertyAccessIndex)
- : bytecodeIndex(bytecodeIndex)
- , propertyAccessIndex(propertyAccessIndex)
- {
- }
-
- unsigned bytecodeIndex;
- MacroAssembler::DataLabelPtr structureToCompare;
- unsigned propertyAccessIndex;
- };
-
// Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
@@ -407,7 +382,6 @@ namespace JSC {
static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
static void patchGetByIdSelf(CodeBlock*, StructureStubInfo*, Structure*, PropertyOffset cachedOffset, ReturnAddressPtr);
static void patchPutByIdReplace(CodeBlock*, StructureStubInfo*, Structure*, PropertyOffset cachedOffset, ReturnAddressPtr, bool direct);
- static void patchMethodCallProto(JSGlobalData&, CodeBlock*, MethodCallLinkInfo&, StructureStubInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
{
@@ -428,6 +402,7 @@ namespace JSC {
void privateCompileLinkPass();
void privateCompileSlowCases();
JITCode privateCompile(CodePtr* functionEntryArityCheck, JITCompilationEffort);
+
void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, ReturnAddressPtr, CallFrame*);
void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset);
void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, PropertyOffset cachedOffset, CallFrame*);
@@ -466,7 +441,9 @@ namespace JSC {
Jump emitJumpIfNotObject(RegisterID structureReg);
Jump emitJumpIfNotType(RegisterID baseReg, RegisterID scratchReg, JSType);
- void testPrototype(JSValue, JumpList& failureCases);
+ Jump addStructureTransitionCheck(JSCell*, Structure*, StructureStubInfo*, RegisterID scratch);
+ void addStructureTransitionCheck(JSCell*, Structure*, StructureStubInfo*, JumpList& failureCases, RegisterID scratch);
+ void testPrototype(JSValue, JumpList& failureCases, StructureStubInfo*);
enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterImmediates };
// value register in write barrier is used before any scratch registers
@@ -543,8 +520,8 @@ namespace JSC {
void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);
- void compileGetByIdHotPath();
- void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
+ void compileGetByIdHotPath(Identifier*);
+ void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier*, Vector<SlowCaseEntry>::iterator&);
void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset);
void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode = MayBeFinal);
@@ -559,9 +536,6 @@ namespace JSC {
// sequenceOpCall
static const int sequenceOpCallInstructionSpace = 12;
static const int sequenceOpCallConstantSpace = 2;
- // sequenceMethodCheck
- static const int sequenceMethodCheckInstructionSpace = 40;
- static const int sequenceMethodCheckConstantSpace = 6;
// sequenceGetByIdHotPath
static const int sequenceGetByIdHotPathInstructionSpace = 36;
static const int sequenceGetByIdHotPathConstantSpace = 4;
@@ -575,9 +549,6 @@ namespace JSC {
// sequenceOpCall
static const int sequenceOpCallInstructionSpace = 12;
static const int sequenceOpCallConstantSpace = 2;
- // sequenceMethodCheck
- static const int sequenceMethodCheckInstructionSpace = 40;
- static const int sequenceMethodCheckConstantSpace = 6;
// sequenceGetByIdHotPath
static const int sequenceGetByIdHotPathInstructionSpace = 36;
static const int sequenceGetByIdHotPathConstantSpace = 5;
@@ -626,7 +597,7 @@ namespace JSC {
void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
void compileGetByIdHotPath(int baseVReg, Identifier*);
- void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
+ void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier*, Vector<SlowCaseEntry>::iterator&);
void compileGetDirectOffset(RegisterID base, RegisterID result, PropertyOffset cachedOffset);
void compileGetDirectOffset(JSObject* base, RegisterID result, PropertyOffset cachedOffset);
void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch, FinalObjectMode = MayBeFinal);
@@ -708,7 +679,6 @@ namespace JSC {
void emit_op_loop_if_true(Instruction*);
void emit_op_loop_if_false(Instruction*);
void emit_op_lshift(Instruction*);
- void emit_op_method_check(Instruction*);
void emit_op_mod(Instruction*);
void emit_op_mov(Instruction*);
void emit_op_mul(Instruction*);
@@ -761,7 +731,7 @@ namespace JSC {
void emit_op_tear_off_activation(Instruction*);
void emit_op_tear_off_arguments(Instruction*);
void emit_op_throw(Instruction*);
- void emit_op_throw_reference_error(Instruction*);
+ void emit_op_throw_static_error(Instruction*);
void emit_op_to_jsnumber(Instruction*);
void emit_op_to_primitive(Instruction*);
void emit_op_unexpected_load(Instruction*);
@@ -803,7 +773,6 @@ namespace JSC {
void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
- void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
@@ -933,7 +902,6 @@ namespace JSC {
Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
Vector<ByValCompilationInfo> m_byValCompilationInfo;
Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
- Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
Vector<JumpTable> m_jmpTable;
unsigned m_bytecodeOffset;
diff --git a/Source/JavaScriptCore/jit/JITDriver.h b/Source/JavaScriptCore/jit/JITDriver.h
index 318b4e7aa..645c65b28 100644
--- a/Source/JavaScriptCore/jit/JITDriver.h
+++ b/Source/JavaScriptCore/jit/JITDriver.h
@@ -75,7 +75,7 @@ inline bool jitCompileIfAppropriate(ExecState* exec, OwnPtr<CodeBlockType>& code
return true;
}
-inline bool jitCompileFunctionIfAppropriate(ExecState* exec, OwnPtr<FunctionCodeBlock>& codeBlock, JITCode& jitCode, MacroAssemblerCodePtr& jitCodeWithArityCheck, WriteBarrier<SharedSymbolTable>& symbolTable, JITCode::JITType jitType, unsigned bytecodeIndex, JITCompilationEffort effort)
+inline bool jitCompileFunctionIfAppropriate(ExecState* exec, OwnPtr<FunctionCodeBlock>& codeBlock, JITCode& jitCode, MacroAssemblerCodePtr& jitCodeWithArityCheck, JITCode::JITType jitType, unsigned bytecodeIndex, JITCompilationEffort effort)
{
JSGlobalData& globalData = exec->globalData();
@@ -99,7 +99,6 @@ inline bool jitCompileFunctionIfAppropriate(ExecState* exec, OwnPtr<FunctionCode
} else {
if (codeBlock->alternative()) {
codeBlock = static_pointer_cast<FunctionCodeBlock>(codeBlock->releaseAlternative());
- symbolTable.set(exec->globalData(), codeBlock->ownerExecutable(), codeBlock->symbolTable());
jitCode = oldJITCode;
jitCodeWithArityCheck = oldJITCodeWithArityCheck;
return false;
diff --git a/Source/JavaScriptCore/jit/JITOpcodes.cpp b/Source/JavaScriptCore/jit/JITOpcodes.cpp
index 07c8ace2a..249dcbac9 100644
--- a/Source/JavaScriptCore/jit/JITOpcodes.cpp
+++ b/Source/JavaScriptCore/jit/JITOpcodes.cpp
@@ -1069,13 +1069,14 @@ void JIT::emit_op_switch_string(Instruction* currentInstruction)
jump(regT0);
}
-void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
+void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
- JITStubCall stubCall(this, cti_op_throw_reference_error);
+ JITStubCall stubCall(this, cti_op_throw_static_error);
if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
stubCall.addArgument(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
else
stubCall.addArgument(Imm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
+ stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
stubCall.call();
}
diff --git a/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp b/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
index 44123be19..9c5d260ab 100644
--- a/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
+++ b/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
@@ -1401,12 +1401,13 @@ void JIT::emit_op_switch_string(Instruction* currentInstruction)
jump(regT0);
}
-void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
+void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
unsigned message = currentInstruction[1].u.operand;
- JITStubCall stubCall(this, cti_op_throw_reference_error);
+ JITStubCall stubCall(this, cti_op_throw_static_error);
stubCall.addArgument(m_codeBlock->getConstant(message));
+ stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
stubCall.call();
}
diff --git a/Source/JavaScriptCore/jit/JITPropertyAccess.cpp b/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
index b7be821f6..6362598f4 100644
--- a/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
+++ b/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
@@ -440,71 +440,6 @@ void JIT::emit_op_del_by_id(Instruction* currentInstruction)
stubCall.call(currentInstruction[1].u.operand);
}
-void JIT::emit_op_method_check(Instruction* currentInstruction)
-{
- // Assert that the following instruction is a get_by_id.
- ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id
- || m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id_out_of_line);
-
- currentInstruction += OPCODE_LENGTH(op_method_check);
- unsigned resultVReg = currentInstruction[1].u.operand;
- unsigned baseVReg = currentInstruction[2].u.operand;
- Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
-
- emitGetVirtualRegister(baseVReg, regT0);
-
- // Do the method check - check the object & its prototype's structure inline (this is the common case).
- m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
- MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
-
- Jump notCell = emitJumpIfNotJSCell(regT0);
-
- BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
-
- Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
- DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT1);
- Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT1, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
-
- // This will be relinked to load the function without doing a load.
- DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);
-
- END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
-
- Jump match = jump();
-
- // Link the failure cases here.
- notCell.link(this);
- structureCheck.link(this);
- protoStructureCheck.link(this);
-
- // Do a regular(ish) get_by_id (the slow case will be link to
- // cti_op_get_by_id_method_check instead of cti_op_get_by_id.
- compileGetByIdHotPath(baseVReg, ident);
-
- match.link(this);
- emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
- emitPutVirtualRegister(resultVReg);
-
- // We've already generated the following get_by_id, so make sure it's skipped over.
- m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
-
- m_propertyAccessCompilationInfo.last().addMethodCheckInfo(info.structureToCompare, protoObj, protoStructureToCompare, putFunction);
-}
-
-void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- currentInstruction += OPCODE_LENGTH(op_method_check);
- unsigned resultVReg = currentInstruction[1].u.operand;
- unsigned baseVReg = currentInstruction[2].u.operand;
- Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
-
- compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, true);
- emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
-
- // We've already generated the following get_by_id, so make sure it's skipped over.
- m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
-}
-
void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
unsigned resultVReg = currentInstruction[1].u.operand;
@@ -517,7 +452,7 @@ void JIT::emit_op_get_by_id(Instruction* currentInstruction)
emitPutVirtualRegister(resultVReg);
}
-void JIT::compileGetByIdHotPath(int baseVReg, Identifier*)
+void JIT::compileGetByIdHotPath(int baseVReg, Identifier* ident)
{
// As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
// Additionally, for get_by_id we need patch the offset of the branch to the slow case (we patch this to jump
@@ -525,6 +460,11 @@ void JIT::compileGetByIdHotPath(int baseVReg, Identifier*)
// to jump back to if one of these trampolies finds a match.
emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
+
+ if (*ident == m_globalData->propertyNames->length && canBeOptimized()) {
+ loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
+ emitArrayProfilingSiteForBytecodeIndex(regT1, regT2, m_bytecodeOffset);
+ }
BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
@@ -550,11 +490,11 @@ void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCase
unsigned baseVReg = currentInstruction[2].u.operand;
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
- compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter, false);
+ compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter);
emitValueProfilingSite();
}
-void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
+void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter)
{
// As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
// so that we only need track one pointer into the slow case code - we track a pointer to the location
@@ -568,7 +508,7 @@ void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident
BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
Label coldPathBegin(this);
- JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
+ JITStubCall stubCall(this, cti_op_get_by_id);
stubCall.addArgument(regT0);
stubCall.addArgument(TrustedImmPtr(ident));
Call call = stubCall.call(resultVReg);
@@ -676,7 +616,7 @@ void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
failureCases.append(emitJumpIfNotJSCell(regT0));
failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
- testPrototype(oldStructure->storedPrototype(), failureCases);
+ testPrototype(oldStructure->storedPrototype(), failureCases, stubInfo);
ASSERT(oldStructure->storedPrototype().isNull() || oldStructure->storedPrototype().asCell()->structure() == chain->head()->get());
@@ -684,7 +624,7 @@ void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
if (!direct) {
for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
ASSERT((*it)->storedPrototype().isNull() || (*it)->storedPrototype().asCell()->structure() == it[1].get());
- testPrototype((*it)->storedPrototype(), failureCases);
+ testPrototype((*it)->storedPrototype(), failureCases, stubInfo);
}
}
@@ -788,7 +728,6 @@ void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
// Check eax is an array
loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
- emitArrayProfilingSiteForBytecodeIndex(regT2, regT1, stubInfo->bytecodeIndex);
Jump failureCases1 = branchTest32(Zero, regT2, TrustedImm32(IsArray));
Jump failureCases2 = branchTest32(Zero, regT2, TrustedImm32(IndexingShapeMask));
@@ -837,8 +776,7 @@ void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
Jump failureCases1 = checkStructure(regT0, structure);
// Check the prototype object's Structure had not changed.
- move(TrustedImmPtr(protoObject), regT3);
- Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
+ Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);
bool needsStubLink = false;
@@ -867,7 +805,8 @@ void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
patchBuffer.link(failureCases1, slowCaseBegin);
- patchBuffer.link(failureCases2, slowCaseBegin);
+ if (failureCases2.isSet())
+ patchBuffer.link(failureCases2, slowCaseBegin);
// On success return back to the hot patch code, at a point it will perform the store to dest for us.
patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
@@ -972,8 +911,7 @@ void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Polymorphi
Jump failureCases1 = checkStructure(regT0, structure);
// Check the prototype object's Structure had not changed.
- move(TrustedImmPtr(protoObject), regT3);
- Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
+ Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);
// Checks out okay!
bool needsStubLink = false;
@@ -1013,7 +951,8 @@ void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Polymorphi
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
patchBuffer.link(failureCases1, lastProtoBegin);
- patchBuffer.link(failureCases2, lastProtoBegin);
+ if (failureCases2.isSet())
+ patchBuffer.link(failureCases2, lastProtoBegin);
// On success return back to the hot patch code, at a point it will perform the store to dest for us.
patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
@@ -1050,7 +989,7 @@ void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Polymorphi
for (unsigned i = 0; i < count; ++i, ++it) {
protoObject = asObject(currStructure->prototypeForLookup(callFrame));
currStructure = it->get();
- testPrototype(protoObject, bucketsOfFail);
+ testPrototype(protoObject, bucketsOfFail, stubInfo);
}
ASSERT(protoObject);
@@ -1129,7 +1068,7 @@ void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
for (unsigned i = 0; i < count; ++i, ++it) {
protoObject = asObject(currStructure->prototypeForLookup(callFrame));
currStructure = it->get();
- testPrototype(protoObject, bucketsOfFail);
+ testPrototype(protoObject, bucketsOfFail, stubInfo);
}
ASSERT(protoObject);
@@ -1304,28 +1243,40 @@ void JIT::emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch,
#endif
}
-void JIT::testPrototype(JSValue prototype, JumpList& failureCases)
+JIT::Jump JIT::addStructureTransitionCheck(JSCell* object, Structure* structure, StructureStubInfo* stubInfo, RegisterID scratch)
{
- if (prototype.isNull())
- return;
-
- ASSERT(prototype.isCell());
- move(TrustedImmPtr(prototype.asCell()), regT3);
- failureCases.append(branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototype.asCell()->structure())));
+ if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
+ structure->addTransitionWatchpoint(stubInfo->addWatchpoint(m_codeBlock));
+#if !ASSERT_DISABLED
+ move(TrustedImmPtr(object), scratch);
+ Jump ok = branchPtr(Equal, Address(scratch, JSCell::structureOffset()), TrustedImmPtr(structure));
+ breakpoint();
+ ok.link(this);
+#endif
+ Jump result; // Returning an unset jump this way because otherwise VC++ would complain.
+ return result;
+ }
+
+ move(TrustedImmPtr(object), scratch);
+ return branchPtr(NotEqual, Address(scratch, JSCell::structureOffset()), TrustedImmPtr(structure));
}
-void JIT::patchMethodCallProto(JSGlobalData& globalData, CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, StructureStubInfo& stubInfo, JSObject* callee, Structure* structure, JSObject* proto, ReturnAddressPtr returnAddress)
+void JIT::addStructureTransitionCheck(JSCell* object, Structure* structure, StructureStubInfo* stubInfo, JumpList& failureCases, RegisterID scratch)
{
- RepatchBuffer repatchBuffer(codeBlock);
-
- CodeLocationDataLabelPtr structureLocation = methodCallLinkInfo.cachedStructure.location();
- methodCallLinkInfo.cachedStructure.set(globalData, structureLocation, codeBlock->ownerExecutable(), structure);
+ Jump failureCase = addStructureTransitionCheck(object, structure, stubInfo, scratch);
+ if (!failureCase.isSet())
+ return;
- Structure* prototypeStructure = proto->structure();
- methodCallLinkInfo.cachedPrototypeStructure.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckProtoStructureToCompare), codeBlock->ownerExecutable(), prototypeStructure);
- methodCallLinkInfo.cachedPrototype.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckProtoObj), codeBlock->ownerExecutable(), proto);
- methodCallLinkInfo.cachedFunction.set(globalData, structureLocation.dataLabelPtrAtOffset(stubInfo.patch.baseline.methodCheckPutFunction), codeBlock->ownerExecutable(), callee);
- repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_method_check_update));
+ failureCases.append(failureCase);
+}
+
+void JIT::testPrototype(JSValue prototype, JumpList& failureCases, StructureStubInfo* stubInfo)
+{
+ if (prototype.isNull())
+ return;
+
+ ASSERT(prototype.isCell());
+ addStructureTransitionCheck(prototype.asCell(), prototype.asCell()->structure(), stubInfo, failureCases, regT3);
}
bool JIT::isDirectPutById(StructureStubInfo* stubInfo)
@@ -1377,9 +1328,11 @@ void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAd
slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->int32ArrayDescriptor(), 4, SignedTypedArray);
break;
case JITUint8Array:
- case JITUint8ClampedArray:
slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->uint8ArrayDescriptor(), 1, UnsignedTypedArray);
break;
+ case JITUint8ClampedArray:
+ slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->uint8ClampedArrayDescriptor(), 1, UnsignedTypedArray);
+ break;
case JITUint16Array:
slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->uint16ArrayDescriptor(), 2, UnsignedTypedArray);
break;
@@ -1400,8 +1353,8 @@ void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAd
LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
- patchBuffer.link(badType, CodeLocationLabel(returnAddress.value()).labelAtOffset(byValInfo->returnAddressToSlowPath));
- patchBuffer.link(slowCases, CodeLocationLabel(returnAddress.value()).labelAtOffset(byValInfo->returnAddressToSlowPath));
+ patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+ patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
@@ -1580,7 +1533,7 @@ JIT::JumpList JIT::emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badT
case 8: {
loadDouble(BaseIndex(base, property, TimesEight), fpRegT0);
Jump notNaN = branchDouble(DoubleEqual, fpRegT0, fpRegT0);
- static const double NaN = std::numeric_limits<double>::quiet_NaN();
+ static const double NaN = QNaN;
loadDouble(&NaN, fpRegT0);
notNaN.link(this);
break;
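
[Editor's note] The new addStructureTransitionCheck above elides the runtime structure compare entirely when the expected structure's transition watchpoint set is still valid: instead of emitting a branch, it registers a watchpoint on the structure and returns an unset Jump, and callers only link the failure case if the Jump was actually set. A minimal standalone sketch of that idea, using simplified stand-in types rather than the real JSC Structure/Watchpoint/StructureStubInfo classes:

    #include <vector>

    // Stand-in types for illustration only.
    struct Watchpoint { bool fired = false; };

    struct Structure {
        bool watchpointsStillValid = true;      // becomes false once a transition happens
        std::vector<Watchpoint*> watchpoints;   // code that relied on "no transition yet"

        void addTransitionWatchpoint(Watchpoint* w) { watchpoints.push_back(w); }

        void didTransition() {
            watchpointsStillValid = false;
            for (Watchpoint* w : watchpoints)
                w->fired = true;                // real JSC would invalidate the stub instead
        }
    };

    // Returns true if a runtime structure check still has to be emitted.
    // When the watchpoint set is valid, a watchpoint is registered instead and
    // no inline compare is emitted -- corresponding to the unset Jump in the diff.
    bool needsRuntimeStructureCheck(Structure& expected, Watchpoint* stubWatchpoint) {
        if (expected.watchpointsStillValid) {
            expected.addTransitionWatchpoint(stubWatchpoint);
            return false; // invalidation is handled lazily when the structure transitions
        }
        return true;      // fall back to branchPtr(NotEqual, structureAddress, expected)
    }
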
diff --git a/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp b/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
index 5d619b94b..939766f04 100644
--- a/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
+++ b/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
@@ -91,72 +91,6 @@ void JIT::emit_op_del_by_id(Instruction* currentInstruction)
stubCall.call(dst);
}
-void JIT::emit_op_method_check(Instruction* currentInstruction)
-{
- // Assert that the following instruction is a get_by_id.
- ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id
- || m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id_out_of_line);
-
- currentInstruction += OPCODE_LENGTH(op_method_check);
-
- // Do the method check - check the object & its prototype's structure inline (this is the common case).
- m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_bytecodeOffset, m_propertyAccessCompilationInfo.size()));
- MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
-
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
-
- emitLoad(base, regT1, regT0);
- emitJumpSlowCaseIfNotJSCell(base, regT1);
-
- BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
-
- Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), info.structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
- DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(TrustedImmPtr(0), regT2);
- Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, JSCell::structureOffset()), protoStructureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
-
- // This will be relinked to load the function without doing a load.
- DataLabelPtr putFunction = moveWithPatch(TrustedImmPtr(0), regT0);
-
- END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
-
- move(TrustedImm32(JSValue::CellTag), regT1);
- Jump match = jump();
-
- // Link the failure cases here.
- structureCheck.link(this);
- protoStructureCheck.link(this);
-
- // Do a regular(ish) get_by_id (the slow case will be link to
- // cti_op_get_by_id_method_check instead of cti_op_get_by_id.
- compileGetByIdHotPath();
-
- match.link(this);
- emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
- emitStore(dst, regT1, regT0);
- map(m_bytecodeOffset + OPCODE_LENGTH(op_method_check) + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
-
- // We've already generated the following get_by_id, so make sure it's skipped over.
- m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
-
- m_propertyAccessCompilationInfo.last().addMethodCheckInfo(info.structureToCompare, protoObj, protoStructureToCompare, putFunction);
-}
-
-void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- currentInstruction += OPCODE_LENGTH(op_method_check);
-
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
- int ident = currentInstruction[3].u.operand;
-
- compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
- emitValueProfilingSite(m_bytecodeOffset + OPCODE_LENGTH(op_method_check));
-
- // We've already generated the following get_by_id, so make sure it's skipped over.
- m_bytecodeOffset += OPCODE_LENGTH(op_get_by_id);
-}
-
JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
{
JSInterfaceJIT jit;
@@ -453,22 +387,28 @@ void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
int dst = currentInstruction[1].u.operand;
int base = currentInstruction[2].u.operand;
+ Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
emitLoad(base, regT1, regT0);
emitJumpSlowCaseIfNotJSCell(base, regT1);
- compileGetByIdHotPath();
+ compileGetByIdHotPath(ident);
emitValueProfilingSite();
emitStore(dst, regT1, regT0);
map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
}
-void JIT::compileGetByIdHotPath()
+void JIT::compileGetByIdHotPath(Identifier* ident)
{
// As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
// Additionally, for get_by_id we need patch the offset of the branch to the slow case (we patch this to jump
// to array-length / prototype access tranpolines, and finally we also the the property-map access offset as a label
// to jump back to if one of these trampolies finds a match.
+ if (*ident == m_globalData->propertyNames->length && canBeOptimized()) {
+ loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
+ emitArrayProfilingSiteForBytecodeIndex(regT2, regT3, m_bytecodeOffset);
+ }
+
BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
Label hotPathBegin(this);
@@ -498,7 +438,7 @@ void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCase
emitValueProfilingSite();
}
-void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
+void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter)
{
// As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
// so that we only need track one pointer into the slow case code - we track a pointer to the location
@@ -511,7 +451,7 @@ void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<Sl
BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
Label coldPathBegin(this);
- JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
+ JITStubCall stubCall(this, cti_op_get_by_id);
stubCall.addArgument(regT1, regT0);
stubCall.addArgument(TrustedImmPtr(ident));
Call call = stubCall.call(dst);
@@ -624,12 +564,12 @@ void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
JumpList failureCases;
failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
- testPrototype(oldStructure->storedPrototype(), failureCases);
+ testPrototype(oldStructure->storedPrototype(), failureCases, stubInfo);
if (!direct) {
// Verify that nothing in the prototype chain has a setter for this property.
for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
- testPrototype((*it)->storedPrototype(), failureCases);
+ testPrototype((*it)->storedPrototype(), failureCases, stubInfo);
}
// If we succeed in all of our checks, and the code was optimizable, then make sure we
@@ -751,7 +691,6 @@ void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
// Check for array
loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
- emitArrayProfilingSiteForBytecodeIndex(regT2, regT3, stubInfo->bytecodeIndex);
Jump failureCases1 = branchTest32(Zero, regT2, TrustedImm32(IsArray));
Jump failureCases2 = branchTest32(Zero, regT2, TrustedImm32(IndexingShapeMask));
@@ -802,8 +741,7 @@ void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
Jump failureCases1 = checkStructure(regT0, structure);
// Check the prototype object's Structure had not changed.
- move(TrustedImmPtr(protoObject), regT3);
- Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
+ Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);
bool needsStubLink = false;
// Checks out okay!
@@ -833,7 +771,8 @@ void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
patchBuffer.link(failureCases1, slowCaseBegin);
- patchBuffer.link(failureCases2, slowCaseBegin);
+ if (failureCases2.isSet())
+ patchBuffer.link(failureCases2, slowCaseBegin);
// On success return back to the hot patch code, at a point it will perform the store to dest for us.
patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
@@ -942,8 +881,7 @@ void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Polymorphi
Jump failureCases1 = checkStructure(regT0, structure);
// Check the prototype object's Structure had not changed.
- move(TrustedImmPtr(protoObject), regT3);
- Jump failureCases2 = branchPtr(NotEqual, Address(regT3, JSCell::structureOffset()), TrustedImmPtr(prototypeStructure));
+ Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);
bool needsStubLink = false;
bool isDirect = false;
@@ -980,7 +918,8 @@ void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Polymorphi
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
patchBuffer.link(failureCases1, lastProtoBegin);
- patchBuffer.link(failureCases2, lastProtoBegin);
+ if (failureCases2.isSet())
+ patchBuffer.link(failureCases2, lastProtoBegin);
// On success return back to the hot patch code, at a point it will perform the store to dest for us.
patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
@@ -1019,7 +958,7 @@ void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Polymorphi
for (unsigned i = 0; i < count; ++i, ++it) {
protoObject = asObject(currStructure->prototypeForLookup(callFrame));
currStructure = it->get();
- testPrototype(protoObject, bucketsOfFail);
+ testPrototype(protoObject, bucketsOfFail, stubInfo);
}
ASSERT(protoObject);
@@ -1098,7 +1037,7 @@ void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
for (unsigned i = 0; i < count; ++i, ++it) {
protoObject = asObject(currStructure->prototypeForLookup(callFrame));
currStructure = it->get();
- testPrototype(protoObject, bucketsOfFail);
+ testPrototype(protoObject, bucketsOfFail, stubInfo);
}
ASSERT(protoObject);
diff --git a/Source/JavaScriptCore/jit/JITStubs.cpp b/Source/JavaScriptCore/jit/JITStubs.cpp
index ba8c76cfb..5ddb98dee 100644
--- a/Source/JavaScriptCore/jit/JITStubs.cpp
+++ b/Source/JavaScriptCore/jit/JITStubs.cpp
@@ -864,7 +864,10 @@ NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* co
}
// put_by_id_transition checks the prototype chain for setters.
- normalizePrototypeChain(callFrame, baseCell);
+ if (normalizePrototypeChain(callFrame, baseCell) == InvalidPrototypeChain) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
+ return;
+ }
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
ASSERT(structure->previousID()->transitionWatchpointSetHasBeenInvalidated());
@@ -937,7 +940,7 @@ NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* co
if (slot.slotBase() == structure->prototypeForLookup(callFrame)) {
ASSERT(slot.slotBase().isObject());
-
+
JSObject* slotBaseObject = asObject(slot.slotBase());
size_t offset = slot.cachedOffset();
@@ -958,7 +961,7 @@ NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* co
PropertyOffset offset = slot.cachedOffset();
size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset);
- if (!count) {
+ if (count == InvalidPrototypeChain) {
stubInfo->accessType = access_get_by_id_generic;
return;
}
@@ -1512,178 +1515,6 @@ DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
return base;
}
-DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_method_check)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- CallFrame* callFrame = stackFrame.callFrame;
- Identifier& ident = stackFrame.args[1].identifier();
-
- CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
- MethodCallLinkInfo& methodCallLinkInfo = codeBlock->getMethodCallLinkInfo(STUB_RETURN_ADDRESS);
- StructureStubInfo& stubInfo = codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
- AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
-
- JSValue baseValue = stackFrame.args[0].jsValue();
- PropertySlot slot(baseValue);
- JSValue result = baseValue.get(callFrame, ident, slot);
- CHECK_FOR_EXCEPTION();
-
- if (accessType != static_cast<AccessType>(stubInfo.accessType))
- return JSValue::encode(result);
-
- if (!methodCallLinkInfo.seenOnce()) {
- methodCallLinkInfo.setSeen();
- return JSValue::encode(result);
- }
-
- // If we successfully got something, then the base from which it is being accessed must
- // be an object. (Assertion to ensure asObject() call below is safe, which comes after
- // an isCacheable() chceck.
- ASSERT(!slot.isCacheableValue() || slot.slotBase().isObject());
-
- // Check that:
- // * We're dealing with a JSCell,
- // * the property is cachable,
- // * it's not a dictionary
- // * there is a function cached.
- Structure* structure;
- JSCell* specific;
- JSObject* slotBaseObject;
- if (baseValue.isCell()
- && slot.isCacheableValue()
- && !(structure = baseValue.asCell()->structure())->isUncacheableDictionary()
- && (slotBaseObject = asObject(slot.slotBase()))->getPropertySpecificValue(callFrame, ident, specific)
- && specific
- ) {
-
- JSObject* callee = asObject(specific);
-
- // Since we're accessing a prototype in a loop, it's a good bet that it
- // should not be treated as a dictionary.
- if (slotBaseObject->structure()->isDictionary())
- slotBaseObject->flattenDictionaryObject(callFrame->globalData());
-
- // The result fetched should always be the callee!
- ASSERT(result == JSValue(callee));
-
- // Check to see if the function is on the object's prototype. Patch up the code to optimize.
- if (slot.slotBase() == structure->prototypeForLookup(callFrame)) {
- JIT::patchMethodCallProto(callFrame->globalData(), codeBlock, methodCallLinkInfo, stubInfo, callee, structure, slotBaseObject, STUB_RETURN_ADDRESS);
- return JSValue::encode(result);
- }
-
- // Check to see if the function is on the object itself.
- // Since we generate the method-check to check both the structure and a prototype-structure (since this
- // is the common case) we have a problem - we need to patch the prototype structure check to do something
- // useful. We could try to nop it out altogether, but that's a little messy, so lets do something simpler
- // for now. For now it performs a check on a special object on the global object only used for this
- // purpose. The object is in no way exposed, and as such the check will always pass.
- if (slot.slotBase() == baseValue) {
- JIT::patchMethodCallProto(callFrame->globalData(), codeBlock, methodCallLinkInfo, stubInfo, callee, structure, callFrame->scope()->globalObject()->methodCallDummy(), STUB_RETURN_ADDRESS);
- return JSValue::encode(result);
- }
- }
-
- // Revert the get_by_id op back to being a regular get_by_id - allow it to cache like normal, if it needs to.
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id));
- return JSValue::encode(result);
-}
-
-DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_method_check_update)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- CallFrame* callFrame = stackFrame.callFrame;
- Identifier& ident = stackFrame.args[1].identifier();
-
- CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
- MethodCallLinkInfo& methodCallLinkInfo = codeBlock->getMethodCallLinkInfo(STUB_RETURN_ADDRESS);
- StructureStubInfo& stubInfo = codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
- AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
-
- JSValue baseValue = stackFrame.args[0].jsValue();
- PropertySlot slot(baseValue);
- JSValue result = baseValue.get(callFrame, ident, slot);
- CHECK_FOR_EXCEPTION();
-
- if (accessType != static_cast<AccessType>(stubInfo.accessType))
- return JSValue::encode(result);
-
- ASSERT(methodCallLinkInfo.seenOnce());
-
- // If we successfully got something, then the base from which it is being accessed must
- // be an object. (Assertion to ensure asObject() call below is safe, which comes after
- // an isCacheable() chceck.
- ASSERT(!slot.isCacheableValue() || slot.slotBase().isObject());
-
- // Check that:
- // * We're dealing with a JSCell,
- // * the property is cachable,
- // * it's not a dictionary
- // * there is a function cached.
- Structure* structure;
- JSCell* specific;
- JSObject* slotBaseObject;
- if (!(baseValue.isCell()
- && slot.isCacheableValue()
- && !(structure = baseValue.asCell()->structure())->isUncacheableDictionary()
- && (slotBaseObject = asObject(slot.slotBase()))->getPropertySpecificValue(callFrame, ident, specific)
- && specific
- )
- || (slot.slotBase() != structure->prototypeForLookup(callFrame)
- && slot.slotBase() != baseValue)) {
- // Revert the get_by_id op back to being a regular get_by_id - allow it to cache like normal, if it needs to.
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id));
- return JSValue::encode(result);
- }
-
- // Now check if the situation has changed sufficiently that we should bail out of
- // doing method_check optimizations entirely, or if it changed only slightly, in
- // which case we can just repatch.
-
- JSValue proto = structure->prototypeForLookup(callFrame);
-
- bool previousWasProto = methodCallLinkInfo.cachedPrototype.get() != codeBlock->globalObject()->methodCallDummy();
- bool currentIsProto = slot.slotBase() == proto;
-
- JSObject* callee = asObject(specific);
-
- if (previousWasProto != currentIsProto
- || !structure->transitivelyTransitionedFrom(methodCallLinkInfo.cachedStructure.get())
- || (previousWasProto && !slotBaseObject->structure()->transitivelyTransitionedFrom(methodCallLinkInfo.cachedPrototypeStructure.get()))
- || specific != methodCallLinkInfo.cachedFunction.get()) {
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id));
- return JSValue::encode(result);
- }
-
- // It makes sense to simply repatch the method_check.
-
- // Since we're accessing a prototype in a loop, it's a good bet that it
- // should not be treated as a dictionary.
- if (slotBaseObject->structure()->isDictionary())
- slotBaseObject->flattenDictionaryObject(callFrame->globalData());
-
- // The result fetched should always be the callee!
- ASSERT(result == JSValue(callee));
-
- // Check to see if the function is on the object's prototype. Patch up the code to optimize.
- if (slot.slotBase() == proto) {
- JIT::patchMethodCallProto(callFrame->globalData(), codeBlock, methodCallLinkInfo, stubInfo, callee, structure, slotBaseObject, STUB_RETURN_ADDRESS);
- return JSValue::encode(result);
- }
-
- ASSERT(slot.slotBase() == baseValue);
-
- // Since we generate the method-check to check both the structure and a prototype-structure (since this
- // is the common case) we have a problem - we need to patch the prototype structure check to do something
- // useful. We could try to nop it out altogether, but that's a little messy, so lets do something simpler
- // for now. For now it performs a check on a special object on the global object only used for this
- // purpose. The object is in no way exposed, and as such the check will always pass.
- JIT::patchMethodCallProto(callFrame->globalData(), codeBlock, methodCallLinkInfo, stubInfo, callee, structure, callFrame->scope()->globalObject()->methodCallDummy(), STUB_RETURN_ADDRESS);
- return JSValue::encode(result);
-}
-
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
{
STUB_INIT_STACK_FRAME(stackFrame);
@@ -1873,7 +1704,13 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
}
- } else if (size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset)) {
+ } else {
+ size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset);
+ if (count == InvalidPrototypeChain) {
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
+ return JSValue::encode(result);
+ }
+
ASSERT(!baseValue.asCell()->structure()->isDictionary());
int listIndex;
PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->globalData(), codeBlock->ownerExecutable(), stubInfo, listIndex);
@@ -1885,8 +1722,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
}
- } else
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
+ }
return JSValue::encode(result);
}
@@ -2308,7 +2144,7 @@ DEFINE_STUB_FUNCTION(JSObject*, op_push_activation)
{
STUB_INIT_STACK_FRAME(stackFrame);
- JSActivation* activation = JSActivation::create(stackFrame.callFrame->globalData(), stackFrame.callFrame, static_cast<FunctionExecutable*>(stackFrame.callFrame->codeBlock()->ownerExecutable()));
+ JSActivation* activation = JSActivation::create(stackFrame.callFrame->globalData(), stackFrame.callFrame, stackFrame.callFrame->codeBlock());
stackFrame.callFrame->setScope(activation);
return activation;
}
@@ -3500,13 +3336,16 @@ DEFINE_STUB_FUNCTION(void, op_put_getter_setter)
baseObj->putDirectAccessor(callFrame, stackFrame.args[1].identifier(), accessor, Accessor);
}
-DEFINE_STUB_FUNCTION(void, op_throw_reference_error)
+DEFINE_STUB_FUNCTION(void, op_throw_static_error)
{
STUB_INIT_STACK_FRAME(stackFrame);
CallFrame* callFrame = stackFrame.callFrame;
String message = stackFrame.args[0].jsValue().toString(callFrame)->value(callFrame);
- stackFrame.globalData->exception = createReferenceError(callFrame, message);
+ if (stackFrame.args[1].asInt32)
+ stackFrame.globalData->exception = createReferenceError(callFrame, message);
+ else
+ stackFrame.globalData->exception = createTypeError(callFrame, message);
VM_THROW_EXCEPTION_AT_END();
}
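
[Editor's note] The renamed stub above takes a second argument that selects which error object to construct, so the single op_throw_static_error opcode can raise either a ReferenceError or a TypeError. A simplified sketch of that dispatch, using a plain flag and C++ exceptions as stand-ins for the real JITStubCall/StackFrame machinery (the real stub stores the error in globalData->exception rather than throwing):

    #include <stdexcept>
    #include <string>

    // Stand-in error types; the real stub calls createReferenceError/createTypeError.
    struct ReferenceError : std::runtime_error { using std::runtime_error::runtime_error; };
    struct TypeError      : std::runtime_error { using std::runtime_error::runtime_error; };

    // op_throw_static_error carries two operands: the message constant and a flag.
    // A nonzero flag means "reference error", zero means "type error".
    void throwStaticError(const std::string& message, int isReferenceError) {
        if (isReferenceError)
            throw ReferenceError(message);
        throw TypeError(message);
    }
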
diff --git a/Source/JavaScriptCore/jit/JITStubs.h b/Source/JavaScriptCore/jit/JITStubs.h
index 6e3141e22..5761236b1 100644
--- a/Source/JavaScriptCore/jit/JITStubs.h
+++ b/Source/JavaScriptCore/jit/JITStubs.h
@@ -365,8 +365,6 @@ extern "C" {
EncodedJSValue JIT_STUB cti_op_get_by_id_custom_stub(STUB_ARGS_DECLARATION) WTF_INTERNAL;
EncodedJSValue JIT_STUB cti_op_get_by_id_generic(STUB_ARGS_DECLARATION) WTF_INTERNAL;
EncodedJSValue JIT_STUB cti_op_get_by_id_getter_stub(STUB_ARGS_DECLARATION) WTF_INTERNAL;
- EncodedJSValue JIT_STUB cti_op_get_by_id_method_check(STUB_ARGS_DECLARATION) WTF_INTERNAL;
- EncodedJSValue JIT_STUB cti_op_get_by_id_method_check_update(STUB_ARGS_DECLARATION) WTF_INTERNAL;
EncodedJSValue JIT_STUB cti_op_get_by_id_proto_fail(STUB_ARGS_DECLARATION) WTF_INTERNAL;
EncodedJSValue JIT_STUB cti_op_get_by_id_proto_list(STUB_ARGS_DECLARATION) WTF_INTERNAL;
EncodedJSValue JIT_STUB cti_op_get_by_id_proto_list_full(STUB_ARGS_DECLARATION) WTF_INTERNAL;
@@ -454,7 +452,7 @@ extern "C" {
void JIT_STUB cti_op_init_global_const_check(STUB_ARGS_DECLARATION) WTF_INTERNAL;
void JIT_STUB cti_op_tear_off_activation(STUB_ARGS_DECLARATION) WTF_INTERNAL;
void JIT_STUB cti_op_tear_off_arguments(STUB_ARGS_DECLARATION) WTF_INTERNAL;
- void JIT_STUB cti_op_throw_reference_error(STUB_ARGS_DECLARATION) WTF_INTERNAL;
+ void JIT_STUB cti_op_throw_static_error(STUB_ARGS_DECLARATION) WTF_INTERNAL;
#if ENABLE(DFG_JIT)
void JIT_STUB cti_optimize(STUB_ARGS_DECLARATION) WTF_INTERNAL;
#endif