path: root/Source/JavaScriptCore/interpreter
author: Lorry Tar Creator <lorry-tar-importer@lorry> 2017-06-27 06:07:23 +0000
committer: Lorry Tar Creator <lorry-tar-importer@lorry> 2017-06-27 06:07:23 +0000
commit: 1bf1084f2b10c3b47fd1a588d85d21ed0eb41d0c (patch)
tree: 46dcd36c86e7fbc6e5df36deb463b33e9967a6f7 /Source/JavaScriptCore/interpreter
parent: 32761a6cee1d0dee366b885b7b9c777e67885688 (diff)
Diffstat (limited to 'Source/JavaScriptCore/interpreter')
-rw-r--r--  Source/JavaScriptCore/interpreter/AbstractPC.cpp | 4
-rw-r--r--  Source/JavaScriptCore/interpreter/AbstractPC.h | 7
-rw-r--r--  Source/JavaScriptCore/interpreter/CLoopStack.cpp | 164
-rw-r--r--  Source/JavaScriptCore/interpreter/CLoopStack.h | 114
-rw-r--r--  Source/JavaScriptCore/interpreter/CLoopStackInlines.h | 81
-rw-r--r--  Source/JavaScriptCore/interpreter/CachedCall.h | 38
-rw-r--r--  Source/JavaScriptCore/interpreter/CallFrame.cpp | 281
-rw-r--r--  Source/JavaScriptCore/interpreter/CallFrame.h | 319
-rw-r--r--  Source/JavaScriptCore/interpreter/CallFrameClosure.h | 10
-rw-r--r--  Source/JavaScriptCore/interpreter/CallFrameInlines.h | 143
-rw-r--r--  Source/JavaScriptCore/interpreter/FrameTracers.h | 107
-rw-r--r--  Source/JavaScriptCore/interpreter/Interpreter.cpp | 1203
-rw-r--r--  Source/JavaScriptCore/interpreter/Interpreter.h | 232
-rw-r--r--  Source/JavaScriptCore/interpreter/InterpreterInlines.h | 45
-rw-r--r--  Source/JavaScriptCore/interpreter/JSStack.cpp | 160
-rw-r--r--  Source/JavaScriptCore/interpreter/JSStack.h | 164
-rw-r--r--  Source/JavaScriptCore/interpreter/JSStackInlines.h | 295
-rw-r--r--  Source/JavaScriptCore/interpreter/ProtoCallFrame.cpp | 20
-rw-r--r--  Source/JavaScriptCore/interpreter/ProtoCallFrame.h | 27
-rw-r--r--  Source/JavaScriptCore/interpreter/Register.h | 36
-rw-r--r--  Source/JavaScriptCore/interpreter/ShadowChicken.cpp | 468
-rw-r--r--  Source/JavaScriptCore/interpreter/ShadowChicken.h | 225
-rw-r--r--  Source/JavaScriptCore/interpreter/ShadowChickenInlines.h | 47
-rw-r--r--  Source/JavaScriptCore/interpreter/StackVisitor.cpp | 433
-rw-r--r--  Source/JavaScriptCore/interpreter/StackVisitor.h | 108
-rw-r--r--  Source/JavaScriptCore/interpreter/VMEntryRecord.h | 75
-rw-r--r--  Source/JavaScriptCore/interpreter/VMInspector.cpp | 572
-rw-r--r--  Source/JavaScriptCore/interpreter/VMInspector.h | 89
28 files changed, 2739 insertions, 2728 deletions
diff --git a/Source/JavaScriptCore/interpreter/AbstractPC.cpp b/Source/JavaScriptCore/interpreter/AbstractPC.cpp
index 8600b7228..09b24d5dc 100644
--- a/Source/JavaScriptCore/interpreter/AbstractPC.cpp
+++ b/Source/JavaScriptCore/interpreter/AbstractPC.cpp
@@ -27,9 +27,9 @@
#include "AbstractPC.h"
#include "CallFrame.h"
-#include "VM.h"
#include "JSObject.h"
-
+#include "JSCInlines.h"
+#include "VM.h"
namespace JSC {
diff --git a/Source/JavaScriptCore/interpreter/AbstractPC.h b/Source/JavaScriptCore/interpreter/AbstractPC.h
index c30027d9e..fc432eb5e 100644
--- a/Source/JavaScriptCore/interpreter/AbstractPC.h
+++ b/Source/JavaScriptCore/interpreter/AbstractPC.h
@@ -23,11 +23,9 @@
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef AbstractPC_h
-#define AbstractPC_h
+#pragma once
#include "MacroAssemblerCodeRef.h"
-#include <wtf/Platform.h>
namespace JSC {
@@ -71,6 +69,3 @@ private:
};
} // namespace JSC
-
-#endif // AbstractPC_h
-
diff --git a/Source/JavaScriptCore/interpreter/CLoopStack.cpp b/Source/JavaScriptCore/interpreter/CLoopStack.cpp
new file mode 100644
index 000000000..f688e88b3
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/CLoopStack.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2008, 2013-2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "CLoopStack.h"
+
+#if !ENABLE(JIT)
+
+#include "CLoopStackInlines.h"
+#include "ConservativeRoots.h"
+#include "Interpreter.h"
+#include "JSCInlines.h"
+#include "Options.h"
+#include <wtf/Lock.h>
+
+namespace JSC {
+
+static size_t committedBytesCount = 0;
+
+static size_t commitSize()
+{
+ static size_t size = std::max<size_t>(16 * 1024, pageSize());
+ return size;
+}
+
+static StaticLock stackStatisticsMutex;
+
+CLoopStack::CLoopStack(VM& vm)
+ : m_vm(vm)
+ , m_topCallFrame(vm.topCallFrame)
+ , m_end(0)
+ , m_softReservedZoneSizeInRegisters(0)
+{
+ size_t capacity = Options::maxPerThreadStackUsage();
+ ASSERT(capacity && isPageAligned(capacity));
+
+ m_reservation = PageReservation::reserve(WTF::roundUpToMultipleOf(commitSize(), capacity), OSAllocator::JSVMStackPages);
+ setCLoopStackLimit(highAddress());
+ m_commitTop = highAddress();
+
+ m_lastStackTop = baseOfStack();
+
+ m_topCallFrame = 0;
+}
+
+CLoopStack::~CLoopStack()
+{
+ ptrdiff_t sizeToDecommit = reinterpret_cast<char*>(highAddress()) - reinterpret_cast<char*>(m_commitTop);
+ m_reservation.decommit(reinterpret_cast<void*>(m_commitTop), sizeToDecommit);
+ addToCommittedByteCount(-sizeToDecommit);
+ m_reservation.deallocate();
+}
+
+bool CLoopStack::grow(Register* newTopOfStack)
+{
+ Register* newTopOfStackWithReservedZone = newTopOfStack - m_softReservedZoneSizeInRegisters;
+
+ // If we have already committed enough memory to satisfy this request,
+ // just update the end pointer and return.
+ if (newTopOfStackWithReservedZone >= m_commitTop) {
+ setCLoopStackLimit(newTopOfStack);
+ return true;
+ }
+
+ // Compute the chunk size of additional memory to commit, and see if we
+ // have it still within our budget. If not, we'll fail to grow and
+ // return false.
+ ptrdiff_t delta = reinterpret_cast<char*>(m_commitTop) - reinterpret_cast<char*>(newTopOfStackWithReservedZone);
+ delta = WTF::roundUpToMultipleOf(commitSize(), delta);
+ Register* newCommitTop = m_commitTop - (delta / sizeof(Register));
+ if (newCommitTop < reservationTop())
+ return false;
+
+ // Otherwise, the growth is still within our budget. Commit it and return true.
+ m_reservation.commit(newCommitTop, delta);
+ addToCommittedByteCount(delta);
+ m_commitTop = newCommitTop;
+ setCLoopStackLimit(newTopOfStack);
+ return true;
+}
+
+void CLoopStack::gatherConservativeRoots(ConservativeRoots& conservativeRoots, JITStubRoutineSet& jitStubRoutines, CodeBlockSet& codeBlocks)
+{
+ conservativeRoots.add(topOfStack() + 1, highAddress(), jitStubRoutines, codeBlocks);
+}
+
+void CLoopStack::sanitizeStack()
+{
+#if !ASAN_ENABLED
+ ASSERT(topOfStack() <= baseOfStack());
+
+ if (m_lastStackTop < topOfStack()) {
+ char* begin = reinterpret_cast<char*>(m_lastStackTop + 1);
+ char* end = reinterpret_cast<char*>(topOfStack() + 1);
+ memset(begin, 0, end - begin);
+ }
+
+ m_lastStackTop = topOfStack();
+#endif
+}
+
+void CLoopStack::releaseExcessCapacity()
+{
+ Register* highAddressWithReservedZone = highAddress() - m_softReservedZoneSizeInRegisters;
+ ptrdiff_t delta = reinterpret_cast<char*>(highAddressWithReservedZone) - reinterpret_cast<char*>(m_commitTop);
+ m_reservation.decommit(m_commitTop, delta);
+ addToCommittedByteCount(-delta);
+ m_commitTop = highAddressWithReservedZone;
+}
+
+void CLoopStack::addToCommittedByteCount(long byteCount)
+{
+ LockHolder locker(stackStatisticsMutex);
+ ASSERT(static_cast<long>(committedBytesCount) + byteCount > -1);
+ committedBytesCount += byteCount;
+}
+
+void CLoopStack::setSoftReservedZoneSize(size_t reservedZoneSize)
+{
+ m_softReservedZoneSizeInRegisters = reservedZoneSize / sizeof(Register);
+ if (m_commitTop >= (m_end + 1) - m_softReservedZoneSizeInRegisters)
+ grow(m_end + 1);
+}
+
+bool CLoopStack::isSafeToRecurse() const
+{
+ void* reservationLimit = reinterpret_cast<int8_t*>(reservationTop() + m_softReservedZoneSizeInRegisters);
+ return !m_topCallFrame || (m_topCallFrame->topOfFrame() > reservationLimit);
+}
+
+size_t CLoopStack::committedByteCount()
+{
+ LockHolder locker(stackStatisticsMutex);
+ return committedBytesCount;
+}
+
+} // namespace JSC
+
+#endif // !ENABLE(JIT)
diff --git a/Source/JavaScriptCore/interpreter/CLoopStack.h b/Source/JavaScriptCore/interpreter/CLoopStack.h
new file mode 100644
index 000000000..de7bba6f5
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/CLoopStack.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2008-2009, 2013-2014, 2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if !ENABLE(JIT)
+
+#include "Register.h"
+#include <wtf/Noncopyable.h>
+#include <wtf/PageReservation.h>
+#include <wtf/VMTags.h>
+
+namespace JSC {
+
+ class CodeBlockSet;
+ class ConservativeRoots;
+ class JITStubRoutineSet;
+ class VM;
+ class LLIntOffsetsExtractor;
+
+ class CLoopStack {
+ WTF_MAKE_NONCOPYABLE(CLoopStack);
+ public:
+ // Allow 8k of excess registers before we start trying to reap the stack
+ static const ptrdiff_t maxExcessCapacity = 8 * 1024;
+
+ CLoopStack(VM&);
+ ~CLoopStack();
+
+ bool ensureCapacityFor(Register* newTopOfStack);
+
+ bool containsAddress(Register* address) { return (lowAddress() <= address && address < highAddress()); }
+ static size_t committedByteCount();
+
+ void gatherConservativeRoots(ConservativeRoots&, JITStubRoutineSet&, CodeBlockSet&);
+ void sanitizeStack();
+
+ Register* baseOfStack() const
+ {
+ return highAddress() - 1;
+ }
+
+ size_t size() const { return highAddress() - lowAddress(); }
+
+ void setSoftReservedZoneSize(size_t);
+ bool isSafeToRecurse() const;
+ inline Register* topOfStack();
+
+ private:
+
+ Register* lowAddress() const
+ {
+ return m_end + 1;
+ }
+
+ Register* highAddress() const
+ {
+ return reinterpret_cast_ptr<Register*>(static_cast<char*>(m_reservation.base()) + m_reservation.size());
+ }
+
+ inline Register* topOfFrameFor(CallFrame*);
+
+ Register* reservationTop() const
+ {
+ char* reservationTop = static_cast<char*>(m_reservation.base());
+ return reinterpret_cast_ptr<Register*>(reservationTop);
+ }
+
+ bool grow(Register* newTopOfStack);
+ void shrink(Register* newTopOfStack);
+ void releaseExcessCapacity();
+ void addToCommittedByteCount(long);
+
+ void setCLoopStackLimit(Register* newTopOfStack);
+
+ VM& m_vm;
+ CallFrame*& m_topCallFrame;
+ Register* m_end;
+ Register* m_commitTop;
+ PageReservation m_reservation;
+ Register* m_lastStackTop;
+ ptrdiff_t m_softReservedZoneSizeInRegisters;
+
+ friend class LLIntOffsetsExtractor;
+ };
+
+} // namespace JSC
+
+#endif // !ENABLE(JIT)
diff --git a/Source/JavaScriptCore/interpreter/CLoopStackInlines.h b/Source/JavaScriptCore/interpreter/CLoopStackInlines.h
new file mode 100644
index 000000000..44e385bdc
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/CLoopStackInlines.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2012-2014, 2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if !ENABLE(JIT)
+
+#include "CLoopStack.h"
+#include "CallFrame.h"
+#include "CodeBlock.h"
+#include "VM.h"
+
+namespace JSC {
+
+inline bool CLoopStack::ensureCapacityFor(Register* newTopOfStack)
+{
+ Register* newEnd = newTopOfStack - 1;
+ if (newEnd >= m_end)
+ return true;
+ return grow(newTopOfStack);
+}
+
+inline Register* CLoopStack::topOfFrameFor(CallFrame* frame)
+{
+ if (UNLIKELY(!frame))
+ return baseOfStack();
+ return frame->topOfFrame() - 1;
+}
+
+inline Register* CLoopStack::topOfStack()
+{
+ return topOfFrameFor(m_topCallFrame);
+}
+
+inline void CLoopStack::shrink(Register* newTopOfStack)
+{
+ Register* newEnd = newTopOfStack - 1;
+ if (newEnd >= m_end)
+ return;
+ setCLoopStackLimit(newTopOfStack);
+ // Note: Clang complains of an unresolved linkage to maxExcessCapacity if
+ // invoke std::max() with it as an argument. To work around this, we first
+ // assign the constant to a local variable, and use the local instead.
+ ptrdiff_t maxExcessCapacity = CLoopStack::maxExcessCapacity;
+ ptrdiff_t maxExcessInRegisters = std::max(maxExcessCapacity, m_softReservedZoneSizeInRegisters);
+ if (m_end == baseOfStack() && (highAddress() - m_commitTop) >= maxExcessInRegisters)
+ releaseExcessCapacity();
+}
+
+inline void CLoopStack::setCLoopStackLimit(Register* newTopOfStack)
+{
+ Register* newEnd = newTopOfStack - 1;
+ m_end = newEnd;
+ m_vm.setCLoopStackLimit(newTopOfStack);
+}
+
+} // namespace JSC
+
+#endif // !ENABLE(JIT)
diff --git a/Source/JavaScriptCore/interpreter/CachedCall.h b/Source/JavaScriptCore/interpreter/CachedCall.h
index 2ca3e9794..fb770cb99 100644
--- a/Source/JavaScriptCore/interpreter/CachedCall.h
+++ b/Source/JavaScriptCore/interpreter/CachedCall.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2009, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -23,8 +23,7 @@
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef CachedCall_h
-#define CachedCall_h
+#pragma once
#include "CallFrameClosure.h"
#include "ExceptionHelpers.h"
@@ -33,41 +32,50 @@
#include "Interpreter.h"
#include "ProtoCallFrame.h"
#include "VMEntryScope.h"
+#include "VMInlines.h"
+#include <wtf/ForbidHeapAllocation.h>
namespace JSC {
class CachedCall {
- WTF_MAKE_NONCOPYABLE(CachedCall); WTF_MAKE_FAST_ALLOCATED;
+ WTF_MAKE_NONCOPYABLE(CachedCall);
+ WTF_FORBID_HEAP_ALLOCATION;
public:
CachedCall(CallFrame* callFrame, JSFunction* function, int argumentCount)
: m_valid(false)
, m_interpreter(callFrame->interpreter())
- , m_entryScope(callFrame->vm(), function->scope()->globalObject())
+ , m_vm(callFrame->vm())
+ , m_entryScope(m_vm, function->scope()->globalObject(m_vm))
{
- ASSERT(!function->isHostFunction());
- if (callFrame->vm().isSafeToRecurse()) {
- m_arguments.resize(argumentCount);
- m_closure = m_interpreter->prepareForRepeatCall(function->jsExecutable(), callFrame, &m_protoCallFrame, function, argumentCount + 1, function->scope(), m_arguments.data());
+ VM& vm = m_entryScope.vm();
+ auto scope = DECLARE_THROW_SCOPE(vm);
+
+ ASSERT(!function->isHostFunctionNonInline());
+ if (UNLIKELY(vm.isSafeToRecurseSoft())) {
+ m_arguments.ensureCapacity(argumentCount);
+ m_closure = m_interpreter->prepareForRepeatCall(function->jsExecutable(), callFrame, &m_protoCallFrame, function, argumentCount + 1, function->scope(), m_arguments);
} else
- throwStackOverflowError(callFrame);
- m_valid = !callFrame->hadException();
+ throwStackOverflowError(callFrame, scope);
+ m_valid = !scope.exception();
}
JSValue call()
{
ASSERT(m_valid);
+ ASSERT(m_arguments.size() == static_cast<size_t>(m_protoCallFrame.argumentCount()));
return m_interpreter->execute(m_closure);
}
void setThis(JSValue v) { m_protoCallFrame.setThisValue(v); }
- void setArgument(int n, JSValue v) { m_protoCallFrame.setArgument(n, v); }
+
+ void clearArguments() { m_arguments.clear(); }
+ void appendArgument(JSValue v) { m_arguments.append(v); }
private:
bool m_valid;
Interpreter* m_interpreter;
+ VM& m_vm;
VMEntryScope m_entryScope;
ProtoCallFrame m_protoCallFrame;
- Vector<JSValue> m_arguments;
+ MarkedArgumentBuffer m_arguments;
CallFrameClosure m_closure;
};
}
-
-#endif
diff --git a/Source/JavaScriptCore/interpreter/CallFrame.cpp b/Source/JavaScriptCore/interpreter/CallFrame.cpp
index a226e9848..de57c0100 100644
--- a/Source/JavaScriptCore/interpreter/CallFrame.cpp
+++ b/Source/JavaScriptCore/interpreter/CallFrame.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008, 2013 Apple Inc. All Rights Reserved.
+ * Copyright (C) 2008, 2013-2014, 2016 Apple Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,80 +26,140 @@
#include "config.h"
#include "CallFrame.h"
-#include "CallFrameInlines.h"
#include "CodeBlock.h"
+#include "InlineCallFrame.h"
#include "Interpreter.h"
-#include "Operations.h"
+#include "JSCInlines.h"
#include "VMEntryScope.h"
+#include <wtf/StringPrintStream.h>
namespace JSC {
-#ifndef NDEBUG
-JSStack* CallFrame::stack()
+void ExecState::initGlobalExec(ExecState* globalExec, JSCallee* globalCallee)
{
- return &interpreter()->stack();
+ globalExec->setCodeBlock(nullptr);
+ globalExec->setCallerFrame(noCaller());
+ globalExec->setReturnPC(0);
+ globalExec->setArgumentCountIncludingThis(0);
+ globalExec->setCallee(globalCallee);
}
-#endif
-
-#if USE(JSVALUE32_64)
-unsigned CallFrame::locationAsBytecodeOffset() const
+bool CallFrame::callSiteBitsAreBytecodeOffset() const
{
ASSERT(codeBlock());
- ASSERT(hasLocationAsBytecodeOffset());
- return currentVPC() - codeBlock()->instructions().begin();
+ switch (codeBlock()->jitType()) {
+ case JITCode::InterpreterThunk:
+ case JITCode::BaselineJIT:
+ return true;
+ case JITCode::None:
+ case JITCode::HostCallThunk:
+ RELEASE_ASSERT_NOT_REACHED();
+ return false;
+ default:
+ return false;
+ }
+
+ RELEASE_ASSERT_NOT_REACHED();
+ return false;
}
-void CallFrame::setLocationAsBytecodeOffset(unsigned offset)
+bool CallFrame::callSiteBitsAreCodeOriginIndex() const
{
ASSERT(codeBlock());
- setCurrentVPC(codeBlock()->instructions().begin() + offset);
- ASSERT(hasLocationAsBytecodeOffset());
+ switch (codeBlock()->jitType()) {
+ case JITCode::DFGJIT:
+ case JITCode::FTLJIT:
+ return true;
+ case JITCode::None:
+ case JITCode::HostCallThunk:
+ RELEASE_ASSERT_NOT_REACHED();
+ return false;
+ default:
+ return false;
+ }
+
+ RELEASE_ASSERT_NOT_REACHED();
+ return false;
}
-#else
+
+unsigned CallFrame::callSiteAsRawBits() const
+{
+ return this[CallFrameSlot::argumentCount].tag();
+}
+
+SUPPRESS_ASAN unsigned CallFrame::unsafeCallSiteAsRawBits() const
+{
+ return this[CallFrameSlot::argumentCount].unsafeTag();
+}
+
+CallSiteIndex CallFrame::callSiteIndex() const
+{
+ return CallSiteIndex(callSiteAsRawBits());
+}
+
+SUPPRESS_ASAN CallSiteIndex CallFrame::unsafeCallSiteIndex() const
+{
+ return CallSiteIndex(unsafeCallSiteAsRawBits());
+}
+
+#if USE(JSVALUE32_64)
Instruction* CallFrame::currentVPC() const
{
- return codeBlock()->instructions().begin() + locationAsBytecodeOffset();
+ return bitwise_cast<Instruction*>(callSiteIndex().bits());
}
+
void CallFrame::setCurrentVPC(Instruction* vpc)
{
- setLocationAsBytecodeOffset(vpc - codeBlock()->instructions().begin());
+ CallSiteIndex callSite(vpc);
+ this[CallFrameSlot::argumentCount].tag() = callSite.bits();
}
-#endif
-
-#if ENABLE(DFG_JIT)
-unsigned CallFrame::bytecodeOffsetFromCodeOriginIndex()
-{
- ASSERT(hasLocationAsCodeOriginIndex());
- CodeBlock* codeBlock = this->codeBlock();
- ASSERT(codeBlock);
- CodeOrigin codeOrigin;
- unsigned index = locationAsCodeOriginIndex();
- ASSERT(codeBlock->canGetCodeOrigin(index));
- codeOrigin = codeBlock->codeOrigin(index);
+unsigned CallFrame::callSiteBitsAsBytecodeOffset() const
+{
+ ASSERT(codeBlock());
+ ASSERT(callSiteBitsAreBytecodeOffset());
+ return currentVPC() - codeBlock()->instructions().begin();
+}
- for (InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame; inlineCallFrame;) {
- if (inlineCallFrame->baselineCodeBlock() == codeBlock)
- return codeOrigin.bytecodeIndex;
+#else // USE(JSVALUE32_64)
+Instruction* CallFrame::currentVPC() const
+{
+ ASSERT(callSiteBitsAreBytecodeOffset());
+ return codeBlock()->instructions().begin() + callSiteBitsAsBytecodeOffset();
+}
- codeOrigin = inlineCallFrame->caller;
- inlineCallFrame = codeOrigin.inlineCallFrame;
- }
- return codeOrigin.bytecodeIndex;
+void CallFrame::setCurrentVPC(Instruction* vpc)
+{
+ CallSiteIndex callSite(vpc - codeBlock()->instructions().begin());
+ this[CallFrameSlot::argumentCount].tag() = static_cast<int32_t>(callSite.bits());
}
-#endif // ENABLE(DFG_JIT)
+unsigned CallFrame::callSiteBitsAsBytecodeOffset() const
+{
+ ASSERT(codeBlock());
+ ASSERT(callSiteBitsAreBytecodeOffset());
+ return callSiteIndex().bits();
+}
+#endif
+
unsigned CallFrame::bytecodeOffset()
{
if (!codeBlock())
return 0;
#if ENABLE(DFG_JIT)
- if (hasLocationAsCodeOriginIndex())
- return bytecodeOffsetFromCodeOriginIndex();
+ if (callSiteBitsAreCodeOriginIndex()) {
+ ASSERT(codeBlock());
+ CodeOrigin codeOrigin = this->codeOrigin();
+ for (InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame; inlineCallFrame;) {
+ codeOrigin = inlineCallFrame->directCaller;
+ inlineCallFrame = codeOrigin.inlineCallFrame;
+ }
+ return codeOrigin.bytecodeIndex;
+ }
#endif
- return locationAsBytecodeOffset();
+ ASSERT(callSiteBitsAreBytecodeOffset());
+ return callSiteBitsAsBytecodeOffset();
}
CodeOrigin CallFrame::codeOrigin()
@@ -107,26 +167,29 @@ CodeOrigin CallFrame::codeOrigin()
if (!codeBlock())
return CodeOrigin(0);
#if ENABLE(DFG_JIT)
- if (hasLocationAsCodeOriginIndex()) {
- unsigned index = locationAsCodeOriginIndex();
+ if (callSiteBitsAreCodeOriginIndex()) {
+ CallSiteIndex index = callSiteIndex();
ASSERT(codeBlock()->canGetCodeOrigin(index));
return codeBlock()->codeOrigin(index);
}
#endif
- return CodeOrigin(locationAsBytecodeOffset());
+ return CodeOrigin(callSiteBitsAsBytecodeOffset());
}
-Register* CallFrame::frameExtentInternal()
+Register* CallFrame::topOfFrameInternal()
{
CodeBlock* codeBlock = this->codeBlock();
ASSERT(codeBlock);
- return registers() + virtualRegisterForLocal(codeBlock->frameRegisterCount()).offset();
+ return registers() + codeBlock->stackPointerOffset();
}
JSGlobalObject* CallFrame::vmEntryGlobalObject()
{
- if (this == lexicalGlobalObject()->globalExec())
- return lexicalGlobalObject();
+ if (callee()->isObject()) {
+ if (this == lexicalGlobalObject()->globalExec())
+ return lexicalGlobalObject();
+ }
+ // If we're not an object, we're wasm, and therefore we're executing code and the below is safe.
// For any ExecState that's not a globalExec, the
// dynamic global object must be set since code is running
@@ -134,4 +197,126 @@ JSGlobalObject* CallFrame::vmEntryGlobalObject()
return vm().entryScope->globalObject();
}
+CallFrame* CallFrame::callerFrame(VMEntryFrame*& currVMEntryFrame)
+{
+ if (callerFrameOrVMEntryFrame() == currVMEntryFrame) {
+ VMEntryRecord* currVMEntryRecord = vmEntryRecord(currVMEntryFrame);
+ currVMEntryFrame = currVMEntryRecord->prevTopVMEntryFrame();
+ return currVMEntryRecord->prevTopCallFrame();
+ }
+ return static_cast<CallFrame*>(callerFrameOrVMEntryFrame());
+}
+
+SUPPRESS_ASAN CallFrame* CallFrame::unsafeCallerFrame(VMEntryFrame*& currVMEntryFrame)
+{
+ if (unsafeCallerFrameOrVMEntryFrame() == currVMEntryFrame) {
+ VMEntryRecord* currVMEntryRecord = vmEntryRecord(currVMEntryFrame);
+ currVMEntryFrame = currVMEntryRecord->unsafePrevTopVMEntryFrame();
+ return currVMEntryRecord->unsafePrevTopCallFrame();
+ }
+ return static_cast<CallFrame*>(unsafeCallerFrameOrVMEntryFrame());
+}
+
+SourceOrigin CallFrame::callerSourceOrigin()
+{
+ SourceOrigin sourceOrigin;
+ bool haveSkippedFirstFrame = false;
+ StackVisitor::visit(this, [&](StackVisitor& visitor) {
+ if (!std::exchange(haveSkippedFirstFrame, true))
+ return StackVisitor::Status::Continue;
+
+ switch (visitor->codeType()) {
+ case StackVisitor::Frame::CodeType::Function:
+ // Skip the builtin functions since they should not pass the source origin to the dynamic code generation calls.
+ // Consider the following code.
+ //
+ // [ "42 + 44" ].forEach(eval);
+ //
+ // In the above case, the eval function will be interpreted as the indirect call to eval inside forEach function.
+ // At that time, the generated eval code should have the source origin to the original caller of the forEach function
+ // instead of the source origin of the forEach function.
+ if (static_cast<FunctionExecutable*>(visitor->codeBlock()->ownerScriptExecutable())->isBuiltinFunction())
+ return StackVisitor::Status::Continue;
+ FALLTHROUGH;
+
+ case StackVisitor::Frame::CodeType::Eval:
+ case StackVisitor::Frame::CodeType::Module:
+ case StackVisitor::Frame::CodeType::Global:
+ sourceOrigin = visitor->codeBlock()->ownerScriptExecutable()->sourceOrigin();
+ return StackVisitor::Status::Done;
+
+ case StackVisitor::Frame::CodeType::Native:
+ return StackVisitor::Status::Continue;
+
+ case StackVisitor::Frame::CodeType::Wasm:
+ // FIXME: Should return the source origin for WASM.
+ return StackVisitor::Status::Done;
+ }
+
+ RELEASE_ASSERT_NOT_REACHED();
+ return StackVisitor::Status::Done;
+ });
+ return sourceOrigin;
+}
+
+String CallFrame::friendlyFunctionName()
+{
+ CodeBlock* codeBlock = this->codeBlock();
+ if (!codeBlock)
+ return emptyString();
+
+ switch (codeBlock->codeType()) {
+ case EvalCode:
+ return ASCIILiteral("eval code");
+ case ModuleCode:
+ return ASCIILiteral("module code");
+ case GlobalCode:
+ return ASCIILiteral("global code");
+ case FunctionCode:
+ if (jsCallee())
+ return getCalculatedDisplayName(vm(), jsCallee());
+ return emptyString();
+ }
+
+ ASSERT_NOT_REACHED();
+ return emptyString();
+}
+
+void CallFrame::dump(PrintStream& out)
+{
+ if (CodeBlock* codeBlock = this->codeBlock()) {
+ out.print(codeBlock->inferredName(), "#", codeBlock->hashAsStringIfPossible(), " [", codeBlock->jitType(), "]");
+
+ out.print("(");
+ thisValue().dumpForBacktrace(out);
+
+ for (size_t i = 0; i < argumentCount(); ++i) {
+ out.print(", ");
+ JSValue value = argument(i);
+ value.dumpForBacktrace(out);
+ }
+
+ out.print(")");
+
+ return;
+ }
+
+ out.print(returnPC());
+}
+
+const char* CallFrame::describeFrame()
+{
+ const size_t bufferSize = 200;
+ static char buffer[bufferSize + 1];
+
+ WTF::StringPrintStream stringStream;
+
+ dump(stringStream);
+
+ strncpy(buffer, stringStream.toCString().data(), bufferSize);
+ buffer[bufferSize] = '\0';
+
+ return buffer;
+}
+
} // namespace JSC
diff --git a/Source/JavaScriptCore/interpreter/CallFrame.h b/Source/JavaScriptCore/interpreter/CallFrame.h
index 48fbcd779..6b70a0ec5 100644
--- a/Source/JavaScriptCore/interpreter/CallFrame.h
+++ b/Source/JavaScriptCore/interpreter/CallFrame.h
@@ -1,7 +1,7 @@
/*
* Copyright (C) 1999-2001 Harri Porten (porten@kde.org)
* Copyright (C) 2001 Peter Kelly (pmk@post.com)
- * Copyright (C) 2003, 2007, 2008, 2011, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2003-2017 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -20,34 +20,83 @@
*
*/
-#ifndef CallFrame_h
-#define CallFrame_h
+#pragma once
#include "AbstractPC.h"
-#include "VM.h"
-#include "JSStack.h"
#include "MacroAssemblerCodeRef.h"
#include "Register.h"
#include "StackVisitor.h"
+#include "VM.h"
+#include "VMEntryRecord.h"
namespace JSC {
class Arguments;
- class JSActivation;
+ class ExecState;
class Interpreter;
+ class JSCallee;
class JSScope;
+ struct Instruction;
+
+ typedef ExecState CallFrame;
+
+ struct CallSiteIndex {
+ CallSiteIndex()
+ : m_bits(UINT_MAX)
+ {
+ }
+
+ explicit CallSiteIndex(uint32_t bits)
+ : m_bits(bits)
+ { }
+#if USE(JSVALUE32_64)
+ explicit CallSiteIndex(Instruction* instruction)
+ : m_bits(bitwise_cast<uint32_t>(instruction))
+ { }
+#endif
+
+ explicit operator bool() const { return m_bits != UINT_MAX; }
+ bool operator==(const CallSiteIndex& other) const { return m_bits == other.m_bits; }
+
+ inline uint32_t bits() const { return m_bits; }
+
+ private:
+ uint32_t m_bits;
+ };
+
+ struct CallerFrameAndPC {
+ CallFrame* callerFrame;
+ Instruction* pc;
+ static const int sizeInRegisters = 2 * sizeof(void*) / sizeof(Register);
+ };
+ static_assert(CallerFrameAndPC::sizeInRegisters == sizeof(CallerFrameAndPC) / sizeof(Register), "CallerFrameAndPC::sizeInRegisters is incorrect.");
+
+ struct CallFrameSlot {
+ static const int codeBlock = CallerFrameAndPC::sizeInRegisters;
+ static const int callee = codeBlock + 1;
+ static const int argumentCount = callee + 1;
+ static const int thisArgument = argumentCount + 1;
+ static const int firstArgument = thisArgument + 1;
+ };
+
// Represents the current state of script execution.
// Passed as the first argument to most functions.
class ExecState : private Register {
public:
- JSValue calleeAsValue() const { return this[JSStack::Callee].jsValue(); }
- JSObject* callee() const { return this[JSStack::Callee].function(); }
- CodeBlock* codeBlock() const { return this[JSStack::CodeBlock].Register::codeBlock(); }
- JSScope* scope() const
+ static const int headerSizeInRegisters = CallFrameSlot::argumentCount + 1;
+
+ JSValue calleeAsValue() const { return this[CallFrameSlot::callee].jsValue(); }
+ JSObject* jsCallee() const { return this[CallFrameSlot::callee].object(); }
+ JSCell* callee() const { return this[CallFrameSlot::callee].unboxedCell(); }
+ SUPPRESS_ASAN JSValue unsafeCallee() const { return this[CallFrameSlot::callee].asanUnsafeJSValue(); }
+ CodeBlock* codeBlock() const { return this[CallFrameSlot::codeBlock].Register::codeBlock(); }
+ CodeBlock** addressOfCodeBlock() const { return bitwise_cast<CodeBlock**>(this + CallFrameSlot::codeBlock); }
+ SUPPRESS_ASAN CodeBlock* unsafeCodeBlock() const { return this[CallFrameSlot::codeBlock].Register::asanUnsafeCodeBlock(); }
+ JSScope* scope(int scopeRegisterOffset) const
{
- ASSERT(this[JSStack::ScopeChain].Register::scope());
- return this[JSStack::ScopeChain].Register::scope();
+ ASSERT(this[scopeRegisterOffset].Register::scope());
+ return this[scopeRegisterOffset].Register::scope();
}
// Global object in which execution began.
@@ -68,50 +117,28 @@ namespace JSC {
// pointer, so these are inefficient, and should be used sparingly in new code.
// But they're used in many places in legacy code, so they're not going away any time soon.
- void clearException() { vm().clearException(); }
- void clearSupplementaryExceptionInfo()
- {
- vm().clearExceptionStack();
- }
-
- JSValue exception() const { return vm().exception(); }
- bool hadException() const { return !vm().exception().isEmpty(); }
-
+ AtomicStringTable* atomicStringTable() const { return vm().atomicStringTable(); }
const CommonIdentifiers& propertyNames() const { return *vm().propertyNames; }
- const MarkedArgumentBuffer& emptyList() const { return *vm().emptyList; }
+ const ArgList& emptyList() const { return *vm().emptyList; }
Interpreter* interpreter() { return vm().interpreter; }
Heap* heap() { return &vm().heap; }
-#ifndef NDEBUG
- void dumpCaller();
-#endif
- static const HashTable& arrayConstructorTable(VM& vm) { return *vm.arrayConstructorTable; }
- static const HashTable& arrayPrototypeTable(VM& vm) { return *vm.arrayPrototypeTable; }
- static const HashTable& booleanPrototypeTable(VM& vm) { return *vm.booleanPrototypeTable; }
- static const HashTable& dataViewTable(VM& vm) { return *vm.dataViewTable; }
- static const HashTable& dateTable(VM& vm) { return *vm.dateTable; }
- static const HashTable& dateConstructorTable(VM& vm) { return *vm.dateConstructorTable; }
- static const HashTable& errorPrototypeTable(VM& vm) { return *vm.errorPrototypeTable; }
- static const HashTable& globalObjectTable(VM& vm) { return *vm.globalObjectTable; }
- static const HashTable& jsonTable(VM& vm) { return *vm.jsonTable; }
- static const HashTable& numberConstructorTable(VM& vm) { return *vm.numberConstructorTable; }
- static const HashTable& numberPrototypeTable(VM& vm) { return *vm.numberPrototypeTable; }
- static const HashTable& objectConstructorTable(VM& vm) { return *vm.objectConstructorTable; }
- static const HashTable& privateNamePrototypeTable(VM& vm) { return *vm.privateNamePrototypeTable; }
- static const HashTable& regExpTable(VM& vm) { return *vm.regExpTable; }
- static const HashTable& regExpConstructorTable(VM& vm) { return *vm.regExpConstructorTable; }
- static const HashTable& regExpPrototypeTable(VM& vm) { return *vm.regExpPrototypeTable; }
- static const HashTable& stringConstructorTable(VM& vm) { return *vm.stringConstructorTable; }
-#if ENABLE(PROMISES)
- static const HashTable& promisePrototypeTable(VM& vm) { return *vm.promisePrototypeTable; }
- static const HashTable& promiseConstructorTable(VM& vm) { return *vm.promiseConstructorTable; }
-#endif
+
static CallFrame* create(Register* callFrameBase) { return static_cast<CallFrame*>(callFrameBase); }
Register* registers() { return this; }
+ const Register* registers() const { return this; }
CallFrame& operator=(const Register& r) { *static_cast<Register*>(this) = r; return *this; }
- CallFrame* callerFrame() const { return callerFrameAndPC().callerFrame; }
+ CallFrame* callerFrame() const { return static_cast<CallFrame*>(callerFrameOrVMEntryFrame()); }
+ void* callerFrameOrVMEntryFrame() const { return callerFrameAndPC().callerFrame; }
+ SUPPRESS_ASAN void* unsafeCallerFrameOrVMEntryFrame() const { return unsafeCallerFrameAndPC().callerFrame; }
+
+ CallFrame* unsafeCallerFrame(VMEntryFrame*&);
+ JS_EXPORT_PRIVATE CallFrame* callerFrame(VMEntryFrame*&);
+
+ JS_EXPORT_PRIVATE SourceOrigin callerSourceOrigin();
+
static ptrdiff_t callerFrameOffset() { return OBJECT_OFFSETOF(CallerFrameAndPC, callerFrame); }
ReturnAddressPtr returnPC() const { return ReturnAddressPtr(callerFrameAndPC().pc); }
@@ -120,51 +147,17 @@ namespace JSC {
static ptrdiff_t returnPCOffset() { return OBJECT_OFFSETOF(CallerFrameAndPC, pc); }
AbstractPC abstractReturnPC(VM& vm) { return AbstractPC(vm, this); }
- class Location {
- public:
- static inline uint32_t decode(uint32_t bits);
-
- static inline bool isBytecodeLocation(uint32_t bits);
-#if USE(JSVALUE64)
- static inline uint32_t encodeAsBytecodeOffset(uint32_t bits);
-#else
- static inline uint32_t encodeAsBytecodeInstruction(Instruction*);
-#endif
-
- static inline bool isCodeOriginIndex(uint32_t bits);
- static inline uint32_t encodeAsCodeOriginIndex(uint32_t bits);
-
- private:
- enum TypeTag {
- BytecodeLocationTag = 0,
- CodeOriginIndexTag = 1,
- };
-
- static inline uint32_t encode(TypeTag, uint32_t bits);
-
- static const uint32_t s_mask = 0x1;
-#if USE(JSVALUE64)
- static const uint32_t s_shift = 31;
- static const uint32_t s_shiftedMask = s_mask << s_shift;
-#else
- static const uint32_t s_shift = 1;
-#endif
- };
-
- bool hasLocationAsBytecodeOffset() const;
- bool hasLocationAsCodeOriginIndex() const;
-
- unsigned locationAsRawBits() const;
- unsigned locationAsBytecodeOffset() const;
- unsigned locationAsCodeOriginIndex() const;
+ bool callSiteBitsAreBytecodeOffset() const;
+ bool callSiteBitsAreCodeOriginIndex() const;
- void setLocationAsRawBits(unsigned);
- void setLocationAsBytecodeOffset(unsigned);
+ unsigned callSiteAsRawBits() const;
+ unsigned unsafeCallSiteAsRawBits() const;
+ CallSiteIndex callSiteIndex() const;
+ CallSiteIndex unsafeCallSiteIndex() const;
+ private:
+ unsigned callSiteBitsAsBytecodeOffset() const;
+ public:
-#if ENABLE(DFG_JIT)
- unsigned bytecodeOffsetFromCodeOriginIndex();
-#endif
-
// This will try to get you the bytecode offset, but you should be aware that
// this bytecode offset may be bogus in the presence of inlining. This will
// also return 0 if the call frame has no notion of bytecode offsets (for
@@ -174,59 +167,35 @@ namespace JSC {
// This will get you a CodeOrigin. It will always succeed. May return
// CodeOrigin(0) if we're in native code.
- CodeOrigin codeOrigin();
+ JS_EXPORT_PRIVATE CodeOrigin codeOrigin();
- Register* frameExtent()
+ Register* topOfFrame()
{
- if (isVMEntrySentinel() || !codeBlock())
- return registers() - 1;
- return frameExtentInternal();
+ if (!codeBlock())
+ return registers();
+ return topOfFrameInternal();
}
- Register* frameExtentInternal();
-
-#if USE(JSVALUE32_64)
- Instruction* currentVPC() const
- {
- ASSERT(!isVMEntrySentinel());
- return bitwise_cast<Instruction*>(this[JSStack::ArgumentCount].tag());
- }
- void setCurrentVPC(Instruction* vpc)
- {
- ASSERT(!isVMEntrySentinel());
- this[JSStack::ArgumentCount].tag() = bitwise_cast<int32_t>(vpc);
- }
-#else
- Instruction* currentVPC() const;
+ Instruction* currentVPC() const; // This only makes sense in the LLInt and baseline.
void setCurrentVPC(Instruction* vpc);
-#endif
void setCallerFrame(CallFrame* frame) { callerFrameAndPC().callerFrame = frame; }
- void setScope(JSScope* scope) { static_cast<Register*>(this)[JSStack::ScopeChain] = scope; }
+ void setScope(int scopeRegisterOffset, JSScope* scope) { static_cast<Register*>(this)[scopeRegisterOffset] = scope; }
- ALWAYS_INLINE void init(CodeBlock* codeBlock, Instruction* vPC, JSScope* scope,
- CallFrame* callerFrame, int argc, JSObject* callee)
- {
- ASSERT(callerFrame == noCaller() || callerFrame->isVMEntrySentinel() || callerFrame->stack()->containsAddress(this));
-
- setCodeBlock(codeBlock);
- setScope(scope);
- setCallerFrame(callerFrame);
- setReturnPC(vPC); // This is either an Instruction* or a pointer into JIT generated code stored as an Instruction*.
- setArgumentCountIncludingThis(argc); // original argument count (for the sake of the "arguments" object)
- setCallee(callee);
- }
+ static void initGlobalExec(ExecState* globalExec, JSCallee* globalCallee);
// Read a register from the codeframe (or constant from the CodeBlock).
Register& r(int);
+ Register& r(VirtualRegister);
// Read a register for a non-constant
Register& uncheckedR(int);
+ Register& uncheckedR(VirtualRegister);
// Access to arguments as passed. (After capture, arguments may move to a different location.)
size_t argumentCount() const { return argumentCountIncludingThis() - 1; }
- size_t argumentCountIncludingThis() const { return this[JSStack::ArgumentCount].payload(); }
- static int argumentOffset(int argument) { return (JSStack::FirstArgument + argument); }
- static int argumentOffsetIncludingThis(int argument) { return (JSStack::ThisArgument + argument); }
+ size_t argumentCountIncludingThis() const { return this[CallFrameSlot::argumentCount].payload(); }
+ static int argumentOffset(int argument) { return (CallFrameSlot::firstArgument + argument); }
+ static int argumentOffsetIncludingThis(int argument) { return (CallFrameSlot::thisArgument + argument); }
// In the following (argument() and setArgument()), the 'argument'
// parameter is the index of the arguments of the target function of
@@ -237,6 +206,7 @@ namespace JSC {
// arguments(0) will not fetch the 'this' value. To get/set 'this',
// use thisValue() and setThisValue() below.
+ JSValue* addressOfArgumentsStart() const { return bitwise_cast<JSValue*>(this + argumentOffset(0)); }
JSValue argument(size_t argument)
{
if (argument >= argumentCount())
@@ -253,71 +223,55 @@ namespace JSC {
this[argumentOffset(argument)] = value;
}
+ JSValue getArgumentUnsafe(size_t argIndex)
+ {
+ // User beware! This method does not verify that there is a valid
+ // argument at the specified argIndex. This is used for debugging
+ // and verification code only. The caller is expected to know what
+ // he/she is doing when calling this method.
+ return this[argumentOffset(argIndex)].jsValue();
+ }
+
static int thisArgumentOffset() { return argumentOffsetIncludingThis(0); }
JSValue thisValue() { return this[thisArgumentOffset()].jsValue(); }
void setThisValue(JSValue value) { this[thisArgumentOffset()] = value; }
- JSValue argumentAfterCapture(size_t argument);
+ // Under the constructor implemented in C++, thisValue holds the newTarget instead of the automatically constructed value.
+ // The result of this function is only effective under the "construct" context.
+ JSValue newTarget() { return thisValue(); }
- static int offsetFor(size_t argumentCountIncludingThis) { return argumentCountIncludingThis + JSStack::ThisArgument - 1; }
+ JSValue argumentAfterCapture(size_t argument);
- // FIXME: Remove these.
- int hostThisRegister() { return thisArgumentOffset(); }
- JSValue hostThisValue() { return thisValue(); }
+ static int offsetFor(size_t argumentCountIncludingThis) { return argumentCountIncludingThis + CallFrameSlot::thisArgument - 1; }
static CallFrame* noCaller() { return 0; }
- bool isVMEntrySentinel() const
- {
- return !!this && codeBlock() == vmEntrySentinelCodeBlock();
- }
-
- CallFrame* vmEntrySentinelCallerFrame() const
- {
- ASSERT(isVMEntrySentinel());
- return this[JSStack::ScopeChain].callFrame();
- }
-
- void initializeVMEntrySentinelFrame(CallFrame* callFrame)
- {
- setCallerFrame(noCaller());
- setReturnPC(0);
- setCodeBlock(vmEntrySentinelCodeBlock());
- static_cast<Register*>(this)[JSStack::ScopeChain] = callFrame;
- setCallee(0);
- setArgumentCountIncludingThis(0);
- }
-
- CallFrame* callerFrameSkippingVMEntrySentinel()
- {
- CallFrame* caller = callerFrame();
- if (caller->isVMEntrySentinel())
- return caller->vmEntrySentinelCallerFrame();
- return caller;
- }
-
- void setArgumentCountIncludingThis(int count) { static_cast<Register*>(this)[JSStack::ArgumentCount].payload() = count; }
- void setCallee(JSObject* callee) { static_cast<Register*>(this)[JSStack::Callee] = Register::withCallee(callee); }
- void setCodeBlock(CodeBlock* codeBlock) { static_cast<Register*>(this)[JSStack::CodeBlock] = codeBlock; }
+ void setArgumentCountIncludingThis(int count) { static_cast<Register*>(this)[CallFrameSlot::argumentCount].payload() = count; }
+ void setCallee(JSObject* callee) { static_cast<Register*>(this)[CallFrameSlot::callee] = callee; }
+ void setCodeBlock(CodeBlock* codeBlock) { static_cast<Register*>(this)[CallFrameSlot::codeBlock] = codeBlock; }
void setReturnPC(void* value) { callerFrameAndPC().pc = reinterpret_cast<Instruction*>(value); }
- // CallFrame::iterate() expects a Functor that implements the following method:
- // StackVisitor::Status operator()(StackVisitor&);
+ String friendlyFunctionName();
- template <typename Functor> void iterate(Functor& functor)
+ // CallFrame::iterate() expects a Functor that implements the following method:
+ // StackVisitor::Status operator()(StackVisitor&) const;
+ // FIXME: This method is improper. We rely on the fact that we can call it with a null
+ // receiver. We should always be using StackVisitor directly.
+ template <typename Functor> void iterate(const Functor& functor)
{
StackVisitor::visit<Functor>(this, functor);
}
+ void dump(PrintStream&);
+ JS_EXPORT_PRIVATE const char* describeFrame();
+
private:
- static const intptr_t s_VMEntrySentinel = 1;
-#ifndef NDEBUG
- JSStack* stack();
-#endif
ExecState();
~ExecState();
+ Register* topOfFrameInternal();
+
// The following are for internal use in debugging and verification
// code only and not meant as an API for general usage:
@@ -331,31 +285,16 @@ namespace JSC {
int offset = reg - this->registers();
// The offset is defined (based on argumentOffset()) to be:
- // offset = JSStack::FirstArgument - argIndex;
+ // offset = CallFrameSlot::firstArgument - argIndex;
// Hence:
- // argIndex = JSStack::FirstArgument - offset;
- size_t argIndex = offset - JSStack::FirstArgument;
+ // argIndex = CallFrameSlot::firstArgument - offset;
+ size_t argIndex = offset - CallFrameSlot::firstArgument;
return argIndex;
}
- JSValue getArgumentUnsafe(size_t argIndex)
- {
- // User beware! This method does not verify that there is a valid
- // argument at the specified argIndex. This is used for debugging
- // and verification code only. The caller is expected to know what
- // he/she is doing when calling this method.
- return this[argumentOffset(argIndex)].jsValue();
- }
-
CallerFrameAndPC& callerFrameAndPC() { return *reinterpret_cast<CallerFrameAndPC*>(this); }
const CallerFrameAndPC& callerFrameAndPC() const { return *reinterpret_cast<const CallerFrameAndPC*>(this); }
-
- static CodeBlock* vmEntrySentinelCodeBlock() { return reinterpret_cast<CodeBlock*>(s_VMEntrySentinel); }
-
- friend class JSStack;
- friend class VMInspector;
+ SUPPRESS_ASAN const CallerFrameAndPC& unsafeCallerFrameAndPC() const { return *reinterpret_cast<const CallerFrameAndPC*>(this); }
};
} // namespace JSC
-
-#endif // CallFrame_h
diff --git a/Source/JavaScriptCore/interpreter/CallFrameClosure.h b/Source/JavaScriptCore/interpreter/CallFrameClosure.h
index e3326626d..a625caf9b 100644
--- a/Source/JavaScriptCore/interpreter/CallFrameClosure.h
+++ b/Source/JavaScriptCore/interpreter/CallFrameClosure.h
@@ -23,8 +23,7 @@
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef CallFrameClosure_h
-#define CallFrameClosure_h
+#pragma once
#include "ProtoCallFrame.h"
@@ -49,13 +48,6 @@ struct CallFrameClosure {
{
protoCallFrame->setArgument(argument, value);
}
-
- void resetCallFrame()
- {
- protoCallFrame->setScope(scope);
- }
};
}
-
-#endif
diff --git a/Source/JavaScriptCore/interpreter/CallFrameInlines.h b/Source/JavaScriptCore/interpreter/CallFrameInlines.h
deleted file mode 100644
index 51d751a51..000000000
--- a/Source/JavaScriptCore/interpreter/CallFrameInlines.h
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef CallFrameInlines_h
-#define CallFrameInlines_h
-
-#include "CallFrame.h"
-
-namespace JSC {
-
-inline uint32_t CallFrame::Location::encode(CallFrame::Location::TypeTag tag, uint32_t bits)
-{
-#if USE(JSVALUE64)
- ASSERT(!(bits & s_shiftedMask));
- ASSERT(!(tag & ~s_mask));
- return bits | (tag << s_shift);
-#else
- ASSERT(!(tag & ~s_mask));
- if (tag & CodeOriginIndexTag)
- bits = (bits << s_shift);
- ASSERT(!(bits & s_mask));
- bits |= tag;
- return bits;
-#endif
-}
-
-inline uint32_t CallFrame::Location::decode(uint32_t bits)
-{
-#if USE(JSVALUE64)
- return bits & ~s_shiftedMask;
-#else
- if (isCodeOriginIndex(bits))
- return bits >> s_shift;
- return bits & ~s_mask;
-#endif
-}
-
-#if USE(JSVALUE64)
-inline uint32_t CallFrame::Location::encodeAsBytecodeOffset(uint32_t bits)
-{
- uint32_t encodedBits = encode(BytecodeLocationTag, bits);
- ASSERT(isBytecodeLocation(encodedBits));
- return encodedBits;
-}
-#else
-inline uint32_t CallFrame::Location::encodeAsBytecodeInstruction(Instruction* instruction)
-{
- uint32_t encodedBits = encode(BytecodeLocationTag, reinterpret_cast<uint32_t>(instruction));
- ASSERT(isBytecodeLocation(encodedBits));
- return encodedBits;
-}
-#endif
-
-inline uint32_t CallFrame::Location::encodeAsCodeOriginIndex(uint32_t bits)
-{
- uint32_t encodedBits = encode(CodeOriginIndexTag, bits);
- ASSERT(isCodeOriginIndex(encodedBits));
- return encodedBits;
-}
-
-inline bool CallFrame::Location::isBytecodeLocation(uint32_t bits)
-{
- return !isCodeOriginIndex(bits);
-}
-
-inline bool CallFrame::Location::isCodeOriginIndex(uint32_t bits)
-{
-#if USE(JSVALUE64)
- TypeTag tag = static_cast<TypeTag>(bits >> s_shift);
- return !!(tag & CodeOriginIndexTag);
-#else
- return !!(bits & CodeOriginIndexTag);
-#endif
-}
-
-inline bool CallFrame::hasLocationAsBytecodeOffset() const
-{
- return Location::isBytecodeLocation(locationAsRawBits());
-}
-
-inline bool CallFrame::hasLocationAsCodeOriginIndex() const
-{
- return Location::isCodeOriginIndex(locationAsRawBits());
-}
-
-inline unsigned CallFrame::locationAsRawBits() const
-{
- return this[JSStack::ArgumentCount].tag();
-}
-
-inline void CallFrame::setLocationAsRawBits(unsigned bits)
-{
- this[JSStack::ArgumentCount].tag() = static_cast<int32_t>(bits);
-}
-
-#if USE(JSVALUE64)
-inline unsigned CallFrame::locationAsBytecodeOffset() const
-{
- ASSERT(hasLocationAsBytecodeOffset());
- ASSERT(codeBlock());
- return Location::decode(locationAsRawBits());
-}
-
-inline void CallFrame::setLocationAsBytecodeOffset(unsigned offset)
-{
- ASSERT(codeBlock());
- setLocationAsRawBits(Location::encodeAsBytecodeOffset(offset));
- ASSERT(hasLocationAsBytecodeOffset());
-}
-#endif // USE(JSVALUE64)
-
-inline unsigned CallFrame::locationAsCodeOriginIndex() const
-{
- ASSERT(hasLocationAsCodeOriginIndex());
- ASSERT(codeBlock());
- return Location::decode(locationAsRawBits());
-}
-
-} // namespace JSC
-
-#endif // CallFrameInlines_h
diff --git a/Source/JavaScriptCore/interpreter/FrameTracers.h b/Source/JavaScriptCore/interpreter/FrameTracers.h
new file mode 100644
index 000000000..c5b6b8338
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/FrameTracers.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include "CatchScope.h"
+#include "VM.h"
+
+namespace JSC {
+
+struct VMEntryFrame;
+
+class SuspendExceptionScope {
+public:
+ SuspendExceptionScope(VM* vm)
+ : m_vm(vm)
+ {
+ auto scope = DECLARE_CATCH_SCOPE(*vm);
+ oldException = scope.exception();
+ scope.clearException();
+ }
+ ~SuspendExceptionScope()
+ {
+ m_vm->restorePreviousException(oldException);
+ }
+private:
+ Exception* oldException;
+ VM* m_vm;
+};
+
+class TopCallFrameSetter {
+public:
+ TopCallFrameSetter(VM& currentVM, CallFrame* callFrame)
+ : vm(currentVM)
+ , oldCallFrame(currentVM.topCallFrame)
+ {
+ currentVM.topCallFrame = callFrame;
+ }
+
+ ~TopCallFrameSetter()
+ {
+ vm.topCallFrame = oldCallFrame;
+ }
+private:
+ VM& vm;
+ CallFrame* oldCallFrame;
+};
+
+class NativeCallFrameTracer {
+public:
+ ALWAYS_INLINE NativeCallFrameTracer(VM* vm, CallFrame* callFrame)
+ {
+ ASSERT(vm);
+ ASSERT(callFrame);
+ ASSERT(reinterpret_cast<void*>(callFrame) < reinterpret_cast<void*>(vm->topVMEntryFrame));
+ vm->topCallFrame = callFrame;
+ }
+};
+
+class NativeCallFrameTracerWithRestore {
+public:
+ ALWAYS_INLINE NativeCallFrameTracerWithRestore(VM* vm, VMEntryFrame* vmEntryFrame, CallFrame* callFrame)
+ : m_vm(vm)
+ {
+ ASSERT(vm);
+ ASSERT(callFrame);
+ m_savedTopVMEntryFrame = vm->topVMEntryFrame;
+ m_savedTopCallFrame = vm->topCallFrame;
+ vm->topVMEntryFrame = vmEntryFrame;
+ vm->topCallFrame = callFrame;
+ }
+
+ ALWAYS_INLINE ~NativeCallFrameTracerWithRestore()
+ {
+ m_vm->topVMEntryFrame = m_savedTopVMEntryFrame;
+ m_vm->topCallFrame = m_savedTopCallFrame;
+ }
+
+private:
+ VM* m_vm;
+ VMEntryFrame* m_savedTopVMEntryFrame;
+ CallFrame* m_savedTopCallFrame;
+};
+
+}
diff --git a/Source/JavaScriptCore/interpreter/Interpreter.cpp b/Source/JavaScriptCore/interpreter/Interpreter.cpp
index 4fbc8229a..b1243f290 100644
--- a/Source/JavaScriptCore/interpreter/Interpreter.cpp
+++ b/Source/JavaScriptCore/interpreter/Interpreter.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008, 2009, 2010, 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
* Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
*
* Redistribution and use in source and binary forms, with or without
@@ -11,7 +11,7 @@
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
- * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
@@ -30,238 +30,284 @@
#include "config.h"
#include "Interpreter.h"
-#include "Arguments.h"
#include "BatchedTransitionOptimizer.h"
-#include "CallFrame.h"
#include "CallFrameClosure.h"
-#include "CallFrameInlines.h"
#include "CodeBlock.h"
+#include "DirectArguments.h"
#include "Heap.h"
#include "Debugger.h"
#include "DebuggerCallFrame.h"
+#include "DirectEvalCodeCache.h"
#include "ErrorInstance.h"
-#include "EvalCodeCache.h"
+#include "EvalCodeBlock.h"
+#include "Exception.h"
#include "ExceptionHelpers.h"
-#include "GetterSetter.h"
-#include "JSActivation.h"
-#include "JSArray.h"
+#include "FunctionCodeBlock.h"
+#include "JSArrayInlines.h"
#include "JSBoundFunction.h"
-#include "JSNameScope.h"
-#include "JSNotAnObject.h"
-#include "JSPropertyNameIterator.h"
-#include "JSStackInlines.h"
+#include "JSCInlines.h"
+#include "JSLexicalEnvironment.h"
+#include "JSModuleEnvironment.h"
#include "JSString.h"
#include "JSWithScope.h"
#include "LLIntCLoop.h"
+#include "LLIntData.h"
#include "LLIntThunks.h"
-#include "LegacyProfiler.h"
#include "LiteralParser.h"
-#include "NameInstance.h"
+#include "ModuleProgramCodeBlock.h"
#include "ObjectPrototype.h"
-#include "Operations.h"
#include "Parser.h"
+#include "ProgramCodeBlock.h"
#include "ProtoCallFrame.h"
#include "RegExpObject.h"
-#include "RegExpPrototype.h"
#include "Register.h"
-#include "SamplingTool.h"
+#include "ScopedArguments.h"
+#include "StackAlignment.h"
+#include "StackFrame.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
+#include "Symbol.h"
#include "VMEntryScope.h"
+#include "VMInlines.h"
#include "VirtualRegister.h"
#include <limits.h>
#include <stdio.h>
#include <wtf/StackStats.h>
+#include <wtf/StdLibExtras.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
-#include <wtf/WTFThreadData.h>
#include <wtf/text/StringBuilder.h>
#if ENABLE(JIT)
#include "JIT.h"
#endif
-#define WTF_USE_GCC_COMPUTED_GOTO_WORKAROUND (ENABLE(LLINT) && !defined(__llvm__))
-
using namespace std;
namespace JSC {
-Interpreter::ErrorHandlingMode::ErrorHandlingMode(ExecState *exec)
- : m_interpreter(*exec->interpreter())
-{
- if (!m_interpreter.m_errorHandlingModeReentry)
- m_interpreter.stack().enableErrorStackReserve();
- m_interpreter.m_errorHandlingModeReentry++;
-}
-
-Interpreter::ErrorHandlingMode::~ErrorHandlingMode()
-{
- m_interpreter.m_errorHandlingModeReentry--;
- ASSERT(m_interpreter.m_errorHandlingModeReentry >= 0);
- if (!m_interpreter.m_errorHandlingModeReentry)
- m_interpreter.stack().disableErrorStackReserve();
-}
-
JSValue eval(CallFrame* callFrame)
{
+ VM& vm = callFrame->vm();
+ auto scope = DECLARE_THROW_SCOPE(vm);
+
if (!callFrame->argumentCount())
return jsUndefined();
JSValue program = callFrame->argument(0);
if (!program.isString())
return program;
-
- TopCallFrameSetter topCallFrame(callFrame->vm(), callFrame);
+
+ TopCallFrameSetter topCallFrame(vm, callFrame);
+ JSGlobalObject* globalObject = callFrame->lexicalGlobalObject();
+ if (!globalObject->evalEnabled()) {
+ throwException(callFrame, scope, createEvalError(callFrame, globalObject->evalDisabledErrorMessage()));
+ return jsUndefined();
+ }
String programSource = asString(program)->value(callFrame);
- if (callFrame->hadException())
- return JSValue();
+ RETURN_IF_EXCEPTION(scope, JSValue());
CallFrame* callerFrame = callFrame->callerFrame();
+ CallSiteIndex callerCallSiteIndex = callerFrame->callSiteIndex();
CodeBlock* callerCodeBlock = callerFrame->codeBlock();
- JSScope* callerScopeChain = callerFrame->scope();
- EvalExecutable* eval = callerCodeBlock->evalCodeCache().tryGet(callerCodeBlock->isStrictMode(), programSource, callerScopeChain);
+ JSScope* callerScopeChain = callerFrame->uncheckedR(callerCodeBlock->scopeRegister().offset()).Register::scope();
+ UnlinkedCodeBlock* callerUnlinkedCodeBlock = callerCodeBlock->unlinkedCodeBlock();
+
+ bool isArrowFunctionContext = callerUnlinkedCodeBlock->isArrowFunction() || callerUnlinkedCodeBlock->isArrowFunctionContext();
+
+ DerivedContextType derivedContextType = callerUnlinkedCodeBlock->derivedContextType();
+ if (!isArrowFunctionContext && callerUnlinkedCodeBlock->isClassContext()) {
+ derivedContextType = callerUnlinkedCodeBlock->isConstructor()
+ ? DerivedContextType::DerivedConstructorContext
+ : DerivedContextType::DerivedMethodContext;
+ }
+
+ EvalContextType evalContextType;
+ if (isFunctionParseMode(callerUnlinkedCodeBlock->parseMode()))
+ evalContextType = EvalContextType::FunctionEvalContext;
+ else if (callerUnlinkedCodeBlock->codeType() == EvalCode)
+ evalContextType = callerUnlinkedCodeBlock->evalContextType();
+ else
+ evalContextType = EvalContextType::None;
+ DirectEvalExecutable* eval = callerCodeBlock->directEvalCodeCache().tryGet(programSource, callerCallSiteIndex);
if (!eval) {
if (!callerCodeBlock->isStrictMode()) {
- // FIXME: We can use the preparser in strict mode, we just need additional logic
- // to prevent duplicates.
if (programSource.is8Bit()) {
LiteralParser<LChar> preparser(callFrame, programSource.characters8(), programSource.length(), NonStrictJSON);
- if (JSValue parsedObject = preparser.tryLiteralParse())
+ if (JSValue parsedObject = preparser.tryLiteralParse()) {
+ scope.release();
return parsedObject;
+ }
} else {
LiteralParser<UChar> preparser(callFrame, programSource.characters16(), programSource.length(), NonStrictJSON);
- if (JSValue parsedObject = preparser.tryLiteralParse())
- return parsedObject;
+ if (JSValue parsedObject = preparser.tryLiteralParse()) {
+ scope.release();
+ return parsedObject;
+ }
}
}
// If the literal parser bailed, it should not have thrown exceptions.
- ASSERT(!callFrame->vm().exception());
+ ASSERT(!scope.exception());
- eval = callerCodeBlock->evalCodeCache().getSlow(callFrame, callerCodeBlock->ownerExecutable(), callerCodeBlock->isStrictMode(), programSource, callerScopeChain);
+ VariableEnvironment variablesUnderTDZ;
+ JSScope::collectClosureVariablesUnderTDZ(callerScopeChain, variablesUnderTDZ);
+ eval = DirectEvalExecutable::create(callFrame, makeSource(programSource, callerCodeBlock->source()->sourceOrigin()), callerCodeBlock->isStrictMode(), derivedContextType, isArrowFunctionContext, evalContextType, &variablesUnderTDZ);
+ ASSERT(!!scope.exception() == !eval);
if (!eval)
return jsUndefined();
+
+ callerCodeBlock->directEvalCodeCache().set(callFrame, callerCodeBlock, programSource, callerCallSiteIndex, eval);
}
JSValue thisValue = callerFrame->thisValue();
- Interpreter* interpreter = callFrame->vm().interpreter;
+ Interpreter* interpreter = vm.interpreter;
+ scope.release();
return interpreter->execute(eval, callFrame, thisValue, callerScopeChain);
}
-CallFrame* sizeAndAllocFrameForVarargs(CallFrame* callFrame, JSStack* stack, JSValue arguments, int firstFreeRegister)
+unsigned sizeOfVarargs(CallFrame* callFrame, JSValue arguments, uint32_t firstVarArgOffset)
{
- if (!arguments) { // f.apply(x, arguments), with arguments unmodified.
- unsigned argumentCountIncludingThis = callFrame->argumentCountIncludingThis();
- CallFrame* newCallFrame = CallFrame::create(callFrame->registers() + firstFreeRegister - argumentCountIncludingThis - JSStack::CallFrameHeaderSize - 1);
- if (argumentCountIncludingThis > Arguments::MaxArguments + 1 || !stack->grow(newCallFrame->registers())) {
- callFrame->vm().throwException(callFrame, createStackOverflowError(callFrame));
- return 0;
- }
- return newCallFrame;
- }
+ VM& vm = callFrame->vm();
+ auto scope = DECLARE_THROW_SCOPE(vm);
- if (arguments.isUndefinedOrNull()) {
- CallFrame* newCallFrame = CallFrame::create(callFrame->registers() + firstFreeRegister - 1 - JSStack::CallFrameHeaderSize - 1);
- if (!stack->grow(newCallFrame->registers())) {
- callFrame->vm().throwException(callFrame, createStackOverflowError(callFrame));
+ if (UNLIKELY(!arguments.isCell())) {
+ if (arguments.isUndefinedOrNull())
return 0;
- }
- return newCallFrame;
+
+ throwException(callFrame, scope, createInvalidFunctionApplyParameterError(callFrame, arguments));
+ return 0;
}
-
- if (!arguments.isObject()) {
- callFrame->vm().throwException(callFrame, createInvalidParameterError(callFrame, "Function.prototype.apply", arguments));
+
+ JSCell* cell = arguments.asCell();
+ unsigned length;
+ switch (cell->type()) {
+ case DirectArgumentsType:
+ length = jsCast<DirectArguments*>(cell)->length(callFrame);
+ break;
+ case ScopedArgumentsType:
+ length = jsCast<ScopedArguments*>(cell)->length(callFrame);
+ break;
+ case StringType:
+ case SymbolType:
+ throwException(callFrame, scope, createInvalidFunctionApplyParameterError(callFrame, arguments));
return 0;
+
+ default:
+ RELEASE_ASSERT(arguments.isObject());
+ length = getLength(callFrame, jsCast<JSObject*>(cell));
+ break;
}
+ RETURN_IF_EXCEPTION(scope, 0);
+
+ if (length >= firstVarArgOffset)
+ length -= firstVarArgOffset;
+ else
+ length = 0;
+
+ return length;
+}
- if (asObject(arguments)->classInfo() == Arguments::info()) {
- Arguments* argsObject = asArguments(arguments);
- unsigned argCount = argsObject->length(callFrame);
- CallFrame* newCallFrame = CallFrame::create(callFrame->registers() + firstFreeRegister - CallFrame::offsetFor(argCount + 1));
- if (argCount > Arguments::MaxArguments || !stack->grow(newCallFrame->registers())) {
- callFrame->vm().throwException(callFrame, createStackOverflowError(callFrame));
- return 0;
- }
- return newCallFrame;
- }
+unsigned sizeFrameForForwardArguments(CallFrame* callFrame, VM& vm, unsigned numUsedStackSlots)
+{
+ auto scope = DECLARE_THROW_SCOPE(vm);
- if (isJSArray(arguments)) {
- JSArray* array = asArray(arguments);
- unsigned argCount = array->length();
- CallFrame* newCallFrame = CallFrame::create(callFrame->registers() + firstFreeRegister - CallFrame::offsetFor(argCount + 1));
- if (argCount > Arguments::MaxArguments || !stack->grow(newCallFrame->registers())) {
- callFrame->vm().throwException(callFrame, createStackOverflowError(callFrame));
- return 0;
- }
- return newCallFrame;
- }
+ unsigned length = callFrame->argumentCount();
+ CallFrame* calleeFrame = calleeFrameForVarargs(callFrame, numUsedStackSlots, length + 1);
+ if (UNLIKELY(!vm.ensureStackCapacityFor(calleeFrame->registers())))
+ throwStackOverflowError(callFrame, scope);
- JSObject* argObject = asObject(arguments);
- unsigned argCount = argObject->get(callFrame, callFrame->propertyNames().length).toUInt32(callFrame);
- CallFrame* newCallFrame = CallFrame::create(callFrame->registers() + firstFreeRegister - CallFrame::offsetFor(argCount + 1));
- if (argCount > Arguments::MaxArguments || !stack->grow(newCallFrame->registers())) {
- callFrame->vm().throwException(callFrame, createStackOverflowError(callFrame));
- return 0;
- }
- return newCallFrame;
+ return length;
}
-void loadVarargs(CallFrame* callFrame, CallFrame* newCallFrame, JSValue thisValue, JSValue arguments)
+unsigned sizeFrameForVarargs(CallFrame* callFrame, VM& vm, JSValue arguments, unsigned numUsedStackSlots, uint32_t firstVarArgOffset)
{
- if (!arguments) { // f.apply(x, arguments), with arguments unmodified.
- unsigned argumentCountIncludingThis = callFrame->argumentCountIncludingThis();
+ auto scope = DECLARE_THROW_SCOPE(vm);
- newCallFrame->setArgumentCountIncludingThis(argumentCountIncludingThis);
- newCallFrame->setThisValue(thisValue);
- for (size_t i = 0; i < callFrame->argumentCount(); ++i)
- newCallFrame->setArgument(i, callFrame->argumentAfterCapture(i));
- return;
+ unsigned length = sizeOfVarargs(callFrame, arguments, firstVarArgOffset);
+ RETURN_IF_EXCEPTION(scope, 0);
+
+ CallFrame* calleeFrame = calleeFrameForVarargs(callFrame, numUsedStackSlots, length + 1);
+ if (UNLIKELY(length > maxArguments || !vm.ensureStackCapacityFor(calleeFrame->registers()))) {
+ throwStackOverflowError(callFrame, scope);
+ return 0;
}
- if (arguments.isUndefinedOrNull()) {
- newCallFrame->setArgumentCountIncludingThis(1);
- newCallFrame->setThisValue(thisValue);
+ return length;
+}
+
+void loadVarargs(CallFrame* callFrame, VirtualRegister firstElementDest, JSValue arguments, uint32_t offset, uint32_t length)
+{
+ if (UNLIKELY(!arguments.isCell()) || !length)
return;
- }
- if (asObject(arguments)->classInfo() == Arguments::info()) {
- Arguments* argsObject = asArguments(arguments);
- unsigned argCount = argsObject->length(callFrame);
- newCallFrame->setArgumentCountIncludingThis(argCount + 1);
- newCallFrame->setThisValue(thisValue);
- argsObject->copyToArguments(callFrame, newCallFrame, argCount);
+ JSCell* cell = arguments.asCell();
+
+ switch (cell->type()) {
+ case DirectArgumentsType:
+ jsCast<DirectArguments*>(cell)->copyToArguments(callFrame, firstElementDest, offset, length);
return;
- }
-
- if (isJSArray(arguments)) {
- JSArray* array = asArray(arguments);
- unsigned argCount = array->length();
- newCallFrame->setArgumentCountIncludingThis(argCount + 1);
- newCallFrame->setThisValue(thisValue);
- array->copyToArguments(callFrame, newCallFrame, argCount);
+ case ScopedArgumentsType:
+ jsCast<ScopedArguments*>(cell)->copyToArguments(callFrame, firstElementDest, offset, length);
return;
- }
+ default: {
+ ASSERT(arguments.isObject());
+ JSObject* object = jsCast<JSObject*>(cell);
+ if (isJSArray(object)) {
+ jsCast<JSArray*>(object)->copyToArguments(callFrame, firstElementDest, offset, length);
+ return;
+ }
+ unsigned i;
+ for (i = 0; i < length && object->canGetIndexQuickly(i + offset); ++i)
+ callFrame->r(firstElementDest + i) = object->getIndexQuickly(i + offset);
+ for (; i < length; ++i)
+ callFrame->r(firstElementDest + i) = object->get(callFrame, i + offset);
+ return;
+ } }
+}
+
+void setupVarargsFrame(CallFrame* callFrame, CallFrame* newCallFrame, JSValue arguments, uint32_t offset, uint32_t length)
+{
+ VirtualRegister calleeFrameOffset(newCallFrame - callFrame);
- JSObject* argObject = asObject(arguments);
- unsigned argCount = argObject->get(callFrame, callFrame->propertyNames().length).toUInt32(callFrame);
- newCallFrame->setArgumentCountIncludingThis(argCount + 1);
+ loadVarargs(
+ callFrame,
+ calleeFrameOffset + CallFrame::argumentOffset(0),
+ arguments, offset, length);
+
+ newCallFrame->setArgumentCountIncludingThis(length + 1);
+}
+
+void setupVarargsFrameAndSetThis(CallFrame* callFrame, CallFrame* newCallFrame, JSValue thisValue, JSValue arguments, uint32_t firstVarArgOffset, uint32_t length)
+{
+ setupVarargsFrame(callFrame, newCallFrame, arguments, firstVarArgOffset, length);
newCallFrame->setThisValue(thisValue);
- for (size_t i = 0; i < argCount; ++i) {
- newCallFrame->setArgument(i, asObject(arguments)->get(callFrame, i));
- if (UNLIKELY(callFrame->vm().exception()))
- return;
- }
}
+void setupForwardArgumentsFrame(CallFrame* execCaller, CallFrame* execCallee, uint32_t length)
+{
+ ASSERT(length == execCaller->argumentCount());
+ unsigned offset = execCaller->argumentOffset(0) * sizeof(Register);
+ memcpy(reinterpret_cast<char*>(execCallee) + offset, reinterpret_cast<char*>(execCaller) + offset, length * sizeof(Register));
+ execCallee->setArgumentCountIncludingThis(length + 1);
+}
+
+void setupForwardArgumentsFrameAndSetThis(CallFrame* execCaller, CallFrame* execCallee, JSValue thisValue, uint32_t length)
+{
+ setupForwardArgumentsFrame(execCaller, execCallee, length);
+ execCallee->setThisValue(thisValue);
+}
+
+
+
Interpreter::Interpreter(VM& vm)
- : m_sampleEntryDepth(0)
- , m_vm(vm)
- , m_stack(vm)
- , m_errorHandlingModeReentry(0)
+ : m_vm(vm)
+#if !ENABLE(JIT)
+ , m_cloopStack(vm)
+#endif
#if !ASSERT_DISABLED
, m_initialized(false)
#endif
@@ -272,11 +318,9 @@ Interpreter::~Interpreter()
{
}
-void Interpreter::initialize(bool canUseJIT)
+void Interpreter::initialize()
{
- UNUSED_PARAM(canUseJIT);
-
-#if ENABLE(COMPUTED_GOTO_OPCODES) && ENABLE(LLINT)
+#if ENABLE(COMPUTED_GOTO_OPCODES)
m_opcodeTable = LLInt::opcodeMap();
for (int i = 0; i < numOpcodeIDs; ++i)
m_opcodeIDTable.add(m_opcodeTable[i], static_cast<OpcodeID>(i));
@@ -285,10 +329,6 @@ void Interpreter::initialize(bool canUseJIT)
#if !ASSERT_DISABLED
m_initialized = true;
#endif
-
-#if ENABLE(OPCODE_SAMPLING)
- enableSampler();
-#endif
}
#ifdef NDEBUG
@@ -313,7 +353,7 @@ public:
{
}
- StackVisitor::Status operator()(StackVisitor& visitor)
+ StackVisitor::Status operator()(StackVisitor& visitor) const
{
if (!m_hasSkippedFirstFrame) {
m_hasSkippedFirstFrame = true;
@@ -329,7 +369,7 @@ public:
}
private:
- bool m_hasSkippedFirstFrame;
+ mutable bool m_hasSkippedFirstFrame;
const Register*& m_it;
};
@@ -344,8 +384,8 @@ void Interpreter::dumpRegisters(CallFrame* callFrame)
const Register* it;
const Register* end;
- it = callFrame->registers() + JSStack::ThisArgument + callFrame->argumentCount();
- end = callFrame->registers() + JSStack::ThisArgument - 1;
+ it = callFrame->registers() + CallFrameSlot::thisArgument + callFrame->argumentCount();
+ end = callFrame->registers() + CallFrameSlot::thisArgument - 1;
while (it > end) {
JSValue v = it->jsValue();
int registerNumber = it - callFrame->registers();
@@ -359,9 +399,9 @@ void Interpreter::dumpRegisters(CallFrame* callFrame)
--it;
dataLogF("[CallerFrame] | %10p | %p \n", it, callFrame->callerFrame());
--it;
- dataLogF("[Callee] | %10p | %p \n", it, callFrame->callee());
+ dataLogF("[Callee] | %10p | %p \n", it, callFrame->jsCallee());
--it;
- dataLogF("[ScopeChain] | %10p | %p \n", it, callFrame->scope());
+ // FIXME: Remove the next decrement when the ScopeChain slot is removed from the call header
--it;
#if ENABLE(JIT)
AbstractPC pc = callFrame->abstractReturnPC(callFrame->vm());
@@ -388,7 +428,7 @@ void Interpreter::dumpRegisters(CallFrame* callFrame)
}
dataLogF("-----------------------------------------------------------------------------\n");
- end = it - codeBlock->m_numCalleeRegisters + codeBlock->m_numVars;
+ end = it - codeBlock->m_numCalleeLocals + codeBlock->m_numVars;
if (it != end) {
do {
JSValue v = (*it).jsValue();
@@ -405,160 +445,41 @@ void Interpreter::dumpRegisters(CallFrame* callFrame)
bool Interpreter::isOpcode(Opcode opcode)
{
#if ENABLE(COMPUTED_GOTO_OPCODES)
-#if !ENABLE(LLINT)
- return static_cast<OpcodeID>(bitwise_cast<uintptr_t>(opcode)) <= op_end;
-#else
return opcode != HashTraits<Opcode>::emptyValue()
&& !HashTraits<Opcode>::isDeletedValue(opcode)
&& m_opcodeIDTable.contains(opcode);
-#endif
#else
return opcode >= 0 && opcode <= op_end;
#endif
}
-static bool unwindCallFrame(StackVisitor& visitor)
-{
- CallFrame* callFrame = visitor->callFrame();
- CodeBlock* codeBlock = visitor->codeBlock();
- CodeBlock* oldCodeBlock = codeBlock;
- JSScope* scope = callFrame->scope();
-
- if (Debugger* debugger = callFrame->vmEntryGlobalObject()->debugger()) {
- if (callFrame->callee())
- debugger->returnEvent(callFrame);
- else
- debugger->didExecuteProgram(callFrame);
- }
-
- JSValue activation;
- if (oldCodeBlock->codeType() == FunctionCode && oldCodeBlock->needsActivation()) {
-#if ENABLE(DFG_JIT)
- RELEASE_ASSERT(!visitor->isInlinedFrame());
-#endif
- activation = callFrame->uncheckedR(oldCodeBlock->activationRegister().offset()).jsValue();
- if (activation)
- jsCast<JSActivation*>(activation)->tearOff(*scope->vm());
- }
-
- if (oldCodeBlock->codeType() == FunctionCode && oldCodeBlock->usesArguments()) {
- if (Arguments* arguments = visitor->existingArguments()) {
- if (activation)
- arguments->didTearOffActivation(callFrame, jsCast<JSActivation*>(activation));
-#if ENABLE(DFG_JIT)
- else if (visitor->isInlinedFrame())
- arguments->tearOff(callFrame, visitor->inlineCallFrame());
-#endif
- else
- arguments->tearOff(callFrame);
- }
- }
-
- CallFrame* callerFrame = callFrame->callerFrame();
- if (callerFrame->isVMEntrySentinel()) {
- callFrame->vm().topCallFrame = callerFrame->vmEntrySentinelCallerFrame();
- return false;
- }
- return true;
-}
-
-static StackFrameCodeType getStackFrameCodeType(StackVisitor& visitor)
-{
- switch (visitor->codeType()) {
- case StackVisitor::Frame::Eval:
- return StackFrameEvalCode;
- case StackVisitor::Frame::Function:
- return StackFrameFunctionCode;
- case StackVisitor::Frame::Global:
- return StackFrameGlobalCode;
- case StackVisitor::Frame::Native:
- ASSERT_NOT_REACHED();
- return StackFrameNativeCode;
- }
- RELEASE_ASSERT_NOT_REACHED();
- return StackFrameGlobalCode;
-}
-
-void StackFrame::computeLineAndColumn(unsigned& line, unsigned& column)
-{
- if (!codeBlock) {
- line = 0;
- column = 0;
- return;
- }
-
- int divot = 0;
- int unusedStartOffset = 0;
- int unusedEndOffset = 0;
- unsigned divotLine = 0;
- unsigned divotColumn = 0;
- expressionInfo(divot, unusedStartOffset, unusedEndOffset, divotLine, divotColumn);
-
- line = divotLine + lineOffset;
- column = divotColumn + (divotLine ? 1 : firstLineColumnOffset);
-}
-
-void StackFrame::expressionInfo(int& divot, int& startOffset, int& endOffset, unsigned& line, unsigned& column)
-{
- codeBlock->expressionRangeForBytecodeOffset(bytecodeOffset, divot, startOffset, endOffset, line, column);
- divot += characterOffset;
-}
-
-String StackFrame::toString(CallFrame* callFrame)
-{
- StringBuilder traceBuild;
- String functionName = friendlyFunctionName(callFrame);
- String sourceURL = friendlySourceURL();
- traceBuild.append(functionName);
- if (!sourceURL.isEmpty()) {
- if (!functionName.isEmpty())
- traceBuild.append('@');
- traceBuild.append(sourceURL);
- if (codeType != StackFrameNativeCode) {
- unsigned line;
- unsigned column;
- computeLineAndColumn(line, column);
-
- traceBuild.append(':');
- traceBuild.appendNumber(line);
- traceBuild.append(':');
- traceBuild.appendNumber(column);
- }
- }
- return traceBuild.toString().impl();
-}
-
class GetStackTraceFunctor {
public:
- GetStackTraceFunctor(VM& vm, Vector<StackFrame>& results, size_t remainingCapacity)
+ GetStackTraceFunctor(VM& vm, Vector<StackFrame>& results, size_t framesToSkip, size_t capacity)
: m_vm(vm)
, m_results(results)
- , m_remainingCapacityForFrameCapture(remainingCapacity)
+ , m_framesToSkip(framesToSkip)
+ , m_remainingCapacityForFrameCapture(capacity)
{
+ m_results.reserveInitialCapacity(capacity);
}
- StackVisitor::Status operator()(StackVisitor& visitor)
+ StackVisitor::Status operator()(StackVisitor& visitor) const
{
- VM& vm = m_vm;
+ if (m_framesToSkip > 0) {
+ m_framesToSkip--;
+ return StackVisitor::Continue;
+ }
+
if (m_remainingCapacityForFrameCapture) {
- if (visitor->isJSFrame()) {
- CodeBlock* codeBlock = visitor->codeBlock();
- StackFrame s = {
- Strong<JSObject>(vm, visitor->callee()),
- getStackFrameCodeType(visitor),
- Strong<ExecutableBase>(vm, codeBlock->ownerExecutable()),
- Strong<UnlinkedCodeBlock>(vm, codeBlock->unlinkedCodeBlock()),
- codeBlock->source(),
- codeBlock->ownerExecutable()->lineNo(),
- codeBlock->firstLineColumnOffset(),
- codeBlock->sourceOffset(),
- visitor->bytecodeOffset(),
- visitor->sourceURL()
- };
- m_results.append(s);
+ if (!visitor->isWasmFrame()
+ && !!visitor->codeBlock()
+ && !visitor->codeBlock()->unlinkedCodeBlock()->isBuiltinFunction()) {
+ m_results.append(
+ StackFrame(m_vm, visitor->callee(), visitor->codeBlock(), visitor->bytecodeOffset()));
} else {
- StackFrame s = { Strong<JSObject>(vm, visitor->callee()), StackFrameNativeCode, Strong<ExecutableBase>(), Strong<UnlinkedCodeBlock>(), 0, 0, 0, 0, 0, String()};
- m_results.append(s);
+ m_results.append(
+ StackFrame(m_vm, visitor->callee()));
}
m_remainingCapacityForFrameCapture--;
@@ -570,50 +491,80 @@ public:
private:
VM& m_vm;
Vector<StackFrame>& m_results;
- size_t m_remainingCapacityForFrameCapture;
+ mutable size_t m_framesToSkip;
+ mutable size_t m_remainingCapacityForFrameCapture;
};
-void Interpreter::getStackTrace(Vector<StackFrame>& results, size_t maxStackSize)
+void Interpreter::getStackTrace(Vector<StackFrame>& results, size_t framesToSkip, size_t maxStackSize)
{
VM& vm = m_vm;
- ASSERT(!vm.topCallFrame->isVMEntrySentinel());
CallFrame* callFrame = vm.topCallFrame;
if (!callFrame)
return;
- GetStackTraceFunctor functor(vm, results, maxStackSize);
+ size_t framesCount = 0;
+ callFrame->iterate([&] (StackVisitor&) -> StackVisitor::Status {
+ framesCount++;
+ return StackVisitor::Continue;
+ });
+ if (framesCount <= framesToSkip)
+ return;
+
+ framesCount -= framesToSkip;
+ framesCount = std::min(maxStackSize, framesCount);
+
+ GetStackTraceFunctor functor(vm, results, framesToSkip, framesCount);
callFrame->iterate(functor);
+ ASSERT(results.size() == results.capacity());
}
-JSString* Interpreter::stackTraceAsString(ExecState* exec, Vector<StackFrame> stackTrace)
+JSString* Interpreter::stackTraceAsString(VM& vm, const Vector<StackFrame>& stackTrace)
{
// FIXME: JSStringJoiner could be more efficient than StringBuilder here.
StringBuilder builder;
for (unsigned i = 0; i < stackTrace.size(); i++) {
- builder.append(String(stackTrace[i].toString(exec)));
+ builder.append(String(stackTrace[i].toString(vm)));
if (i != stackTrace.size() - 1)
builder.append('\n');
}
- return jsString(&exec->vm(), builder.toString());
+ return jsString(&vm, builder.toString());
}
-class GetExceptionHandlerFunctor {
+ALWAYS_INLINE static HandlerInfo* findExceptionHandler(StackVisitor& visitor, CodeBlock* codeBlock, RequiredHandler requiredHandler)
+{
+ ASSERT(codeBlock);
+#if ENABLE(DFG_JIT)
+ ASSERT(!visitor->isInlinedFrame());
+#endif
+
+ CallFrame* callFrame = visitor->callFrame();
+ unsigned exceptionHandlerIndex;
+ if (JITCode::isOptimizingJIT(codeBlock->jitType()))
+ exceptionHandlerIndex = callFrame->callSiteIndex().bits();
+ else
+ exceptionHandlerIndex = callFrame->bytecodeOffset();
+
+ return codeBlock->handlerForIndex(exceptionHandlerIndex, requiredHandler);
+}
+
+class GetCatchHandlerFunctor {
public:
- GetExceptionHandlerFunctor()
+ GetCatchHandlerFunctor()
: m_handler(0)
{
}
HandlerInfo* handler() { return m_handler; }
- StackVisitor::Status operator()(StackVisitor& visitor)
+ StackVisitor::Status operator()(StackVisitor& visitor) const
{
+ visitor.unwindToMachineCodeBlockFrame();
+
CodeBlock* codeBlock = visitor->codeBlock();
if (!codeBlock)
return StackVisitor::Continue;
- unsigned bytecodeOffset = visitor->bytecodeOffset();
- m_handler = codeBlock->handlerForBytecodeOffset(bytecodeOffset);
+ m_handler = findExceptionHandler(visitor, codeBlock, RequiredHandler::CatchHandler);
if (m_handler)
return StackVisitor::Done;
@@ -621,9 +572,23 @@ public:
}
private:
- HandlerInfo* m_handler;
+ mutable HandlerInfo* m_handler;
};
+ALWAYS_INLINE static void notifyDebuggerOfUnwinding(CallFrame* callFrame)
+{
+ VM& vm = callFrame->vm();
+ auto catchScope = DECLARE_CATCH_SCOPE(vm);
+ if (Debugger* debugger = callFrame->vmEntryGlobalObject()->debugger()) {
+ SuspendExceptionScope scope(&vm);
+ if (jsDynamicCast<JSFunction*>(vm, callFrame->jsCallee()))
+ debugger->unwindEvent(callFrame);
+ else
+ debugger->didExecuteProgram(callFrame);
+ ASSERT_UNUSED(catchScope, !catchScope.exception());
+ }
+}
+
class UnwindFunctor {
public:
UnwindFunctor(CallFrame*& callFrame, bool isTermination, CodeBlock*& codeBlock, HandlerInfo*& handler)
@@ -634,37 +599,82 @@ public:
{
}
- StackVisitor::Status operator()(StackVisitor& visitor)
+ StackVisitor::Status operator()(StackVisitor& visitor) const
{
- VM& vm = m_callFrame->vm();
+ visitor.unwindToMachineCodeBlockFrame();
m_callFrame = visitor->callFrame();
m_codeBlock = visitor->codeBlock();
- unsigned bytecodeOffset = visitor->bytecodeOffset();
- if (m_isTermination || !(m_handler = m_codeBlock->handlerForBytecodeOffset(bytecodeOffset))) {
- if (!unwindCallFrame(visitor)) {
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->exceptionUnwind(m_callFrame);
- return StackVisitor::Done;
+ m_handler = nullptr;
+ if (!m_isTermination) {
+ if (m_codeBlock) {
+ m_handler = findExceptionHandler(visitor, m_codeBlock, RequiredHandler::AnyHandler);
+ if (m_handler)
+ return StackVisitor::Done;
}
- } else
+ }
+
+ notifyDebuggerOfUnwinding(m_callFrame);
+
+ copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(visitor);
+
+ bool shouldStopUnwinding = visitor->callerIsVMEntryFrame();
+ if (shouldStopUnwinding)
return StackVisitor::Done;
return StackVisitor::Continue;
}
private:
+ void copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(StackVisitor& visitor) const
+ {
+#if ENABLE(JIT) && NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
+ RegisterAtOffsetList* currentCalleeSaves = visitor->calleeSaveRegisters();
+
+ if (!currentCalleeSaves)
+ return;
+
+ VM& vm = m_callFrame->vm();
+ RegisterAtOffsetList* allCalleeSaves = vm.getAllCalleeSaveRegisterOffsets();
+ RegisterSet dontCopyRegisters = RegisterSet::stackRegisters();
+ intptr_t* frame = reinterpret_cast<intptr_t*>(m_callFrame->registers());
+
+ unsigned registerCount = currentCalleeSaves->size();
+ VMEntryRecord* record = vmEntryRecord(vm.topVMEntryFrame);
+ for (unsigned i = 0; i < registerCount; i++) {
+ RegisterAtOffset currentEntry = currentCalleeSaves->at(i);
+ if (dontCopyRegisters.get(currentEntry.reg()))
+ continue;
+ RegisterAtOffset* calleeSavesEntry = allCalleeSaves->find(currentEntry.reg());
+
+ record->calleeSaveRegistersBuffer[calleeSavesEntry->offsetAsIndex()] = *(frame + currentEntry.offsetAsIndex());
+ }
+#else
+ UNUSED_PARAM(visitor);
+#endif
+ }
+
CallFrame*& m_callFrame;
bool m_isTermination;
CodeBlock*& m_codeBlock;
HandlerInfo*& m_handler;
};
-NEVER_INLINE HandlerInfo* Interpreter::unwind(CallFrame*& callFrame, JSValue& exceptionValue)
+NEVER_INLINE HandlerInfo* Interpreter::unwind(VM& vm, CallFrame*& callFrame, Exception* exception, UnwindStart unwindStart)
{
+ auto scope = DECLARE_CATCH_SCOPE(vm);
+
+ if (unwindStart == UnwindFromCallerFrame) {
+ if (callFrame->callerFrameOrVMEntryFrame() == vm.topVMEntryFrame)
+ return nullptr;
+
+ callFrame = callFrame->callerFrame();
+ vm.topCallFrame = callFrame;
+ }
+
CodeBlock* codeBlock = callFrame->codeBlock();
- bool isTermination = false;
+ JSValue exceptionValue = exception->value();
ASSERT(!exceptionValue.isEmpty());
ASSERT(!exceptionValue.isCell() || exceptionValue.asCell());
// This shouldn't be possible (hence the assertions), but we're already in the slowest of
@@ -672,60 +682,43 @@ NEVER_INLINE HandlerInfo* Interpreter::unwind(CallFrame*& callFrame, JSValue& ex
if (exceptionValue.isEmpty() || (exceptionValue.isCell() && !exceptionValue.asCell()))
exceptionValue = jsNull();
- if (exceptionValue.isObject()) {
- isTermination = isTerminatedExecutionException(asObject(exceptionValue));
- }
+ ASSERT_UNUSED(scope, scope.exception() && scope.exception()->stack().size());
+
+ // Calculate an exception handler vPC, unwinding call frames as necessary.
+ HandlerInfo* handler = nullptr;
+ UnwindFunctor functor(callFrame, isTerminatedExecutionException(vm, exception), codeBlock, handler);
+ callFrame->iterate(functor);
+ if (!handler)
+ return nullptr;
- ASSERT(callFrame->vm().exceptionStack().size());
+ return handler;
+}
+void Interpreter::notifyDebuggerOfExceptionToBeThrown(CallFrame* callFrame, Exception* exception)
+{
+ VM& vm = callFrame->vm();
Debugger* debugger = callFrame->vmEntryGlobalObject()->debugger();
- if (debugger && debugger->needsExceptionCallbacks()) {
- // We need to clear the exception and the exception stack here in order to see if a new exception happens.
- // Afterwards, the values are put back to continue processing this error.
- ClearExceptionScope scope(&callFrame->vm());
+ if (debugger && debugger->needsExceptionCallbacks() && !exception->didNotifyInspectorOfThrow()) {
// This code assumes that if the debugger is enabled then there is no inlining.
// If that assumption turns out to be false then we'll ignore the inlined call
// frames.
// https://bugs.webkit.org/show_bug.cgi?id=121754
- bool hasHandler;
+ bool hasCatchHandler;
+ bool isTermination = isTerminatedExecutionException(vm, exception);
if (isTermination)
- hasHandler = false;
+ hasCatchHandler = false;
else {
- GetExceptionHandlerFunctor functor;
+ GetCatchHandlerFunctor functor;
callFrame->iterate(functor);
- hasHandler = !!functor.handler();
+ HandlerInfo* handler = functor.handler();
+ ASSERT(!handler || handler->isCatchHandler());
+ hasCatchHandler = !!handler;
}
- debugger->exception(callFrame, exceptionValue, hasHandler);
+ debugger->exception(callFrame, exception->value(), hasCatchHandler);
}
-
- // Calculate an exception handler vPC, unwinding call frames as necessary.
- HandlerInfo* handler = 0;
- VM& vm = callFrame->vm();
- ASSERT(callFrame == vm.topCallFrame);
- UnwindFunctor functor(callFrame, isTermination, codeBlock, handler);
- callFrame->iterate(functor);
- if (!handler)
- return 0;
-
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->exceptionUnwind(callFrame);
-
- // Unwind the scope chain within the exception handler's call frame.
- int targetScopeDepth = handler->scopeDepth;
- if (codeBlock->needsActivation() && callFrame->uncheckedR(codeBlock->activationRegister().offset()).jsValue())
- ++targetScopeDepth;
-
- JSScope* scope = callFrame->scope();
- int scopeDelta = scope->depth() - targetScopeDepth;
- RELEASE_ASSERT(scopeDelta >= 0);
-
- while (scopeDelta--)
- scope = scope->next();
- callFrame->setScope(scope);
-
- return handler;
+ exception->setDidNotifyInspectorOfThrow();
}
static inline JSValue checkedReturn(JSValue returnValue)
@@ -740,36 +733,24 @@ static inline JSObject* checkedReturn(JSObject* returnValue)
return returnValue;
}
-class SamplingScope {
-public:
- SamplingScope(Interpreter* interpreter)
- : m_interpreter(interpreter)
- {
- interpreter->startSampling();
- }
- ~SamplingScope()
- {
- m_interpreter->stopSampling();
- }
-private:
- Interpreter* m_interpreter;
-};
-
-JSValue Interpreter::execute(ProgramExecutable* program, CallFrame* callFrame, JSObject* thisObj)
+JSValue Interpreter::executeProgram(const SourceCode& source, CallFrame* callFrame, JSObject* thisObj)
{
- SamplingScope samplingScope(this);
-
- JSScope* scope = callFrame->scope();
+ JSScope* scope = thisObj->globalObject()->globalScope();
VM& vm = *scope->vm();
+ auto throwScope = DECLARE_THROW_SCOPE(vm);
- ASSERT(!vm.exception());
- ASSERT(!vm.isCollectorBusy());
- if (vm.isCollectorBusy())
+ ProgramExecutable* program = ProgramExecutable::create(callFrame, source);
+ ASSERT(throwScope.exception() || program);
+ RETURN_IF_EXCEPTION(throwScope, { });
+
+ ASSERT(!throwScope.exception());
+ ASSERT(!vm.isCollectorBusyOnCurrentThread());
+ RELEASE_ASSERT(vm.currentThreadIsHoldingAPILock());
+ if (vm.isCollectorBusyOnCurrentThread())
return jsNull();
- VMEntryScope entryScope(vm, scope->globalObject());
- if (!vm.isSafeToRecurse())
- return checkedReturn(throwStackOverflowError(callFrame));
+ if (UNLIKELY(!vm.isSafeToRecurseSoft()))
+ return checkedReturn(throwStackOverflowError(callFrame, throwScope));
// First check if the "program" is actually just a JSON object. If so,
// we'll handle the JSON object here. Else, we'll handle real JS code
@@ -777,7 +758,7 @@ JSValue Interpreter::execute(ProgramExecutable* program, CallFrame* callFrame, J
Vector<JSONPData> JSONPData;
bool parseResult;
- const String programSource = program->source().toString();
+ StringView programSource = program->source().view();
if (programSource.isNull())
return jsUndefined();
if (programSource.is8Bit()) {
@@ -788,6 +769,7 @@ JSValue Interpreter::execute(ProgramExecutable* program, CallFrame* callFrame, J
parseResult = literalParser.tryJSONPParse(JSONPData, scope->globalObject()->globalObjectMethodTable()->supportsRichSourceInfo(scope->globalObject()));
}
+ RETURN_IF_EXCEPTION(throwScope, { });
if (parseResult) {
JSGlobalObject* globalObject = scope->globalObject();
JSValue result;
@@ -808,23 +790,22 @@ JSValue Interpreter::execute(ProgramExecutable* program, CallFrame* callFrame, J
switch (JSONPPath[i].m_type) {
case JSONPPathEntryTypeDot: {
if (i == 0) {
- PropertySlot slot(globalObject);
+ PropertySlot slot(globalObject, PropertySlot::InternalMethodType::Get);
if (!globalObject->getPropertySlot(callFrame, JSONPPath[i].m_pathEntryName, slot)) {
+ RETURN_IF_EXCEPTION(throwScope, JSValue());
if (entry)
- return callFrame->vm().throwException(callFrame, createUndefinedVariableError(globalObject->globalExec(), JSONPPath[i].m_pathEntryName));
+ return throwException(callFrame, throwScope, createUndefinedVariableError(callFrame, JSONPPath[i].m_pathEntryName));
goto failedJSONP;
}
baseObject = slot.getValue(callFrame, JSONPPath[i].m_pathEntryName);
} else
baseObject = baseObject.get(callFrame, JSONPPath[i].m_pathEntryName);
- if (callFrame->hadException())
- return jsUndefined();
+ RETURN_IF_EXCEPTION(throwScope, JSValue());
continue;
}
case JSONPPathEntryTypeLookup: {
- baseObject = baseObject.get(callFrame, JSONPPath[i].m_pathIndex);
- if (callFrame->hadException())
- return jsUndefined();
+ baseObject = baseObject.get(callFrame, static_cast<unsigned>(JSONPPath[i].m_pathIndex));
+ RETURN_IF_EXCEPTION(throwScope, JSValue());
continue;
}
default:
@@ -836,30 +817,26 @@ JSValue Interpreter::execute(ProgramExecutable* program, CallFrame* callFrame, J
switch (JSONPPath.last().m_type) {
case JSONPPathEntryTypeCall: {
JSValue function = baseObject.get(callFrame, JSONPPath.last().m_pathEntryName);
- if (callFrame->hadException())
- return jsUndefined();
+ RETURN_IF_EXCEPTION(throwScope, JSValue());
CallData callData;
CallType callType = getCallData(function, callData);
- if (callType == CallTypeNone)
- return callFrame->vm().throwException(callFrame, createNotAFunctionError(callFrame, function));
+ if (callType == CallType::None)
+ return throwException(callFrame, throwScope, createNotAFunctionError(callFrame, function));
MarkedArgumentBuffer jsonArg;
jsonArg.append(JSONPValue);
JSValue thisValue = JSONPPath.size() == 1 ? jsUndefined(): baseObject;
JSONPValue = JSC::call(callFrame, function, callType, callData, thisValue, jsonArg);
- if (callFrame->hadException())
- return jsUndefined();
+ RETURN_IF_EXCEPTION(throwScope, JSValue());
break;
}
case JSONPPathEntryTypeDot: {
baseObject.put(callFrame, JSONPPath.last().m_pathEntryName, JSONPValue, slot);
- if (callFrame->hadException())
- return jsUndefined();
+ RETURN_IF_EXCEPTION(throwScope, JSValue());
break;
}
case JSONPPathEntryTypeLookup: {
baseObject.putByIndex(callFrame, JSONPPath.last().m_pathIndex, JSONPValue, slot.isStrictMode());
- if (callFrame->hadException())
- return jsUndefined();
+ RETURN_IF_EXCEPTION(throwScope, JSValue());
break;
}
default:
@@ -874,210 +851,190 @@ failedJSONP:
// If we get here, then we have already proven that the script is not a JSON
// object.
+ VMEntryScope entryScope(vm, scope->globalObject());
+
// Compile source to bytecode if necessary:
- if (JSObject* error = program->initializeGlobalProperties(vm, callFrame, scope))
- return checkedReturn(callFrame->vm().throwException(callFrame, error));
+ JSObject* error = program->initializeGlobalProperties(vm, callFrame, scope);
+ ASSERT(!throwScope.exception() || !error);
+ if (UNLIKELY(error))
+ return checkedReturn(throwException(callFrame, throwScope, error));
- if (JSObject* error = program->prepareForExecution(callFrame, scope, CodeForCall))
- return checkedReturn(callFrame->vm().throwException(callFrame, error));
+ ProgramCodeBlock* codeBlock;
+ {
+ CodeBlock* tempCodeBlock;
+ JSObject* error = program->prepareForExecution<ProgramExecutable>(vm, nullptr, scope, CodeForCall, tempCodeBlock);
+ ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
+ if (UNLIKELY(error))
+ return checkedReturn(error);
+ codeBlock = jsCast<ProgramCodeBlock*>(tempCodeBlock);
+ }
- ProgramCodeBlock* codeBlock = program->codeBlock();
+ if (UNLIKELY(vm.shouldTriggerTermination(callFrame)))
+ return throwTerminatedExecutionException(callFrame, throwScope);
- if (UNLIKELY(vm.watchdog.didFire(callFrame)))
- return throwTerminatedExecutionException(callFrame);
+ if (scope->structure()->isUncacheableDictionary())
+ scope->flattenDictionaryObject(vm);
ASSERT(codeBlock->numParameters() == 1); // 1 parameter for 'this'.
- if (UNLIKELY(!m_stack.entryCheck(codeBlock, 1)))
- return checkedReturn(throwStackOverflowError(callFrame));
-
ProtoCallFrame protoCallFrame;
- protoCallFrame.init(codeBlock, scope, 0, thisObj, 1);
-
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->willExecute(callFrame, program->sourceURL(), program->lineNo());
+ protoCallFrame.init(codeBlock, JSCallee::create(vm, scope->globalObject(), scope), thisObj, 1);
// Execute the code:
- JSValue result;
- {
- SamplingTool::CallRecord callRecord(m_sampler.get());
- Watchdog::Scope watchdogScope(vm.watchdog);
-
- result = program->generatedJITCode()->execute(&vm, &protoCallFrame, m_stack.getTopOfStack());
- }
-
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->didExecute(callFrame, program->sourceURL(), program->lineNo());
-
+ throwScope.release();
+ JSValue result = program->generatedJITCode()->execute(&vm, &protoCallFrame);
return checkedReturn(result);
}
JSValue Interpreter::executeCall(CallFrame* callFrame, JSObject* function, CallType callType, const CallData& callData, JSValue thisValue, const ArgList& args)
{
VM& vm = callFrame->vm();
- ASSERT(!callFrame->hadException());
- ASSERT(!vm.isCollectorBusy());
- if (vm.isCollectorBusy())
+ auto throwScope = DECLARE_THROW_SCOPE(vm);
+
+ ASSERT(!throwScope.exception());
+ ASSERT(!vm.isCollectorBusyOnCurrentThread());
+ if (vm.isCollectorBusyOnCurrentThread())
return jsNull();
- bool isJSCall = (callType == CallTypeJS);
- JSScope* scope;
+ bool isJSCall = (callType == CallType::JS);
+ JSScope* scope = nullptr;
CodeBlock* newCodeBlock;
size_t argsCount = 1 + args.size(); // implicit "this" parameter
- if (isJSCall)
+ JSGlobalObject* globalObject;
+
+ if (isJSCall) {
scope = callData.js.scope;
- else {
- ASSERT(callType == CallTypeHost);
- scope = callFrame->scope();
+ globalObject = scope->globalObject();
+ } else {
+ ASSERT(callType == CallType::Host);
+ globalObject = function->globalObject();
}
- VMEntryScope entryScope(vm, scope->globalObject());
- if (!vm.isSafeToRecurse())
- return checkedReturn(throwStackOverflowError(callFrame));
+ VMEntryScope entryScope(vm, globalObject);
+ if (UNLIKELY(!vm.isSafeToRecurseSoft()))
+ return checkedReturn(throwStackOverflowError(callFrame, throwScope));
if (isJSCall) {
// Compile the callee:
- JSObject* compileError = callData.js.functionExecutable->prepareForExecution(callFrame, scope, CodeForCall);
- if (UNLIKELY(!!compileError)) {
- return checkedReturn(callFrame->vm().throwException(callFrame, compileError));
- }
- newCodeBlock = callData.js.functionExecutable->codeBlockForCall();
+ JSObject* compileError = callData.js.functionExecutable->prepareForExecution<FunctionExecutable>(vm, jsCast<JSFunction*>(function), scope, CodeForCall, newCodeBlock);
+ ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(compileError));
+ if (UNLIKELY(!!compileError))
+ return checkedReturn(compileError);
+
ASSERT(!!newCodeBlock);
newCodeBlock->m_shouldAlwaysBeInlined = false;
} else
newCodeBlock = 0;
- if (UNLIKELY(vm.watchdog.didFire(callFrame)))
- return throwTerminatedExecutionException(callFrame);
-
- if (UNLIKELY(!m_stack.entryCheck(newCodeBlock, argsCount)))
- return checkedReturn(throwStackOverflowError(callFrame));
+ if (UNLIKELY(vm.shouldTriggerTermination(callFrame)))
+ return throwTerminatedExecutionException(callFrame, throwScope);
ProtoCallFrame protoCallFrame;
- protoCallFrame.init(newCodeBlock, scope, function, thisValue, argsCount, args.data());
-
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->willExecute(callFrame, function);
+ protoCallFrame.init(newCodeBlock, function, thisValue, argsCount, args.data());
JSValue result;
{
- SamplingTool::CallRecord callRecord(m_sampler.get(), !isJSCall);
- Watchdog::Scope watchdogScope(vm.watchdog);
-
// Execute the code:
- if (isJSCall)
- result = callData.js.functionExecutable->generatedJITCodeForCall()->execute(&vm, &protoCallFrame, m_stack.getTopOfStack());
- else
- result = JSValue::decode(callToNativeFunction(reinterpret_cast<void*>(callData.native.function), &vm.topCallFrame, &protoCallFrame, m_stack.getTopOfStack()));
+ if (isJSCall) {
+ throwScope.release();
+ result = callData.js.functionExecutable->generatedJITCodeForCall()->execute(&vm, &protoCallFrame);
+ } else {
+ result = JSValue::decode(vmEntryToNative(reinterpret_cast<void*>(callData.native.function), &vm, &protoCallFrame));
+ RETURN_IF_EXCEPTION(throwScope, JSValue());
+ }
}
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->didExecute(callFrame, function);
-
return checkedReturn(result);
}
-JSObject* Interpreter::executeConstruct(CallFrame* callFrame, JSObject* constructor, ConstructType constructType, const ConstructData& constructData, const ArgList& args)
+JSObject* Interpreter::executeConstruct(CallFrame* callFrame, JSObject* constructor, ConstructType constructType, const ConstructData& constructData, const ArgList& args, JSValue newTarget)
{
VM& vm = callFrame->vm();
- ASSERT(!callFrame->hadException());
- ASSERT(!vm.isCollectorBusy());
+ auto throwScope = DECLARE_THROW_SCOPE(vm);
+
+ ASSERT(!throwScope.exception());
+ ASSERT(!vm.isCollectorBusyOnCurrentThread());
// We throw in this case because we have to return something "valid" but we're
// already in an invalid state.
- if (vm.isCollectorBusy())
- return checkedReturn(throwStackOverflowError(callFrame));
+ if (vm.isCollectorBusyOnCurrentThread())
+ return checkedReturn(throwStackOverflowError(callFrame, throwScope));
- bool isJSConstruct = (constructType == ConstructTypeJS);
- JSScope* scope;
+ bool isJSConstruct = (constructType == ConstructType::JS);
+ JSScope* scope = nullptr;
CodeBlock* newCodeBlock;
size_t argsCount = 1 + args.size(); // implicit "this" parameter
- if (isJSConstruct)
+ JSGlobalObject* globalObject;
+
+ if (isJSConstruct) {
scope = constructData.js.scope;
- else {
- ASSERT(constructType == ConstructTypeHost);
- scope = callFrame->scope();
+ globalObject = scope->globalObject();
+ } else {
+ ASSERT(constructType == ConstructType::Host);
+ globalObject = constructor->globalObject();
}
- VMEntryScope entryScope(vm, scope->globalObject());
- if (!vm.isSafeToRecurse())
- return checkedReturn(throwStackOverflowError(callFrame));
+ VMEntryScope entryScope(vm, globalObject);
+ if (UNLIKELY(!vm.isSafeToRecurseSoft()))
+ return checkedReturn(throwStackOverflowError(callFrame, throwScope));
if (isJSConstruct) {
// Compile the callee:
- JSObject* compileError = constructData.js.functionExecutable->prepareForExecution(callFrame, scope, CodeForConstruct);
- if (UNLIKELY(!!compileError)) {
- return checkedReturn(callFrame->vm().throwException(callFrame, compileError));
- }
- newCodeBlock = constructData.js.functionExecutable->codeBlockForConstruct();
+ JSObject* compileError = constructData.js.functionExecutable->prepareForExecution<FunctionExecutable>(vm, jsCast<JSFunction*>(constructor), scope, CodeForConstruct, newCodeBlock);
+ ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(compileError));
+ if (UNLIKELY(!!compileError))
+ return checkedReturn(compileError);
+
ASSERT(!!newCodeBlock);
newCodeBlock->m_shouldAlwaysBeInlined = false;
} else
newCodeBlock = 0;
- if (UNLIKELY(vm.watchdog.didFire(callFrame)))
- return throwTerminatedExecutionException(callFrame);
-
- if (UNLIKELY(!m_stack.entryCheck(newCodeBlock, argsCount)))
- return checkedReturn(throwStackOverflowError(callFrame));
+ if (UNLIKELY(vm.shouldTriggerTermination(callFrame)))
+ return throwTerminatedExecutionException(callFrame, throwScope);
ProtoCallFrame protoCallFrame;
- protoCallFrame.init(newCodeBlock, scope, constructor, jsUndefined(), argsCount, args.data());
-
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->willExecute(callFrame, constructor);
+ protoCallFrame.init(newCodeBlock, constructor, newTarget, argsCount, args.data());
JSValue result;
{
- SamplingTool::CallRecord callRecord(m_sampler.get(), !isJSConstruct);
- Watchdog::Scope watchdogScope(vm.watchdog);
-
// Execute the code.
if (isJSConstruct)
- result = constructData.js.functionExecutable->generatedJITCodeForConstruct()->execute(&vm, &protoCallFrame, m_stack.getTopOfStack());
+ result = constructData.js.functionExecutable->generatedJITCodeForConstruct()->execute(&vm, &protoCallFrame);
else {
- result = JSValue::decode(callToNativeFunction(reinterpret_cast<void*>(constructData.native.function), &vm.topCallFrame, &protoCallFrame, m_stack.getTopOfStack()));
+ result = JSValue::decode(vmEntryToNative(reinterpret_cast<void*>(constructData.native.function), &vm, &protoCallFrame));
- if (!callFrame->hadException())
+ if (LIKELY(!throwScope.exception()))
RELEASE_ASSERT(result.isObject());
}
}
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->didExecute(callFrame, constructor);
-
- if (callFrame->hadException())
- return 0;
+ RETURN_IF_EXCEPTION(throwScope, 0);
ASSERT(result.isObject());
return checkedReturn(asObject(result));
}
-CallFrameClosure Interpreter::prepareForRepeatCall(FunctionExecutable* functionExecutable, CallFrame* callFrame, ProtoCallFrame* protoCallFrame, JSFunction* function, int argumentCountIncludingThis, JSScope* scope, JSValue* args)
+CallFrameClosure Interpreter::prepareForRepeatCall(FunctionExecutable* functionExecutable, CallFrame* callFrame, ProtoCallFrame* protoCallFrame, JSFunction* function, int argumentCountIncludingThis, JSScope* scope, const ArgList& args)
{
VM& vm = *scope->vm();
- ASSERT(!vm.exception());
+ auto throwScope = DECLARE_THROW_SCOPE(vm);
+ ASSERT_UNUSED(throwScope, !throwScope.exception());
- if (vm.isCollectorBusy())
+ if (vm.isCollectorBusyOnCurrentThread())
return CallFrameClosure();
// Compile the callee:
- JSObject* error = functionExecutable->prepareForExecution(callFrame, scope, CodeForCall);
- if (error) {
- callFrame->vm().throwException(callFrame, error);
+ CodeBlock* newCodeBlock;
+ JSObject* error = functionExecutable->prepareForExecution<FunctionExecutable>(vm, function, scope, CodeForCall, newCodeBlock);
+ ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
+ if (UNLIKELY(error))
return CallFrameClosure();
- }
- CodeBlock* newCodeBlock = functionExecutable->codeBlockForCall();
newCodeBlock->m_shouldAlwaysBeInlined = false;
size_t argsCount = argumentCountIncludingThis;
- if (UNLIKELY(!m_stack.entryCheck(newCodeBlock, argsCount))) {
- throwStackOverflowError(callFrame);
- return CallFrameClosure();
- }
-
- protoCallFrame->init(newCodeBlock, scope, function, jsUndefined(), argsCount, args);
+ protoCallFrame->init(newCodeBlock, function, jsUndefined(), argsCount, args.data());
// Return the successful closure:
CallFrameClosure result = { callFrame, protoCallFrame, function, functionExecutable, &vm, scope, newCodeBlock->numParameters(), argumentCountIncludingThis };
return result;
@@ -1086,32 +1043,21 @@ CallFrameClosure Interpreter::prepareForRepeatCall(FunctionExecutable* functionE
JSValue Interpreter::execute(CallFrameClosure& closure)
{
VM& vm = *closure.vm;
- SamplingScope samplingScope(this);
-
- ASSERT(!vm.isCollectorBusy());
- if (vm.isCollectorBusy())
+ auto throwScope = DECLARE_THROW_SCOPE(vm);
+
+ ASSERT(!vm.isCollectorBusyOnCurrentThread());
+ RELEASE_ASSERT(vm.currentThreadIsHoldingAPILock());
+ if (vm.isCollectorBusyOnCurrentThread())
return jsNull();
StackStats::CheckPoint stackCheckPoint;
- closure.resetCallFrame();
-
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->willExecute(closure.oldCallFrame, closure.function);
- if (UNLIKELY(vm.watchdog.didFire(closure.oldCallFrame)))
- return throwTerminatedExecutionException(closure.oldCallFrame);
+ if (UNLIKELY(vm.shouldTriggerTermination(closure.oldCallFrame)))
+ return throwTerminatedExecutionException(closure.oldCallFrame, throwScope);
// Execute the code:
- JSValue result;
- {
- SamplingTool::CallRecord callRecord(m_sampler.get());
- Watchdog::Scope watchdogScope(vm.watchdog);
-
- result = closure.functionExecutable->generatedJITCodeForCall()->execute(&vm, closure.protoCallFrame, m_stack.getTopOfStack());
- }
-
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->didExecute(closure.oldCallFrame, closure.function);
+ throwScope.release();
+ JSValue result = closure.functionExecutable->generatedJITCodeForCall()->execute(&vm, closure.protoCallFrame);
return checkedReturn(result);
}
@@ -1119,50 +1065,89 @@ JSValue Interpreter::execute(CallFrameClosure& closure)
JSValue Interpreter::execute(EvalExecutable* eval, CallFrame* callFrame, JSValue thisValue, JSScope* scope)
{
VM& vm = *scope->vm();
- SamplingScope samplingScope(this);
-
+ auto throwScope = DECLARE_THROW_SCOPE(vm);
+
ASSERT(scope->vm() == &callFrame->vm());
- ASSERT(!vm.exception());
- ASSERT(!vm.isCollectorBusy());
- if (vm.isCollectorBusy())
+ ASSERT(!throwScope.exception());
+ ASSERT(!vm.isCollectorBusyOnCurrentThread());
+ RELEASE_ASSERT(vm.currentThreadIsHoldingAPILock());
+ if (vm.isCollectorBusyOnCurrentThread())
return jsNull();
VMEntryScope entryScope(vm, scope->globalObject());
- if (!vm.isSafeToRecurse())
- return checkedReturn(throwStackOverflowError(callFrame));
+ if (UNLIKELY(!vm.isSafeToRecurseSoft()))
+ return checkedReturn(throwStackOverflowError(callFrame, throwScope));
unsigned numVariables = eval->numVariables();
int numFunctions = eval->numberOfFunctionDecls();
JSScope* variableObject;
if ((numVariables || numFunctions) && eval->isStrictMode()) {
- scope = StrictEvalActivation::create(callFrame);
+ scope = StrictEvalActivation::create(callFrame, scope);
variableObject = scope;
} else {
for (JSScope* node = scope; ; node = node->next()) {
RELEASE_ASSERT(node);
- if (node->isVariableObject() && !node->isNameScopeObject()) {
+ if (node->isGlobalObject()) {
variableObject = node;
break;
+ }
+ if (node->isJSLexicalEnvironment()) {
+ JSLexicalEnvironment* lexicalEnvironment = jsCast<JSLexicalEnvironment*>(node);
+ if (lexicalEnvironment->symbolTable()->scopeType() == SymbolTable::ScopeType::VarScope) {
+ variableObject = node;
+ break;
+ }
}
}
}
- JSObject* compileError = eval->prepareForExecution(callFrame, scope, CodeForCall);
- if (UNLIKELY(!!compileError))
- return checkedReturn(callFrame->vm().throwException(callFrame, compileError));
- EvalCodeBlock* codeBlock = eval->codeBlock();
+ EvalCodeBlock* codeBlock;
+ {
+ CodeBlock* tempCodeBlock;
+ JSObject* compileError = eval->prepareForExecution<EvalExecutable>(vm, nullptr, scope, CodeForCall, tempCodeBlock);
+ ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(compileError));
+ if (UNLIKELY(!!compileError))
+ return checkedReturn(compileError);
+ codeBlock = jsCast<EvalCodeBlock*>(tempCodeBlock);
+ }
+
+ // We can't declare a "var"/"function" that overwrites a global "let"/"const"/"class" in a sloppy-mode eval.
+ if (variableObject->isGlobalObject() && !eval->isStrictMode() && (numVariables || numFunctions)) {
+ JSGlobalLexicalEnvironment* globalLexicalEnvironment = jsCast<JSGlobalObject*>(variableObject)->globalLexicalEnvironment();
+ for (unsigned i = 0; i < numVariables; ++i) {
+ const Identifier& ident = codeBlock->variable(i);
+ PropertySlot slot(globalLexicalEnvironment, PropertySlot::InternalMethodType::VMInquiry);
+ if (JSGlobalLexicalEnvironment::getOwnPropertySlot(globalLexicalEnvironment, callFrame, ident, slot)) {
+ return checkedReturn(throwTypeError(callFrame, throwScope, makeString("Can't create duplicate global variable in eval: '", String(ident.impl()), "'")));
+ }
+ }
+
+ for (int i = 0; i < numFunctions; ++i) {
+ FunctionExecutable* function = codeBlock->functionDecl(i);
+ PropertySlot slot(globalLexicalEnvironment, PropertySlot::InternalMethodType::VMInquiry);
+ if (JSGlobalLexicalEnvironment::getOwnPropertySlot(globalLexicalEnvironment, callFrame, function->name(), slot)) {
+ return checkedReturn(throwTypeError(callFrame, throwScope, makeString("Can't create duplicate global variable in eval: '", String(function->name().impl()), "'")));
+ }
+ }
+ }
+
+ if (variableObject->structure()->isUncacheableDictionary())
+ variableObject->flattenDictionaryObject(vm);
if (numVariables || numFunctions) {
BatchedTransitionOptimizer optimizer(vm, variableObject);
- if (variableObject->next())
- variableObject->globalObject()->varInjectionWatchpoint()->fireAll();
+ if (variableObject->next() && !eval->isStrictMode())
+ variableObject->globalObject()->varInjectionWatchpoint()->fireAll(vm, "Executed eval, fired VarInjection watchpoint");
for (unsigned i = 0; i < numVariables; ++i) {
const Identifier& ident = codeBlock->variable(i);
- if (!variableObject->hasProperty(callFrame, ident)) {
+ bool hasProperty = variableObject->hasProperty(callFrame, ident);
+ RETURN_IF_EXCEPTION(throwScope, checkedReturn(throwScope.exception()));
+ if (!hasProperty) {
PutPropertySlot slot(variableObject);
variableObject->methodTable()->put(variableObject, callFrame, ident, jsUndefined(), slot);
+ RETURN_IF_EXCEPTION(throwScope, checkedReturn(throwScope.exception()));
}
}
@@ -1170,101 +1155,107 @@ JSValue Interpreter::execute(EvalExecutable* eval, CallFrame* callFrame, JSValue
FunctionExecutable* function = codeBlock->functionDecl(i);
PutPropertySlot slot(variableObject);
variableObject->methodTable()->put(variableObject, callFrame, function->name(), JSFunction::create(vm, function, scope), slot);
+ RETURN_IF_EXCEPTION(throwScope, checkedReturn(throwScope.exception()));
}
}
- if (UNLIKELY(vm.watchdog.didFire(callFrame)))
- return throwTerminatedExecutionException(callFrame);
+ if (UNLIKELY(vm.shouldTriggerTermination(callFrame)))
+ return throwTerminatedExecutionException(callFrame, throwScope);
ASSERT(codeBlock->numParameters() == 1); // 1 parameter for 'this'.
- if (UNLIKELY(!m_stack.entryCheck(codeBlock, 1)))
- return checkedReturn(throwStackOverflowError(callFrame));
-
ProtoCallFrame protoCallFrame;
- protoCallFrame.init(codeBlock, scope, 0, thisValue, 1);
-
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->willExecute(callFrame, eval->sourceURL(), eval->lineNo());
+ protoCallFrame.init(codeBlock, JSCallee::create(vm, scope->globalObject(), scope), thisValue, 1);
// Execute the code:
- JSValue result;
- {
- SamplingTool::CallRecord callRecord(m_sampler.get());
- Watchdog::Scope watchdogScope(vm.watchdog);
+ throwScope.release();
+ JSValue result = eval->generatedJITCode()->execute(&vm, &protoCallFrame);
+
+ return checkedReturn(result);
+}
- result = eval->generatedJITCode()->execute(&vm, &protoCallFrame, m_stack.getTopOfStack());
+JSValue Interpreter::execute(ModuleProgramExecutable* executable, CallFrame* callFrame, JSModuleEnvironment* scope)
+{
+ VM& vm = *scope->vm();
+ auto throwScope = DECLARE_THROW_SCOPE(vm);
+
+ ASSERT(scope->vm() == &callFrame->vm());
+ ASSERT(!throwScope.exception());
+ ASSERT(!vm.isCollectorBusyOnCurrentThread());
+ RELEASE_ASSERT(vm.currentThreadIsHoldingAPILock());
+ if (vm.isCollectorBusyOnCurrentThread())
+ return jsNull();
+
+ VMEntryScope entryScope(vm, scope->globalObject());
+ if (UNLIKELY(!vm.isSafeToRecurseSoft()))
+ return checkedReturn(throwStackOverflowError(callFrame, throwScope));
+
+ ModuleProgramCodeBlock* codeBlock;
+ {
+ CodeBlock* tempCodeBlock;
+ JSObject* compileError = executable->prepareForExecution<ModuleProgramExecutable>(vm, nullptr, scope, CodeForCall, tempCodeBlock);
+ ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(compileError));
+ if (UNLIKELY(!!compileError))
+ return checkedReturn(compileError);
+ codeBlock = jsCast<ModuleProgramCodeBlock*>(tempCodeBlock);
}
- if (LegacyProfiler* profiler = vm.enabledProfiler())
- profiler->didExecute(callFrame, eval->sourceURL(), eval->lineNo());
+ if (UNLIKELY(vm.shouldTriggerTermination(callFrame)))
+ return throwTerminatedExecutionException(callFrame, throwScope);
+
+ if (scope->structure()->isUncacheableDictionary())
+ scope->flattenDictionaryObject(vm);
+
+ ASSERT(codeBlock->numParameters() == 1); // 1 parameter for 'this'.
+
+ // The |this| of the module is always `undefined`.
+ // http://www.ecma-international.org/ecma-262/6.0/#sec-module-environment-records-hasthisbinding
+ // http://www.ecma-international.org/ecma-262/6.0/#sec-module-environment-records-getthisbinding
+ ProtoCallFrame protoCallFrame;
+ protoCallFrame.init(codeBlock, JSCallee::create(vm, scope->globalObject(), scope), jsUndefined(), 1);
+
+ // Execute the code:
+ throwScope.release();
+ JSValue result = executable->generatedJITCode()->execute(&vm, &protoCallFrame);
return checkedReturn(result);
}
-NEVER_INLINE void Interpreter::debug(CallFrame* callFrame, DebugHookID debugHookID)
+NEVER_INLINE void Interpreter::debug(CallFrame* callFrame, DebugHookType debugHookType)
{
+ VM& vm = callFrame->vm();
+ auto scope = DECLARE_CATCH_SCOPE(vm);
Debugger* debugger = callFrame->vmEntryGlobalObject()->debugger();
if (!debugger)
return;
- ASSERT(callFrame->codeBlock()->hasDebuggerRequests() || callFrame->hadException());
- switch (debugHookID) {
+ ASSERT(callFrame->codeBlock()->hasDebuggerRequests());
+ ASSERT_UNUSED(scope, !scope.exception());
+
+ switch (debugHookType) {
case DidEnterCallFrame:
debugger->callEvent(callFrame);
- return;
+ break;
case WillLeaveCallFrame:
debugger->returnEvent(callFrame);
- return;
+ break;
case WillExecuteStatement:
debugger->atStatement(callFrame);
- return;
+ break;
+ case WillExecuteExpression:
+ debugger->atExpression(callFrame);
+ break;
case WillExecuteProgram:
debugger->willExecuteProgram(callFrame);
- return;
+ break;
case DidExecuteProgram:
debugger->didExecuteProgram(callFrame);
- return;
+ break;
case DidReachBreakpoint:
debugger->didReachBreakpoint(callFrame);
- return;
- }
-}
-
-void Interpreter::enableSampler()
-{
-#if ENABLE(OPCODE_SAMPLING)
- if (!m_sampler) {
- m_sampler = adoptPtr(new SamplingTool(this));
- m_sampler->setup();
+ break;
}
-#endif
-}
-void Interpreter::dumpSampleData(ExecState* exec)
-{
-#if ENABLE(OPCODE_SAMPLING)
- if (m_sampler)
- m_sampler->dump(exec);
-#else
- UNUSED_PARAM(exec);
-#endif
-}
-void Interpreter::startSampling()
-{
-#if ENABLE(SAMPLING_THREAD)
- if (!m_sampleEntryDepth)
- SamplingThread::start();
-
- m_sampleEntryDepth++;
-#endif
-}
-void Interpreter::stopSampling()
-{
-#if ENABLE(SAMPLING_THREAD)
- m_sampleEntryDepth--;
- if (!m_sampleEntryDepth)
- SamplingThread::stop();
-#endif
+ ASSERT(!scope.exception());
}
} // namespace JSC
diff --git a/Source/JavaScriptCore/interpreter/Interpreter.h b/Source/JavaScriptCore/interpreter/Interpreter.h
index cfc721b38..c3bf90bbc 100644
--- a/Source/JavaScriptCore/interpreter/Interpreter.h
+++ b/Source/JavaScriptCore/interpreter/Interpreter.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
* Copyright (C) 2012 Research In Motion Limited. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@@ -11,7 +11,7 @@
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
- * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
@@ -27,161 +27,65 @@
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef Interpreter_h
-#define Interpreter_h
+#pragma once
#include "ArgList.h"
+#include "CatchScope.h"
+#include "FrameTracers.h"
#include "JSCJSValue.h"
#include "JSCell.h"
-#include "JSFunction.h"
#include "JSObject.h"
-#include "JSStack.h"
-#include "LLIntData.h"
#include "Opcode.h"
-#include "SourceProvider.h"
-
+#include "StackAlignment.h"
#include <wtf/HashMap.h>
-#include <wtf/text/StringBuilder.h>
+
+#if !ENABLE(JIT)
+#include "CLoopStack.h"
+#endif
+
namespace JSC {
class CodeBlock;
class EvalExecutable;
- class ExecutableBase;
class FunctionExecutable;
class VM;
+ class JSFunction;
class JSGlobalObject;
+ class JSModuleEnvironment;
+ class JSModuleRecord;
class LLIntOffsetsExtractor;
class ProgramExecutable;
+ class ModuleProgramExecutable;
class Register;
class JSScope;
- class SamplingTool;
+ class StackFrame;
struct CallFrameClosure;
struct HandlerInfo;
struct Instruction;
struct ProtoCallFrame;
+ struct UnlinkedInstruction;
- enum DebugHookID {
+ enum UnwindStart : uint8_t { UnwindFromCurrentFrame, UnwindFromCallerFrame };
+
+ enum DebugHookType {
WillExecuteProgram,
DidExecuteProgram,
DidEnterCallFrame,
DidReachBreakpoint,
WillLeaveCallFrame,
- WillExecuteStatement
+ WillExecuteStatement,
+ WillExecuteExpression,
};
enum StackFrameCodeType {
StackFrameGlobalCode,
StackFrameEvalCode,
+ StackFrameModuleCode,
StackFrameFunctionCode,
StackFrameNativeCode
};
- struct StackFrame {
- Strong<JSObject> callee;
- StackFrameCodeType codeType;
- Strong<ExecutableBase> executable;
- Strong<UnlinkedCodeBlock> codeBlock;
- RefPtr<SourceProvider> code;
- int lineOffset;
- unsigned firstLineColumnOffset;
- unsigned characterOffset;
- unsigned bytecodeOffset;
- String sourceURL;
- JS_EXPORT_PRIVATE String toString(CallFrame*);
- String friendlySourceURL() const
- {
- String traceLine;
-
- switch (codeType) {
- case StackFrameEvalCode:
- case StackFrameFunctionCode:
- case StackFrameGlobalCode:
- if (!sourceURL.isEmpty())
- traceLine = sourceURL.impl();
- break;
- case StackFrameNativeCode:
- traceLine = "[native code]";
- break;
- }
- return traceLine.isNull() ? emptyString() : traceLine;
- }
- String friendlyFunctionName(CallFrame* callFrame) const
- {
- String traceLine;
- JSObject* stackFrameCallee = callee.get();
-
- switch (codeType) {
- case StackFrameEvalCode:
- traceLine = "eval code";
- break;
- case StackFrameNativeCode:
- if (callee)
- traceLine = getCalculatedDisplayName(callFrame, stackFrameCallee).impl();
- break;
- case StackFrameFunctionCode:
- traceLine = getCalculatedDisplayName(callFrame, stackFrameCallee).impl();
- break;
- case StackFrameGlobalCode:
- traceLine = "global code";
- break;
- }
- return traceLine.isNull() ? emptyString() : traceLine;
- }
- JS_EXPORT_PRIVATE void computeLineAndColumn(unsigned& line, unsigned& column);
-
- private:
- void expressionInfo(int& divot, int& startOffset, int& endOffset, unsigned& line, unsigned& column);
- };
-
- class ClearExceptionScope {
- public:
- ClearExceptionScope(VM* vm): m_vm(vm)
- {
- vm->getExceptionInfo(oldException, oldExceptionStack);
- vm->clearException();
- }
- ~ClearExceptionScope()
- {
- m_vm->setExceptionInfo(oldException, oldExceptionStack);
- }
- private:
- JSC::JSValue oldException;
- RefCountedArray<JSC::StackFrame> oldExceptionStack;
- VM* m_vm;
- };
-
- class TopCallFrameSetter {
- public:
- TopCallFrameSetter(VM& currentVM, CallFrame* callFrame)
- : vm(currentVM)
- , oldCallFrame(currentVM.topCallFrame)
- {
- ASSERT(!callFrame->isVMEntrySentinel());
- currentVM.topCallFrame = callFrame;
- }
-
- ~TopCallFrameSetter()
- {
- ASSERT(!oldCallFrame->isVMEntrySentinel());
- vm.topCallFrame = oldCallFrame;
- }
- private:
- VM& vm;
- CallFrame* oldCallFrame;
- };
-
- class NativeCallFrameTracer {
- public:
- ALWAYS_INLINE NativeCallFrameTracer(VM* vm, CallFrame* callFrame)
- {
- ASSERT(vm);
- ASSERT(callFrame);
- ASSERT(!callFrame->isVMEntrySentinel());
- vm->topCallFrame = callFrame;
- }
- };
-
class Interpreter {
WTF_MAKE_FAST_ALLOCATED;
friend class CachedCall;
@@ -190,20 +94,14 @@ namespace JSC {
friend class VM;
public:
- class ErrorHandlingMode {
- public:
- JS_EXPORT_PRIVATE ErrorHandlingMode(ExecState*);
- JS_EXPORT_PRIVATE ~ErrorHandlingMode();
- private:
- Interpreter& m_interpreter;
- };
-
Interpreter(VM &);
~Interpreter();
- void initialize(bool canUseJIT);
+ void initialize();
- JSStack& stack() { return m_stack; }
+#if !ENABLE(JIT)
+ CLoopStack& cloopStack() { return m_cloopStack; }
+#endif
Opcode getOpcode(OpcodeID id)
{
@@ -218,64 +116,60 @@ namespace JSC {
OpcodeID getOpcodeID(Opcode opcode)
{
ASSERT(m_initialized);
-#if ENABLE(COMPUTED_GOTO_OPCODES) && ENABLE(LLINT)
+#if ENABLE(COMPUTED_GOTO_OPCODES)
ASSERT(isOpcode(opcode));
return m_opcodeIDTable.get(opcode);
#else
return opcode;
#endif
}
-
+
+ OpcodeID getOpcodeID(const Instruction&);
+ OpcodeID getOpcodeID(const UnlinkedInstruction&);
+
bool isOpcode(Opcode);
- JSValue execute(ProgramExecutable*, CallFrame*, JSObject* thisObj);
+ JSValue executeProgram(const SourceCode&, CallFrame*, JSObject* thisObj);
JSValue executeCall(CallFrame*, JSObject* function, CallType, const CallData&, JSValue thisValue, const ArgList&);
- JSObject* executeConstruct(CallFrame*, JSObject* function, ConstructType, const ConstructData&, const ArgList&);
+ JSObject* executeConstruct(CallFrame*, JSObject* function, ConstructType, const ConstructData&, const ArgList&, JSValue newTarget);
JSValue execute(EvalExecutable*, CallFrame*, JSValue thisValue, JSScope*);
+ JSValue execute(ModuleProgramExecutable*, CallFrame*, JSModuleEnvironment*);
void getArgumentsData(CallFrame*, JSFunction*&, ptrdiff_t& firstParameterIndex, Register*& argv, int& argc);
- SamplingTool* sampler() { return m_sampler.get(); }
-
- bool isInErrorHandlingMode() { return m_errorHandlingModeReentry; }
-
- NEVER_INLINE HandlerInfo* unwind(CallFrame*&, JSValue&);
- NEVER_INLINE void debug(CallFrame*, DebugHookID);
- JSString* stackTraceAsString(ExecState*, Vector<StackFrame>);
+ NEVER_INLINE HandlerInfo* unwind(VM&, CallFrame*&, Exception*, UnwindStart);
+ void notifyDebuggerOfExceptionToBeThrown(CallFrame*, Exception*);
+ NEVER_INLINE void debug(CallFrame*, DebugHookType);
+ static JSString* stackTraceAsString(VM&, const Vector<StackFrame>&);
static EncodedJSValue JSC_HOST_CALL constructWithErrorConstructor(ExecState*);
static EncodedJSValue JSC_HOST_CALL callErrorConstructor(ExecState*);
static EncodedJSValue JSC_HOST_CALL constructWithNativeErrorConstructor(ExecState*);
static EncodedJSValue JSC_HOST_CALL callNativeErrorConstructor(ExecState*);
- void dumpSampleData(ExecState* exec);
- void startSampling();
- void stopSampling();
-
JS_EXPORT_PRIVATE void dumpCallFrame(CallFrame*);
+ void getStackTrace(Vector<StackFrame>& results, size_t framesToSkip = 0, size_t maxStackSize = std::numeric_limits<size_t>::max());
+
private:
enum ExecutionFlag { Normal, InitializeAndReturn };
- CallFrameClosure prepareForRepeatCall(FunctionExecutable*, CallFrame*, ProtoCallFrame*, JSFunction*, int argumentCountIncludingThis, JSScope*, JSValue*);
+ CallFrameClosure prepareForRepeatCall(FunctionExecutable*, CallFrame*, ProtoCallFrame*, JSFunction*, int argumentCountIncludingThis, JSScope*, const ArgList&);
JSValue execute(CallFrameClosure&);
- void getStackTrace(Vector<StackFrame>& results, size_t maxStackSize = std::numeric_limits<size_t>::max());
+
void dumpRegisters(CallFrame*);
- bool isCallBytecode(Opcode opcode) { return opcode == getOpcode(op_call) || opcode == getOpcode(op_construct) || opcode == getOpcode(op_call_eval); }
-
- void enableSampler();
- int m_sampleEntryDepth;
- OwnPtr<SamplingTool> m_sampler;
+ bool isCallBytecode(Opcode opcode) { return opcode == getOpcode(op_call) || opcode == getOpcode(op_construct) || opcode == getOpcode(op_call_eval) || opcode == getOpcode(op_tail_call); }
VM& m_vm;
- JSStack m_stack;
- int m_errorHandlingModeReentry;
+#if !ENABLE(JIT)
+ CLoopStack m_cloopStack;
+#endif
-#if ENABLE(COMPUTED_GOTO_OPCODES) && ENABLE(LLINT)
+#if ENABLE(COMPUTED_GOTO_OPCODES)
Opcode* m_opcodeTable; // Maps OpcodeID => Opcode for compiling
HashMap<Opcode, OpcodeID> m_opcodeIDTable; // Maps Opcode => OpcodeID for decompiling
#endif
@@ -286,8 +180,30 @@ namespace JSC {
};
JSValue eval(CallFrame*);
- CallFrame* sizeAndAllocFrameForVarargs(CallFrame*, JSStack*, JSValue, int);
- void loadVarargs(CallFrame*, CallFrame*, JSValue, JSValue);
-} // namespace JSC
-#endif // Interpreter_h
+ inline CallFrame* calleeFrameForVarargs(CallFrame* callFrame, unsigned numUsedStackSlots, unsigned argumentCountIncludingThis)
+ {
+ // We want the new frame to be allocated on a stack aligned offset with a stack
+ // aligned size. Align the size here.
+ argumentCountIncludingThis = WTF::roundUpToMultipleOf(
+ stackAlignmentRegisters(),
+ argumentCountIncludingThis + CallFrame::headerSizeInRegisters) - CallFrame::headerSizeInRegisters;
+
+ // Align the frame offset here.
+ unsigned paddedCalleeFrameOffset = WTF::roundUpToMultipleOf(
+ stackAlignmentRegisters(),
+ numUsedStackSlots + argumentCountIncludingThis + CallFrame::headerSizeInRegisters);
+ return CallFrame::create(callFrame->registers() - paddedCalleeFrameOffset);
+ }
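
calleeFrameForVarargs pads both the argument count and the frame offset so the callee frame begins and ends on stack-aligned register boundaries. A small sketch of the same arithmetic with assumed constants (2 registers of alignment and a 5-register header; the real values come from stackAlignmentRegisters() and CallFrame::headerSizeInRegisters):

    #include <cstdio>

    static unsigned roundUpToMultipleOf(unsigned divisor, unsigned x)
    {
        return (x + divisor - 1) / divisor * divisor;
    }

    int main()
    {
        const unsigned alignment = 2, headerSize = 5; // assumed values for illustration
        unsigned argumentCountIncludingThis = 3;      // e.g. f(a, b) called with |this|
        unsigned numUsedStackSlots = 17;

        // Pad the argument count so header + arguments is a multiple of the alignment.
        unsigned paddedArgCount =
            roundUpToMultipleOf(alignment, argumentCountIncludingThis + headerSize) - headerSize;

        // Pad the offset so the callee frame starts on an aligned register index.
        unsigned paddedCalleeFrameOffset =
            roundUpToMultipleOf(alignment, numUsedStackSlots + paddedArgCount + headerSize);

        std::printf("paddedArgCount=%u offset=%u\n", paddedArgCount, paddedCalleeFrameOffset); // 3, 26
        return 0;
    }
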
+
+ unsigned sizeOfVarargs(CallFrame* exec, JSValue arguments, uint32_t firstVarArgOffset);
+ static const unsigned maxArguments = 0x10000;
+ unsigned sizeFrameForVarargs(CallFrame* exec, VM&, JSValue arguments, unsigned numUsedStackSlots, uint32_t firstVarArgOffset);
+ unsigned sizeFrameForForwardArguments(CallFrame* exec, VM&, unsigned numUsedStackSlots);
+ void loadVarargs(CallFrame* execCaller, VirtualRegister firstElementDest, JSValue source, uint32_t offset, uint32_t length);
+ void setupVarargsFrame(CallFrame* execCaller, CallFrame* execCallee, JSValue arguments, uint32_t firstVarArgOffset, uint32_t length);
+ void setupVarargsFrameAndSetThis(CallFrame* execCaller, CallFrame* execCallee, JSValue thisValue, JSValue arguments, uint32_t firstVarArgOffset, uint32_t length);
+ void setupForwardArgumentsFrame(CallFrame* execCaller, CallFrame* execCallee, uint32_t length);
+ void setupForwardArgumentsFrameAndSetThis(CallFrame* execCaller, CallFrame* execCallee, JSValue thisValue, uint32_t length);
+
+} // namespace JSC
diff --git a/Source/JavaScriptCore/interpreter/InterpreterInlines.h b/Source/JavaScriptCore/interpreter/InterpreterInlines.h
new file mode 100644
index 000000000..177c15479
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/InterpreterInlines.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2016 Yusuke Suzuki <utatane.tea@gmail.com>
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include "Instruction.h"
+#include "Interpreter.h"
+#include "UnlinkedCodeBlock.h"
+
+namespace JSC {
+
+inline OpcodeID Interpreter::getOpcodeID(const Instruction& instruction)
+{
+ return getOpcodeID(instruction.u.opcode);
+}
+
+inline OpcodeID Interpreter::getOpcodeID(const UnlinkedInstruction& instruction)
+{
+ return instruction.u.opcode;
+}
+
+} // namespace JSC
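
With COMPUTED_GOTO_OPCODES, a linked instruction stream stores label addresses rather than small integers, which is why the interpreter keeps a hash map (m_opcodeIDTable) for the reverse, "decompiling" direction. An illustrative sketch of that scheme, relying on the GCC/Clang labels-as-values extension; the enum, labels, and table names below are invented for the example:

    #include <cstdio>
    #include <unordered_map>

    enum OpcodeID { op_add, op_ret, numOpcodeIDs };

    int main()
    {
        // "Compile" direction: OpcodeID => opcode (a label address under threaded dispatch).
        void* opcodeTable[numOpcodeIDs] = { &&label_add, &&label_ret };

        // "Decompile" direction: opcode (label address) => OpcodeID.
        std::unordered_map<void*, int> opcodeIDTable;
        for (int i = 0; i < numOpcodeIDs; ++i)
            opcodeIDTable[opcodeTable[i]] = i;

        void* instruction = opcodeTable[op_add]; // what a linked instruction stream would store
        std::printf("decoded id = %d\n", opcodeIDTable[instruction]); // prints 0 (op_add)

        goto *instruction; // threaded dispatch straight to the handler
    label_add:
        std::printf("add handler\n");
        return 0;
    label_ret:
        std::printf("ret handler\n");
        return 0;
    }
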
diff --git a/Source/JavaScriptCore/interpreter/JSStack.cpp b/Source/JavaScriptCore/interpreter/JSStack.cpp
deleted file mode 100644
index 8b73d3cc9..000000000
--- a/Source/JavaScriptCore/interpreter/JSStack.cpp
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright (C) 2008, 2013 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
- * its contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
- * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
- * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
- * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
- * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
- * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "JSStackInlines.h"
-
-#include "ConservativeRoots.h"
-#include "Interpreter.h"
-#include <wtf/PageBlock.h>
-
-namespace JSC {
-
-static size_t committedBytesCount = 0;
-
-static Mutex& stackStatisticsMutex()
-{
- DEFINE_STATIC_LOCAL(Mutex, staticMutex, ());
- return staticMutex;
-}
-
-static size_t commitSize()
-{
- static size_t size = 0;
- if (!size)
- size = std::max(16 * 1024, static_cast<int>(WTF::pageSize()));
- return size;
-}
-
-JSStack::JSStack(VM& vm, size_t capacity)
- : m_vm(vm)
- , m_end(0)
- , m_topCallFrame(vm.topCallFrame)
-{
- ASSERT(capacity && isPageAligned(capacity));
-
- m_reservation = PageReservation::reserve(roundUpAllocationSize(capacity * sizeof(Register), commitSize()), OSAllocator::JSVMStackPages);
- updateStackLimit(highAddress());
- m_commitEnd = highAddress();
-
- disableErrorStackReserve();
-
- m_topCallFrame = 0;
-}
-
-JSStack::~JSStack()
-{
- void* highAddress = reinterpret_cast<void*>(static_cast<char*>(m_reservation.base()) + m_reservation.size());
- m_reservation.decommit(reinterpret_cast<void*>(m_commitEnd), reinterpret_cast<intptr_t>(highAddress) - reinterpret_cast<intptr_t>(m_commitEnd));
- addToCommittedByteCount(-(reinterpret_cast<intptr_t>(highAddress) - reinterpret_cast<intptr_t>(m_commitEnd)));
- m_reservation.deallocate();
-}
-
-bool JSStack::growSlowCase(Register* newEnd)
-{
- // If we have already committed enough memory to satisfy this request,
- // just update the end pointer and return.
- if (newEnd >= m_commitEnd) {
- updateStackLimit(newEnd);
- return true;
- }
-
- // Compute the chunk size of additional memory to commit, and see if we
- // have it is still within our budget. If not, we'll fail to grow and
- // return false.
- long delta = roundUpAllocationSize(reinterpret_cast<char*>(m_commitEnd) - reinterpret_cast<char*>(newEnd), commitSize());
- if (reinterpret_cast<char*>(m_commitEnd) - delta <= reinterpret_cast<char*>(m_useableEnd))
- return false;
-
- // Otherwise, the growth is still within our budget. Go ahead and commit
- // it and return true.
- m_reservation.commit(reinterpret_cast<char*>(m_commitEnd) - delta, delta);
- addToCommittedByteCount(delta);
- m_commitEnd = reinterpret_cast_ptr<Register*>(reinterpret_cast<char*>(m_commitEnd) - delta);
- updateStackLimit(newEnd);
- return true;
-}
-
-void JSStack::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
-{
- conservativeRoots.add(getBaseOfStack(), getTopOfStack());
-}
-
-void JSStack::gatherConservativeRoots(ConservativeRoots& conservativeRoots, JITStubRoutineSet& jitStubRoutines, CodeBlockSet& codeBlocks)
-{
- conservativeRoots.add(getBaseOfStack(), getTopOfStack(), jitStubRoutines, codeBlocks);
-}
-
-void JSStack::releaseExcessCapacity()
-{
- ptrdiff_t delta = reinterpret_cast<uintptr_t>(highAddress()) - reinterpret_cast<uintptr_t>(m_commitEnd);
- m_reservation.decommit(m_commitEnd, delta);
- addToCommittedByteCount(-delta);
- m_commitEnd = highAddress();
-}
-
-void JSStack::initializeThreading()
-{
- stackStatisticsMutex();
-}
-
-size_t JSStack::committedByteCount()
-{
- MutexLocker locker(stackStatisticsMutex());
- return committedBytesCount;
-}
-
-void JSStack::addToCommittedByteCount(long byteCount)
-{
- MutexLocker locker(stackStatisticsMutex());
- ASSERT(static_cast<long>(committedBytesCount) + byteCount > -1);
- committedBytesCount += byteCount;
-}
-
-void JSStack::enableErrorStackReserve()
-{
- m_useableEnd = reservationEnd();
-}
-
-void JSStack::disableErrorStackReserve()
-{
- char* useableEnd = reinterpret_cast<char*>(reservationEnd()) + commitSize();
- m_useableEnd = reinterpret_cast_ptr<Register*>(useableEnd);
-
- // By the time we get here, we are guaranteed to be destructing the last
- // Interpreter::ErrorHandlingMode that enabled this reserve in the first
- // place. That means the stack space beyond m_useableEnd before we
- // enabled the reserve was not previously in use. Hence, it is safe to
- // shrink back to that m_useableEnd.
- if (m_end < m_useableEnd) {
- ASSERT(m_topCallFrame->frameExtent() >= m_useableEnd);
- shrink(m_useableEnd);
- }
-}
-
-} // namespace JSC
diff --git a/Source/JavaScriptCore/interpreter/JSStack.h b/Source/JavaScriptCore/interpreter/JSStack.h
deleted file mode 100644
index cb47a9f0b..000000000
--- a/Source/JavaScriptCore/interpreter/JSStack.h
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
- * its contributors may be used to endorse or promote products derived
- * from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
- * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
- * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
- * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
- * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
- * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef JSStack_h
-#define JSStack_h
-
-#include "ExecutableAllocator.h"
-#include "Register.h"
-#include <wtf/Noncopyable.h>
-#include <wtf/PageReservation.h>
-#include <wtf/VMTags.h>
-
-#define ENABLE_DEBUG_JSSTACK 0
-#if !defined(NDEBUG) && !defined(ENABLE_DEBUG_JSSTACK)
-#define ENABLE_DEBUG_JSSTACK 1
-#endif
-
-namespace JSC {
-
- class CodeBlockSet;
- class ConservativeRoots;
- class ExecState;
- class JITStubRoutineSet;
- class VM;
- class LLIntOffsetsExtractor;
-
- struct Instruction;
- typedef ExecState CallFrame;
-
- struct CallerFrameAndPC {
- CallFrame* callerFrame;
- Instruction* pc;
- };
-
- class JSStack {
- WTF_MAKE_NONCOPYABLE(JSStack);
- public:
- enum CallFrameHeaderEntry {
- CodeBlock = sizeof(CallerFrameAndPC) / sizeof(Register),
- ScopeChain,
- Callee,
- ArgumentCount,
- CallFrameHeaderSize,
-
- // The following entries are not part of the CallFrameHeader but are provided here as a convenience:
- ThisArgument = CallFrameHeaderSize,
- FirstArgument,
- };
-
- static const size_t defaultCapacity = 512 * 1024;
- // Allow 8k of excess registers before we start trying to reap the stack
- static const ptrdiff_t maxExcessCapacity = 8 * 1024;
-
- JSStack(VM&, size_t capacity = defaultCapacity);
- ~JSStack();
-
- void gatherConservativeRoots(ConservativeRoots&);
- void gatherConservativeRoots(ConservativeRoots&, JITStubRoutineSet&, CodeBlockSet&);
-
- Register* getBaseOfStack() const
- {
- return highAddress() - 1;
- }
-
- size_t size() const { return highAddress() - lowAddress(); }
-
- bool grow(Register*);
-
- static size_t committedByteCount();
- static void initializeThreading();
-
- Register* getTopOfFrame(CallFrame*);
- Register* getStartOfFrame(CallFrame*);
- Register* getTopOfStack();
-
- bool entryCheck(class CodeBlock*, int);
-
- CallFrame* pushFrame(class CodeBlock*, JSScope*, int argsCount, JSObject* callee);
-
- void popFrame(CallFrame*);
-
- bool containsAddress(Register* address) { return (lowAddress() <= address && address <= highAddress()); }
-
- void enableErrorStackReserve();
- void disableErrorStackReserve();
-
-#if ENABLE(DEBUG_JSSTACK)
- void installFence(CallFrame*, const char *function = "", int lineNo = 0);
- void validateFence(CallFrame*, const char *function = "", int lineNo = 0);
- static const int FenceSize = 4;
-#else // !ENABLE(DEBUG_JSSTACK)
- void installFence(CallFrame*, const char* = "", int = 0) { }
- void validateFence(CallFrame*, const char* = "", int = 0) { }
-#endif // !ENABLE(DEBUG_JSSTACK)
-
- private:
- Register* lowAddress() const
- {
- return m_end;
- }
-
- Register* highAddress() const
- {
- return reinterpret_cast_ptr<Register*>(static_cast<char*>(m_reservation.base()) + m_reservation.size());
- }
-
- Register* reservationEnd() const
- {
- char* reservationEnd = static_cast<char*>(m_reservation.base());
- return reinterpret_cast_ptr<Register*>(reservationEnd);
- }
-
-#if ENABLE(DEBUG_JSSTACK)
- static JSValue generateFenceValue(size_t argIndex);
- void installTrapsAfterFrame(CallFrame*);
-#else
- void installTrapsAfterFrame(CallFrame*) { }
-#endif
-
- bool growSlowCase(Register*);
- void shrink(Register*);
- void releaseExcessCapacity();
- void addToCommittedByteCount(long);
-
- void updateStackLimit(Register* newEnd);
-
- VM& m_vm;
- Register* m_end;
- Register* m_commitEnd;
- Register* m_useableEnd;
- PageReservation m_reservation;
- CallFrame*& m_topCallFrame;
-
- friend class LLIntOffsetsExtractor;
- };
-
-} // namespace JSC
-
-#endif // JSStack_h
diff --git a/Source/JavaScriptCore/interpreter/JSStackInlines.h b/Source/JavaScriptCore/interpreter/JSStackInlines.h
deleted file mode 100644
index 5a2aff117..000000000
--- a/Source/JavaScriptCore/interpreter/JSStackInlines.h
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef JSStackInlines_h
-#define JSStackInlines_h
-
-#include "CallFrame.h"
-#include "CodeBlock.h"
-#include "JSStack.h"
-#include "VM.h"
-
-namespace JSC {
-
-inline Register* JSStack::getTopOfFrame(CallFrame* frame)
-{
- if (UNLIKELY(!frame))
- return getBaseOfStack();
- return frame->frameExtent();
-}
-
-inline Register* JSStack::getTopOfStack()
-{
- return getTopOfFrame(m_topCallFrame);
-}
-
-inline Register* JSStack::getStartOfFrame(CallFrame* frame)
-{
- CallFrame* callerFrame = frame->callerFrameSkippingVMEntrySentinel();
- return getTopOfFrame(callerFrame);
-}
-
-inline bool JSStack::entryCheck(class CodeBlock* codeBlock, int argsCount)
-{
- Register* oldEnd = getTopOfStack();
-
- // Ensure that we have enough space for the parameters:
- size_t paddedArgsCount = argsCount;
- if (codeBlock) {
- size_t numParameters = codeBlock->numParameters();
- if (paddedArgsCount < numParameters)
- paddedArgsCount = numParameters;
- }
-
- Register* newCallFrameSlot = oldEnd - paddedArgsCount - (2 * JSStack::CallFrameHeaderSize) + 1;
-
-#if ENABLE(DEBUG_JSSTACK)
- newCallFrameSlot -= JSStack::FenceSize;
-#endif
-
- Register* newEnd = newCallFrameSlot;
- if (!!codeBlock)
- newEnd += virtualRegisterForLocal(codeBlock->frameRegisterCount()).offset();
-
- // Ensure that we have the needed stack capacity to push the new frame:
- if (!grow(newEnd))
- return false;
-
- return true;
-}
-
-inline CallFrame* JSStack::pushFrame(class CodeBlock* codeBlock, JSScope* scope, int argsCount, JSObject* callee)
-{
- ASSERT(!!scope);
- Register* oldEnd = getTopOfStack();
-
- // Ensure that we have enough space for the parameters:
- size_t paddedArgsCount = argsCount;
- if (codeBlock) {
- size_t numParameters = codeBlock->numParameters();
- if (paddedArgsCount < numParameters)
- paddedArgsCount = numParameters;
- }
-
- Register* newCallFrameSlot = oldEnd - paddedArgsCount - (2 * JSStack::CallFrameHeaderSize) + 1;
-
-#if ENABLE(DEBUG_JSSTACK)
- newCallFrameSlot -= JSStack::FenceSize;
-#endif
-
- Register* newEnd = newCallFrameSlot;
- if (!!codeBlock)
- newEnd += virtualRegisterForLocal(codeBlock->frameRegisterCount()).offset();
-
- // Ensure that we have the needed stack capacity to push the new frame:
- if (!grow(newEnd))
- return 0;
-
- // Compute the address of the new VM sentinel frame for this invocation:
- CallFrame* newVMEntrySentinelFrame = CallFrame::create(newCallFrameSlot + paddedArgsCount + JSStack::CallFrameHeaderSize);
- ASSERT(!!newVMEntrySentinelFrame);
-
- // Compute the address of the new frame for this invocation:
- CallFrame* newCallFrame = CallFrame::create(newCallFrameSlot);
- ASSERT(!!newCallFrame);
-
- // The caller frame should always be the real previous frame on the stack,
- // and not a potential GlobalExec that was passed in. Point callerFrame to
- // the top frame on the stack.
- CallFrame* callerFrame = m_topCallFrame;
-
- // Initialize the VM sentinel frame header:
- newVMEntrySentinelFrame->initializeVMEntrySentinelFrame(callerFrame);
-
- // Initialize the callee frame header:
- newCallFrame->init(codeBlock, 0, scope, newVMEntrySentinelFrame, argsCount, callee);
-
- ASSERT(!!newCallFrame->scope());
-
- // Pad additional args if needed:
- // Note: we need to subtract 1 from argsCount and paddedArgsCount to
- // exclude the this pointer.
- for (size_t i = argsCount-1; i < paddedArgsCount-1; ++i)
- newCallFrame->setArgument(i, jsUndefined());
-
- installFence(newCallFrame, __FUNCTION__, __LINE__);
- validateFence(newCallFrame, __FUNCTION__, __LINE__);
- installTrapsAfterFrame(newCallFrame);
-
- // Push the new frame:
- m_topCallFrame = newCallFrame;
-
- return newCallFrame;
-}
-
-inline void JSStack::popFrame(CallFrame* frame)
-{
- validateFence(frame, __FUNCTION__, __LINE__);
-
- // Pop off the callee frame and the sentinel frame.
- CallFrame* callerFrame = frame->callerFrame()->vmEntrySentinelCallerFrame();
-
- // Pop to the caller:
- m_topCallFrame = callerFrame;
-
- // If we are popping the very first frame from the stack i.e. no more
- // frames before this, then we can now safely shrink the stack. In
- // this case, we're shrinking all the way to the beginning since there
- // are no more frames on the stack.
- if (!callerFrame)
- shrink(getBaseOfStack());
-
- installTrapsAfterFrame(callerFrame);
-}
-
-inline void JSStack::shrink(Register* newEnd)
-{
- if (newEnd >= m_end)
- return;
- updateStackLimit(newEnd);
- if (m_end == getBaseOfStack() && (m_commitEnd - getBaseOfStack()) >= maxExcessCapacity)
- releaseExcessCapacity();
-}
-
-inline bool JSStack::grow(Register* newEnd)
-{
- if (newEnd >= m_end)
- return true;
- return growSlowCase(newEnd);
-}
-
-inline void JSStack::updateStackLimit(Register* newEnd)
-{
- m_end = newEnd;
-#if USE(SEPARATE_C_AND_JS_STACK)
- m_vm.setJSStackLimit(newEnd);
-#endif
-}
-
-#if ENABLE(DEBUG_JSSTACK)
-inline JSValue JSStack::generateFenceValue(size_t argIndex)
-{
- unsigned fenceBits = 0xfacebad0 | ((argIndex+1) & 0xf);
- JSValue fenceValue = JSValue(fenceBits);
- return fenceValue;
-}
-
-// The JSStack fences mechanism works as follows:
-// 1. A fence is a number (JSStack::FenceSize) of JSValues that are initialized
-// with values generated by JSStack::generateFenceValue().
-// 2. When pushFrame() is called, the fence is installed after the max extent
-// of the previous topCallFrame and the last arg of the new frame:
-//
-// | ... |
-// |--------------------------------------|
-// | Frame Header of previous frame |
-// |--------------------------------------|
-// topCallFrame --> | |
-// | Locals of previous frame |
-// |--------------------------------------|
-// | *** the Fence *** |
-// |--------------------------------------|
-// | VM entry sentinel frame header |
-// |--------------------------------------|
-// | Args of new frame |
-// |--------------------------------------|
-// | Frame Header of new frame |
-// |--------------------------------------|
-// frame --> | Locals of new frame |
-// | |
-//
-// 3. In popFrame() and elsewhere, we can call JSStack::validateFence() to
-// assert that the fence contains the values we expect.
-
-inline void JSStack::installFence(CallFrame* frame, const char *function, int lineNo)
-{
- UNUSED_PARAM(function);
- UNUSED_PARAM(lineNo);
- Register* startOfFrame = getStartOfFrame(frame);
-
- // The last argIndex is at:
- size_t maxIndex = frame->argIndexForRegister(startOfFrame) + 1;
- size_t startIndex = maxIndex - FenceSize;
- for (size_t i = startIndex; i < maxIndex; ++i) {
- JSValue fenceValue = generateFenceValue(i);
- frame->setArgument(i, fenceValue);
- }
-}
-
-inline void JSStack::validateFence(CallFrame* frame, const char *function, int lineNo)
-{
- UNUSED_PARAM(function);
- UNUSED_PARAM(lineNo);
- ASSERT(!!frame->scope());
- Register* startOfFrame = getStartOfFrame(frame);
- size_t maxIndex = frame->argIndexForRegister(startOfFrame) + 1;
- size_t startIndex = maxIndex - FenceSize;
- for (size_t i = startIndex; i < maxIndex; ++i) {
- JSValue fenceValue = generateFenceValue(i);
- JSValue actualValue = frame->getArgumentUnsafe(i);
- ASSERT(fenceValue == actualValue);
- }
-}
-
-// When debugging the JSStack, we install bad values after the extent of the
-// topCallFrame at the end of pushFrame() and popFrame(). The intention is
-// to trigger crashes in the event that memory in this supposedly unused
-// region is read and consumed without proper initialization. After the trap
-// words are installed, the stack looks like this:
-//
-// | ... |
-// |-----------------------------|
-// | Frame Header of frame |
-// |-----------------------------|
-// topCallFrame --> | |
-// | Locals of frame |
-// |-----------------------------|
-// | *** Trap words *** |
-// |-----------------------------|
-// | Unused space ... |
-// | ... |
-
-inline void JSStack::installTrapsAfterFrame(CallFrame* frame)
-{
- Register* topOfFrame = getTopOfFrame(frame);
- const int sizeOfTrap = 64;
- int32_t* startOfTrap = reinterpret_cast<int32_t*>(topOfFrame);
- int32_t* endOfTrap = startOfTrap - sizeOfTrap;
- int32_t* endOfCommitedMemory = reinterpret_cast<int32_t*>(m_commitEnd);
-
- // Make sure we're not exceeding the amount of available memory to write to:
- if (endOfTrap < endOfCommitedMemory)
- endOfTrap = endOfCommitedMemory;
-
- // Lay the traps:
- int32_t* p = startOfTrap;
- while (p > endOfTrap)
- *p-- = 0xabadcafe; // A bad word to trigger a crash if deref'ed.
-}
-#endif // ENABLE(DEBUG_JSSTACK)
-
-} // namespace JSC
-
-#endif // JSStackInlines_h
diff --git a/Source/JavaScriptCore/interpreter/ProtoCallFrame.cpp b/Source/JavaScriptCore/interpreter/ProtoCallFrame.cpp
index a36001437..eb80b2c23 100644
--- a/Source/JavaScriptCore/interpreter/ProtoCallFrame.cpp
+++ b/Source/JavaScriptCore/interpreter/ProtoCallFrame.cpp
@@ -27,23 +27,25 @@
#include "ProtoCallFrame.h"
#include "CodeBlock.h"
+#include "JSCInlines.h"
+#include "StackAlignment.h"
namespace JSC {
-void ProtoCallFrame::init(CodeBlock* codeBlock, JSScope* scope, JSObject* callee, JSValue thisValue, int argCountIncludingThis, JSValue* otherArgs)
+void ProtoCallFrame::init(CodeBlock* codeBlock, JSObject* callee, JSValue thisValue, int argCountIncludingThis, JSValue* otherArgs)
{
this->args = otherArgs;
this->setCodeBlock(codeBlock);
- this->setScope(scope);
this->setCallee(callee);
this->setArgumentCountIncludingThis(argCountIncludingThis);
- size_t paddedArgsCount = argCountIncludingThis;
- if (codeBlock) {
- size_t numParameters = codeBlock->numParameters();
- if (paddedArgsCount < numParameters)
- paddedArgsCount = numParameters;
- }
- this->setPaddedArgsCount(paddedArgsCount);
+ if (codeBlock && argCountIncludingThis < codeBlock->numParameters())
+ this->arityMissMatch = true;
+ else
+ this->arityMissMatch = false;
+
+ // Round up argCountIncludingThis to keep the stack frame size aligned.
+ size_t paddedArgsCount = roundArgumentCountToAlignFrame(argCountIncludingThis);
+ this->setPaddedArgCount(paddedArgsCount);
this->clearCurrentVPC();
this->setThisValue(thisValue);
}
diff --git a/Source/JavaScriptCore/interpreter/ProtoCallFrame.h b/Source/JavaScriptCore/interpreter/ProtoCallFrame.h
index 84037da95..e41de5f8c 100644
--- a/Source/JavaScriptCore/interpreter/ProtoCallFrame.h
+++ b/Source/JavaScriptCore/interpreter/ProtoCallFrame.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013 Apple Inc. All Rights Reserved.
+ * Copyright (C) 2013-2017 Apple Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -23,43 +23,44 @@
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef ProtoCallFrame_h
-#define ProtoCallFrame_h
+#pragma once
#include "Register.h"
+#include <wtf/ForbidHeapAllocation.h>
namespace JSC {
-struct ProtoCallFrame {
+struct JS_EXPORT_PRIVATE ProtoCallFrame {
+ WTF_FORBID_HEAP_ALLOCATION;
+public:
Register codeBlockValue;
- Register scopeChainValue;
Register calleeValue;
Register argCountAndCodeOriginValue;
Register thisArg;
uint32_t paddedArgCount;
+ bool arityMissMatch;
JSValue *args;
- void init(CodeBlock*, JSScope*, JSObject*, JSValue, int, JSValue* otherArgs = 0);
+ void init(CodeBlock*, JSObject*, JSValue, int, JSValue* otherArgs = 0);
CodeBlock* codeBlock() const { return codeBlockValue.Register::codeBlock(); }
void setCodeBlock(CodeBlock* codeBlock) { codeBlockValue = codeBlock; }
- JSScope* scope() const { return scopeChainValue.Register::scope(); }
- void setScope(JSScope* scope) { scopeChainValue = scope; }
-
- JSObject* callee() const { return calleeValue.Register::function(); }
- void setCallee(JSObject* callee) { calleeValue = Register::withCallee(callee); }
+ JSObject* callee() const { return calleeValue.Register::object(); }
+ void setCallee(JSObject* callee) { calleeValue = callee; }
int argumentCountIncludingThis() const { return argCountAndCodeOriginValue.payload(); }
int argumentCount() const { return argumentCountIncludingThis() - 1; }
void setArgumentCountIncludingThis(int count) { argCountAndCodeOriginValue.payload() = count; }
- void setPaddedArgsCount(uint32_t argCount) { paddedArgCount = argCount; }
+ void setPaddedArgCount(uint32_t argCount) { paddedArgCount = argCount; }
void clearCurrentVPC() { argCountAndCodeOriginValue.tag() = 0; }
JSValue thisValue() const { return thisArg.Register::jsValue(); }
void setThisValue(JSValue value) { thisArg = value; }
+ bool needArityCheck() { return arityMissMatch; }
+
JSValue argument(size_t argumentIndex)
{
ASSERT(static_cast<int>(argumentIndex) < argumentCount());
@@ -73,5 +74,3 @@ struct ProtoCallFrame {
};
} // namespace JSC
-
-#endif // ProtoCallFrame_h
diff --git a/Source/JavaScriptCore/interpreter/Register.h b/Source/JavaScriptCore/interpreter/Register.h
index 13a7e5866..30d120db6 100644
--- a/Source/JavaScriptCore/interpreter/Register.h
+++ b/Source/JavaScriptCore/interpreter/Register.h
@@ -10,7 +10,7 @@
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
- * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
@@ -26,8 +26,7 @@
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef Register_h
-#define Register_h
+#pragma once
#include "JSCJSValue.h"
#include <wtf/Assertions.h>
@@ -37,9 +36,8 @@ namespace JSC {
class CodeBlock;
class ExecState;
- class JSActivation;
+ class JSLexicalEnvironment;
class JSObject;
- class JSPropertyNameIterator;
class JSScope;
typedef ExecState CallFrame;
@@ -52,18 +50,19 @@ namespace JSC {
Register(const JSValue&);
Register& operator=(const JSValue&);
JSValue jsValue() const;
+ JSValue asanUnsafeJSValue() const;
EncodedJSValue encodedJSValue() const;
Register& operator=(CallFrame*);
Register& operator=(CodeBlock*);
Register& operator=(JSScope*);
+ Register& operator=(JSObject*);
int32_t i() const;
- JSActivation* activation() const;
CallFrame* callFrame() const;
CodeBlock* codeBlock() const;
- JSObject* function() const;
- JSPropertyNameIterator* propertyNameIterator() const;
+ CodeBlock* asanUnsafeCodeBlock() const;
+ JSObject* object() const;
JSScope* scope() const;
int32_t unboxedInt32() const;
int64_t unboxedInt52() const;
@@ -73,6 +72,7 @@ namespace JSC {
JSCell* unboxedCell() const;
int32_t payload() const;
int32_t tag() const;
+ int32_t unsafeTag() const;
int32_t& payload();
int32_t& tag();
@@ -82,8 +82,6 @@ namespace JSC {
return r;
}
- static Register withCallee(JSObject* callee);
-
private:
union {
EncodedJSValue value;
@@ -113,6 +111,12 @@ namespace JSC {
return *this;
}
+ // FIXME (rdar://problem/19379214): ASan only needs to be suppressed for Register::jsValue() when called from prepareOSREntry(), but there is currently no way to express this short of adding a separate copy of the function.
+ SUPPRESS_ASAN ALWAYS_INLINE JSValue Register::asanUnsafeJSValue() const
+ {
+ return JSValue::decode(u.value);
+ }
+
ALWAYS_INLINE JSValue Register::jsValue() const
{
return JSValue::decode(u.value);
@@ -152,6 +156,11 @@ namespace JSC {
return u.codeBlock;
}
+ SUPPRESS_ASAN ALWAYS_INLINE CodeBlock* Register::asanUnsafeCodeBlock() const
+ {
+ return u.codeBlock;
+ }
+
ALWAYS_INLINE int32_t Register::unboxedInt32() const
{
return payload();
@@ -196,6 +205,11 @@ namespace JSC {
return u.encodedValue.asBits.tag;
}
+ SUPPRESS_ASAN ALWAYS_INLINE int32_t Register::unsafeTag() const
+ {
+ return u.encodedValue.asBits.tag;
+ }
+
ALWAYS_INLINE int32_t& Register::payload()
{
return u.encodedValue.asBits.payload;
@@ -213,5 +227,3 @@ namespace WTF {
template<> struct VectorTraits<JSC::Register> : VectorTraitsBase<true, JSC::Register> { };
} // namespace WTF
-
-#endif // Register_h
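
Register is essentially one machine word viewed through a union and reinterpreted as whatever the slot currently holds (an encoded JS value, a CodeBlock*, a CallFrame*, and so on). A simplified model of that idea, not JSC's actual layout or value encoding:

    #include <cstdint>
    #include <cstdio>

    struct CodeBlock { int id; };

    class Register {
    public:
        // The same word is written through whichever view matches the assignment.
        Register& operator=(uint64_t encodedValue) { u.value = encodedValue; return *this; }
        Register& operator=(CodeBlock* codeBlock) { u.codeBlock = codeBlock; return *this; }

        uint64_t encodedValue() const { return u.value; }
        CodeBlock* codeBlock() const { return u.codeBlock; }

    private:
        union {
            uint64_t value;
            CodeBlock* codeBlock;
        } u;
    };

    int main()
    {
        CodeBlock block { 7 };
        Register r;
        r = &block;                                   // header slot holding the frame's CodeBlock
        std::printf("%d\n", r.codeBlock()->id);       // 7
        r = static_cast<uint64_t>(42);                // the same slot reused for an encoded value
        std::printf("%llu\n", static_cast<unsigned long long>(r.encodedValue())); // 42
        return 0;
    }
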
diff --git a/Source/JavaScriptCore/interpreter/ShadowChicken.cpp b/Source/JavaScriptCore/interpreter/ShadowChicken.cpp
new file mode 100644
index 000000000..ff02360b8
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/ShadowChicken.cpp
@@ -0,0 +1,468 @@
+/*
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "ShadowChicken.h"
+
+#include "CodeBlock.h"
+#include "JSCInlines.h"
+#include "ShadowChickenInlines.h"
+#include <wtf/ListDump.h>
+
+namespace JSC {
+
+static const bool verbose = false;
+
+void ShadowChicken::Packet::dump(PrintStream& out) const
+{
+ if (!*this) {
+ out.print("empty");
+ return;
+ }
+
+ if (isPrologue()) {
+ out.print(
+ "{callee = ", RawPointer(callee), ", frame = ", RawPointer(frame), ", callerFrame = ",
+ RawPointer(callerFrame), "}");
+ return;
+ }
+
+ if (isTail()) {
+ out.print("tail-packet:{frame = ", RawPointer(frame), "}");
+ return;
+ }
+
+ ASSERT(isThrow());
+ out.print("throw");
+}
+
+void ShadowChicken::Frame::dump(PrintStream& out) const
+{
+ out.print(
+ "{callee = ", RawPointer(callee), ", frame = ", RawPointer(frame), ", isTailDeleted = ",
+ isTailDeleted, "}");
+}
+
+ShadowChicken::ShadowChicken()
+ : m_logSize(Options::shadowChickenLogSize())
+{
+ m_log = static_cast<Packet*>(fastZeroedMalloc(sizeof(Packet) * m_logSize));
+ m_logCursor = m_log;
+ m_logEnd = m_log + m_logSize;
+}
+
+ShadowChicken::~ShadowChicken()
+{
+ fastFree(m_log);
+}
+
+void ShadowChicken::log(VM& vm, ExecState* exec, const Packet& packet)
+{
+ update(vm, exec);
+ *m_logCursor++ = packet;
+}
+
+void ShadowChicken::update(VM& vm, ExecState* exec)
+{
+ if (verbose) {
+ dataLog("Running update on: ", *this, "\n");
+ WTFReportBacktrace();
+ }
+
+ const unsigned logCursorIndex = m_logCursor - m_log;
+
+ // We need to figure out how to reconcile the current machine stack with our shadow stack. We do
+ // that by figuring out how much of the shadow stack to pop. We apply three different rules. The
+ // precise rule relies on the log. The log contains caller frames, which means that we know
+ // where we bottomed out after making any call. If we bottomed out but made no calls then 'exec'
+ // will tell us. That's why "highestPointSinceLastTime" will go no lower than exec. The third
+ // rule, based on comparing to the current real stack, is executed in a later loop.
+ CallFrame* highestPointSinceLastTime = exec;
+ for (unsigned i = logCursorIndex; i--;) {
+ Packet packet = m_log[i];
+ if (packet.isPrologue()) {
+ CallFrame* watermark;
+ if (i && m_log[i - 1].isTail())
+ watermark = packet.frame;
+ else
+ watermark = packet.callerFrame;
+ highestPointSinceLastTime = std::max(highestPointSinceLastTime, watermark);
+ }
+ }
+
+ if (verbose)
+ dataLog("Highest point since last time: ", RawPointer(highestPointSinceLastTime), "\n");
+
+ while (!m_stack.isEmpty() && (m_stack.last().frame < highestPointSinceLastTime || m_stack.last().isTailDeleted))
+ m_stack.removeLast();
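
The while loop above applies the watermark rule described in the comment: any shadow frame whose machine frame lies below the highest point the machine stack has reached since the last update (stacks grow down, so "below" means a smaller address) must already have returned, and is popped. A toy illustration with fabricated frame addresses:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main()
    {
        // Shadow stack, oldest frame first; values are made-up frame addresses.
        std::vector<uintptr_t> shadowFrames = { 0x7000, 0x6800, 0x6400, 0x6000 };
        uintptr_t highestPointSinceLastTime = 0x6600; // derived from exec and the prologue packets

        // Pop every shadow frame that sits below the watermark.
        while (!shadowFrames.empty() && shadowFrames.back() < highestPointSinceLastTime)
            shadowFrames.pop_back();

        for (uintptr_t frame : shadowFrames)
            std::printf("kept frame at %#lx\n", static_cast<unsigned long>(frame)); // 0x7000, 0x6800
        return 0;
    }
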
+
+ if (verbose)
+ dataLog(" Revised stack: ", listDump(m_stack), "\n");
+
+ // It's possible that the top of stack is now tail-deleted. The stack no longer contains any
+ // frames below the log's high watermark. That means that we just need to look for the first
+ // occurrence of a tail packet for the current stack top.
+ if (!m_stack.isEmpty()) {
+ ASSERT(!m_stack.last().isTailDeleted);
+ for (unsigned i = 0; i < logCursorIndex; ++i) {
+ Packet& packet = m_log[i];
+ if (packet.isTail() && packet.frame == m_stack.last().frame) {
+ Frame& frame = m_stack.last();
+ frame.thisValue = packet.thisValue;
+ frame.scope = packet.scope;
+ frame.codeBlock = packet.codeBlock;
+ frame.callSiteIndex = packet.callSiteIndex;
+ frame.isTailDeleted = true;
+ break;
+ }
+ }
+ }
+
+
+ if (verbose)
+ dataLog(" Revised stack: ", listDump(m_stack), "\n");
+
+ // The log-based and exec-based rules require that ShadowChicken was enabled. The point of
+ // ShadowChicken is to give sensible-looking results even if we had not logged. This means that
+ // we need to reconcile the shadow stack and the real stack by actually looking at the real
+ // stack. This reconciliation allows the shadow stack to have extra tail-deleted frames, but it
+ // forbids it from diverging from the real stack on normal frames.
+ if (!m_stack.isEmpty()) {
+ Vector<Frame> stackRightNow;
+ StackVisitor::visit(
+ exec, [&] (StackVisitor& visitor) -> StackVisitor::Status {
+ if (visitor->isInlinedFrame())
+ return StackVisitor::Continue;
+ if (visitor->isWasmFrame()) {
+ // FIXME: Make shadow chicken work with Wasm.
+ // https://bugs.webkit.org/show_bug.cgi?id=165441
+ return StackVisitor::Continue;
+ }
+
+ bool isTailDeleted = false;
+ // FIXME: Make shadow chicken work with Wasm.
+ // https://bugs.webkit.org/show_bug.cgi?id=165441
+ stackRightNow.append(Frame(jsCast<JSObject*>(visitor->callee()), visitor->callFrame(), isTailDeleted));
+ return StackVisitor::Continue;
+ });
+ stackRightNow.reverse();
+
+ if (verbose)
+ dataLog(" Stack right now: ", listDump(stackRightNow), "\n");
+
+ unsigned shadowIndex = 0;
+ unsigned rightNowIndex = 0;
+ while (shadowIndex < m_stack.size() && rightNowIndex < stackRightNow.size()) {
+ if (m_stack[shadowIndex].isTailDeleted) {
+ shadowIndex++;
+ continue;
+ }
+
+ // We specifically don't use operator== here because we are using a less
+ // strict filter on equality of frames. For example, the scope pointer
+ // could change, but we wouldn't want to consider the frames different entities
+ // because of that because it's natural for the program to change scopes.
+ if (m_stack[shadowIndex].frame == stackRightNow[rightNowIndex].frame
+ && m_stack[shadowIndex].callee == stackRightNow[rightNowIndex].callee) {
+ shadowIndex++;
+ rightNowIndex++;
+ continue;
+ }
+ break;
+ }
+ m_stack.resize(shadowIndex);
+
+ if (verbose)
+ dataLog(" Revised stack: ", listDump(m_stack), "\n");
+ }
+
+ // It's possible that the top stack frame is actually lower than highestPointSinceLastTime.
+ // Account for that here.
+ highestPointSinceLastTime = nullptr;
+ for (unsigned i = m_stack.size(); i--;) {
+ if (!m_stack[i].isTailDeleted) {
+ highestPointSinceLastTime = m_stack[i].frame;
+ break;
+ }
+ }
+
+ if (verbose)
+ dataLog(" Highest point since last time: ", RawPointer(highestPointSinceLastTime), "\n");
+
+ // Set everything up so that we know where the top frame is in the log.
+ unsigned indexInLog = logCursorIndex;
+
+ auto advanceIndexInLogTo = [&] (CallFrame* frame, JSObject* callee, CallFrame* callerFrame) -> bool {
+ if (verbose)
+ dataLog(" Advancing to frame = ", RawPointer(frame), " from indexInLog = ", indexInLog, "\n");
+ if (indexInLog > logCursorIndex) {
+ if (verbose)
+ dataLog(" Bailing.\n");
+ return false;
+ }
+
+ unsigned oldIndexInLog = indexInLog;
+
+ while (indexInLog--) {
+ Packet packet = m_log[indexInLog];
+
+ // If all callees opt into ShadowChicken, then this search will rapidly terminate when
+ // we find our frame. But if our frame's callee didn't emit a prologue packet because it
+ // didn't opt in, then we will keep looking backwards until we *might* find a different
+ // frame. If we've been given the callee and callerFrame as a filter, then it's unlikely
+ // that we will hit the wrong frame. But we don't always have that information.
+ //
+ // This means it's worth adding other filters. For example, we could track changes in
+ // stack size. Once we've seen a frame at some height, we're no longer interested in
+ // frames below that height. Also, we can break as soon as we see a frame higher than
+ // the one we're looking for.
+ // FIXME: Add more filters.
+ // https://bugs.webkit.org/show_bug.cgi?id=155685
+
+ if (packet.isPrologue() && packet.frame == frame
+ && (!callee || packet.callee == callee)
+ && (!callerFrame || packet.callerFrame == callerFrame)) {
+ if (verbose)
+ dataLog(" Found at indexInLog = ", indexInLog, "\n");
+ return true;
+ }
+ }
+
+ // This is an interesting eventuality. We will see this if ShadowChicken was not
+ // consistently enabled. We have a choice between:
+ //
+ // - Leaving the log index at -1, which will prevent the log from being considered. This is
+ // the most conservative. It means that we will not be able to recover tail-deleted frames
+ // from anything that sits above a frame that didn't log a prologue packet. This means
+ // that everyone who creates prologues must log prologue packets.
+ //
+ // - Restoring the log index to what it was before. This prevents us from considering
+ // whether this frame has tail-deleted frames behind it, but that's about it. The problem
+ // with this approach is that it might recover tail-deleted frames that aren't relevant.
+ // I haven't thought about this too deeply, though.
+ //
+ // It seems like the latter option is less harmful, so that's what we do.
+ indexInLog = oldIndexInLog;
+
+ if (verbose)
+ dataLog(" Didn't find it.\n");
+ return false;
+ };
+
+ Vector<Frame> toPush;
+ StackVisitor::visit(
+ exec, [&] (StackVisitor& visitor) -> StackVisitor::Status {
+ if (visitor->isInlinedFrame()) {
+ // FIXME: Handle inlining.
+ // https://bugs.webkit.org/show_bug.cgi?id=155686
+ return StackVisitor::Continue;
+ }
+
+ if (visitor->isWasmFrame()) {
+ // FIXME: Make shadow chicken work with Wasm.
+ return StackVisitor::Continue;
+ }
+
+ CallFrame* callFrame = visitor->callFrame();
+ if (verbose)
+ dataLog(" Examining ", RawPointer(callFrame), "\n");
+ if (callFrame == highestPointSinceLastTime) {
+ if (verbose)
+ dataLog(" Bailing at ", RawPointer(callFrame), " because it's the highest point since last time.\n");
+ return StackVisitor::Done;
+ }
+
+ bool foundFrame = advanceIndexInLogTo(callFrame, callFrame->jsCallee(), callFrame->callerFrame());
+ bool isTailDeleted = false;
+ JSScope* scope = nullptr;
+ CodeBlock* codeBlock = callFrame->codeBlock();
+ if (codeBlock && codeBlock->wasCompiledWithDebuggingOpcodes() && codeBlock->scopeRegister().isValid()) {
+ scope = callFrame->scope(codeBlock->scopeRegister().offset());
+ RELEASE_ASSERT(scope->inherits(vm, JSScope::info()));
+ } else if (foundFrame) {
+ scope = m_log[indexInLog].scope;
+ if (scope)
+ RELEASE_ASSERT(scope->inherits(vm, JSScope::info()));
+ }
+ toPush.append(Frame(jsCast<JSObject*>(visitor->callee()), callFrame, isTailDeleted, callFrame->thisValue(), scope, codeBlock, callFrame->callSiteIndex()));
+
+ if (indexInLog < logCursorIndex
+ // This condition protects us from the case where advanceIndexInLogTo didn't find
+ // anything.
+ && m_log[indexInLog].frame == toPush.last().frame) {
+ if (verbose)
+ dataLog(" Going to loop through to find tail deleted frames with indexInLog = ", indexInLog, " and push-stack top = ", toPush.last(), "\n");
+ for (;;) {
+ ASSERT(m_log[indexInLog].frame == toPush.last().frame);
+
+ // Right now the index is pointing at a prologue packet of the last frame that
+ // we pushed. Peek behind that packet to see if there is a tail packet. If there
+ // is one then we know that there is a corresponding prologue packet that will
+ // tell us about a tail-deleted frame.
+
+ if (!indexInLog)
+ break;
+ Packet tailPacket = m_log[indexInLog - 1];
+ if (!tailPacket.isTail()) {
+ // Last frame that we recorded was not the outcome of a tail call. So, there
+ // will not be any more deleted frames.
+ // FIXME: We might want to have a filter here. Consider that this was a tail
+ // marker for a tail call to something that didn't log anything. It should
+ // be sufficient to give the tail marker a copy of the caller frame.
+ // https://bugs.webkit.org/show_bug.cgi?id=155687
+ break;
+ }
+ indexInLog--; // Skip over the tail packet.
+
+ if (!advanceIndexInLogTo(tailPacket.frame, nullptr, nullptr)) {
+ if (verbose)
+ dataLog("Can't find prologue packet for tail: ", RawPointer(tailPacket.frame), "\n");
+ // We were unable to locate the prologue packet for this tail packet.
+ // This is rare but can happen in a situation like:
+ // function foo() {
+ // ... call some deeply tail-recursive function, causing a random number of log processings.
+ // return bar(); // tail call
+ // }
+ break;
+ }
+ Packet packet = m_log[indexInLog];
+ bool isTailDeleted = true;
+ RELEASE_ASSERT(tailPacket.scope->inherits(vm, JSScope::info()));
+ toPush.append(Frame(packet.callee, packet.frame, isTailDeleted, tailPacket.thisValue, tailPacket.scope, tailPacket.codeBlock, tailPacket.callSiteIndex));
+ }
+ }
+
+ return StackVisitor::Continue;
+ });
+
+ if (verbose)
+ dataLog(" Pushing: ", listDump(toPush), "\n");
+
+ for (unsigned i = toPush.size(); i--;)
+ m_stack.append(toPush[i]);
+
+ // We want to reset the log. There is a fun corner-case: there could be a tail marker at the end
+ // of this log. We could make that work by setting isTailDeleted on the top of stack, but that
+ // would require more corner cases in the complicated reconciliation code above. That code
+ // already knows how to handle a tail packet at the beginning, so we just leverage that here.
+ if (logCursorIndex && m_log[logCursorIndex - 1].isTail()) {
+ m_log[0] = m_log[logCursorIndex - 1];
+ m_logCursor = m_log + 1;
+ } else
+ m_logCursor = m_log;
+
+ if (verbose)
+ dataLog(" After pushing: ", *this, "\n");
+
+    // Remove tail-deleted frames until there are few enough of them.
+ const unsigned maxTailDeletedFrames = Options::shadowChickenMaxTailDeletedFramesSize();
+ if (m_stack.size() > maxTailDeletedFrames) {
+ unsigned numberOfTailDeletedFrames = 0;
+ for (const Frame& frame : m_stack) {
+ if (frame.isTailDeleted)
+ numberOfTailDeletedFrames++;
+ }
+ if (numberOfTailDeletedFrames > maxTailDeletedFrames) {
+ unsigned dstIndex = 0;
+ unsigned srcIndex = 0;
+ while (srcIndex < m_stack.size()) {
+ Frame frame = m_stack[srcIndex++];
+ if (numberOfTailDeletedFrames > maxTailDeletedFrames && frame.isTailDeleted) {
+ numberOfTailDeletedFrames--;
+ continue;
+ }
+ m_stack[dstIndex++] = frame;
+ }
+ m_stack.resize(dstIndex);
+ }
+ }
+
+ if (verbose)
+ dataLog(" After clean-up: ", *this, "\n");
+}
+
+void ShadowChicken::visitChildren(SlotVisitor& visitor)
+{
+ for (unsigned i = m_logCursor - m_log; i--;) {
+ JSObject* callee = m_log[i].callee;
+ if (callee != Packet::tailMarker() && callee != Packet::throwMarker())
+ visitor.appendUnbarriered(callee);
+ if (callee != Packet::throwMarker())
+ visitor.appendUnbarriered(m_log[i].scope);
+ if (callee == Packet::tailMarker()) {
+ visitor.appendUnbarriered(m_log[i].thisValue);
+ visitor.appendUnbarriered(m_log[i].codeBlock);
+ }
+ }
+
+ for (unsigned i = m_stack.size(); i--; ) {
+ Frame& frame = m_stack[i];
+ visitor.appendUnbarriered(frame.thisValue);
+ visitor.appendUnbarriered(frame.callee);
+ if (frame.scope)
+ visitor.appendUnbarriered(frame.scope);
+ if (frame.codeBlock)
+ visitor.appendUnbarriered(frame.codeBlock);
+ }
+}
+
+void ShadowChicken::reset()
+{
+ m_logCursor = m_log;
+ m_stack.clear();
+}
+
+void ShadowChicken::dump(PrintStream& out) const
+{
+ out.print("{stack = [", listDump(m_stack), "], log = [");
+
+ CommaPrinter comma;
+ unsigned limit = static_cast<unsigned>(m_logCursor - m_log);
+ out.print("\n");
+ for (unsigned i = 0; i < limit; ++i)
+ out.print("\t", comma, m_log[i], "\n");
+ out.print("]}");
+}
+
+JSArray* ShadowChicken::functionsOnStack(ExecState* exec)
+{
+ VM& vm = exec->vm();
+ auto scope = DECLARE_THROW_SCOPE(vm);
+ JSArray* result = constructEmptyArray(exec, 0);
+ RETURN_IF_EXCEPTION(scope, nullptr);
+
+ iterate(
+ vm, exec,
+ [&] (const Frame& frame) -> bool {
+ result->push(exec, frame.callee);
+ RELEASE_ASSERT(!scope.exception()); // This function is only called from tests.
+ return true;
+ });
+
+ return result;
+}
+
+} // namespace JSC
+
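The trimming pass at the end of update() above caps how many tail-deleted frames the shadow stack retains (Options::shadowChickenMaxTailDeletedFramesSize()), dropping the oldest ones first while preserving the order of everything it keeps. Below is a minimal standalone sketch of that compaction idea, with a hypothetical ToyFrame standing in for ShadowChicken::Frame; it illustrates the shape of the loop, not the shipped code.

#include <cstdio>
#include <vector>

// Stand-in for ShadowChicken::Frame; only the field the cap cares about.
struct ToyFrame {
    int id;
    bool isTailDeleted;
};

// Drop the oldest tail-deleted frames until at most maxTailDeleted of them
// remain, keeping the relative order of every frame that survives.
static void capTailDeletedFrames(std::vector<ToyFrame>& stack, unsigned maxTailDeleted)
{
    unsigned numberOfTailDeleted = 0;
    for (const ToyFrame& frame : stack) {
        if (frame.isTailDeleted)
            numberOfTailDeleted++;
    }
    if (numberOfTailDeleted <= maxTailDeleted)
        return;

    unsigned dstIndex = 0;
    for (unsigned srcIndex = 0; srcIndex < stack.size(); ++srcIndex) {
        ToyFrame frame = stack[srcIndex];
        if (numberOfTailDeleted > maxTailDeleted && frame.isTailDeleted) {
            numberOfTailDeleted--;
            continue; // shed this tail-deleted frame
        }
        stack[dstIndex++] = frame;
    }
    stack.resize(dstIndex);
}

int main()
{
    std::vector<ToyFrame> stack = { {0, true}, {1, false}, {2, true}, {3, true}, {4, false} };
    capTailDeletedFrames(stack, 1); // keep only the newest tail-deleted frame
    for (const ToyFrame& frame : stack)
        std::printf("frame %d%s\n", frame.id, frame.isTailDeleted ? " (tail-deleted)" : "");
    return 0;
}

The two-index walk is a stable in-place filter, so no extra allocation is needed; the loop in update() has the same shape.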
diff --git a/Source/JavaScriptCore/interpreter/ShadowChicken.h b/Source/JavaScriptCore/interpreter/ShadowChicken.h
new file mode 100644
index 000000000..d7455a663
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/ShadowChicken.h
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include "CallFrame.h"
+#include "JSCJSValue.h"
+#include <wtf/FastMalloc.h>
+#include <wtf/Noncopyable.h>
+#include <wtf/PrintStream.h>
+#include <wtf/StdLibExtras.h>
+#include <wtf/Vector.h>
+
+namespace JSC {
+
+class CodeBlock;
+class ExecState;
+class JSArray;
+class JSObject;
+class JSScope;
+class LLIntOffsetsExtractor;
+class SlotVisitor;
+class VM;
+
+typedef ExecState CallFrame;
+
+// ShadowChicken is a log that can be used to produce a shadow stack of CHICKEN-style stack frames.
+// This enables the debugger to almost always see the tail-deleted stack frames, so long as we have
+// memory inside ShadowChicken to remember them.
+//
+// The ShadowChicken log comprises packets that have one of two shapes:
+//
+// Prologue Packet, which has:
+// - Callee object.
+// - Frame pointer.
+// - Caller frame pointer.
+//
+// Tail Call Packet, which has just:
+// - Frame pointer.
+//
+// Prologue Packets are placed into the log in any JS function's prologue. Tail Call Packets are
+// placed into the log just before making a proper tail call. We never log returns, since that would
+// require a lot of infrastructure (unwinding, multiple ways of returning, etc). We don't need to
+// see the returns because the prologue packets have a frame pointer. The tail call packets tell us
+// when there was a tail call, and record the FP *before* the tail call.
+//
+// At any time it is possible to construct a shadow stack from the log and the actual machine stack.
+
+class ShadowChicken {
+ WTF_MAKE_NONCOPYABLE(ShadowChicken);
+ WTF_MAKE_FAST_ALLOCATED;
+public:
+ struct Packet {
+ Packet()
+ {
+ }
+
+ static const unsigned unlikelyValue = 0x7a11;
+
+ static JSObject* tailMarker()
+ {
+ return bitwise_cast<JSObject*>(static_cast<intptr_t>(unlikelyValue));
+ }
+
+ static JSObject* throwMarker()
+ {
+ return bitwise_cast<JSObject*>(static_cast<intptr_t>(unlikelyValue + 1));
+ }
+
+ static Packet prologue(JSObject* callee, CallFrame* frame, CallFrame* callerFrame, JSScope* scope)
+ {
+ Packet result;
+ result.callee = callee;
+ result.frame = frame;
+ result.callerFrame = callerFrame;
+ result.scope = scope;
+ return result;
+ }
+
+ static Packet tail(CallFrame* frame, JSValue thisValue, JSScope* scope, CodeBlock* codeBlock, CallSiteIndex callSiteIndex)
+ {
+ Packet result;
+ result.callee = tailMarker();
+ result.frame = frame;
+ result.thisValue = thisValue;
+ result.scope = scope;
+ result.codeBlock = codeBlock;
+ result.callSiteIndex = callSiteIndex;
+ return result;
+ }
+
+ static Packet throwPacket()
+ {
+ Packet result;
+ result.callee = throwMarker();
+ return result;
+ }
+
+ explicit operator bool() const { return !!callee; }
+
+ bool isPrologue() const { return *this && callee != tailMarker() && callee != throwMarker(); }
+ bool isTail() const { return *this && callee == tailMarker(); }
+ bool isThrow() const { return *this && callee == throwMarker(); }
+
+ void dump(PrintStream&) const;
+
+        // Only tail packets have a valid thisValue, CodeBlock*, and CallSiteIndex. For non-tail-deleted frames we grab 'this' and the CodeBlock* from the machine frame.
+ JSValue thisValue { JSValue() };
+ JSObject* callee { nullptr };
+ CallFrame* frame { nullptr };
+ CallFrame* callerFrame { nullptr };
+ JSScope* scope { nullptr };
+ CodeBlock* codeBlock { nullptr };
+ CallSiteIndex callSiteIndex;
+ };
+
+ struct Frame {
+ Frame()
+ {
+ }
+
+ Frame(JSObject* callee, CallFrame* frame, bool isTailDeleted, JSValue thisValue = JSValue(), JSScope* scope = nullptr, CodeBlock* codeBlock = nullptr, CallSiteIndex callSiteIndex = CallSiteIndex())
+ : callee(callee)
+ , frame(frame)
+ , thisValue(thisValue)
+ , scope(scope)
+ , codeBlock(codeBlock)
+ , callSiteIndex(callSiteIndex)
+ , isTailDeleted(isTailDeleted)
+ {
+ }
+
+ bool operator==(const Frame& other) const
+ {
+ return callee == other.callee
+ && frame == other.frame
+ && thisValue == other.thisValue
+ && scope == other.scope
+ && codeBlock == other.codeBlock
+ && callSiteIndex.bits() == other.callSiteIndex.bits()
+ && isTailDeleted == other.isTailDeleted;
+ }
+
+ bool operator!=(const Frame& other) const
+ {
+ return !(*this == other);
+ }
+
+ void dump(PrintStream&) const;
+
+ // FIXME: This should be able to hold the moral equivalent of StackVisitor::Frame, so that
+ // we can support inlining.
+ // https://bugs.webkit.org/show_bug.cgi?id=155686
+ JSObject* callee { nullptr };
+ CallFrame* frame { nullptr };
+ JSValue thisValue { JSValue() };
+ JSScope* scope { nullptr };
+ CodeBlock* codeBlock { nullptr };
+ CallSiteIndex callSiteIndex;
+ bool isTailDeleted { false };
+ };
+
+ ShadowChicken();
+ ~ShadowChicken();
+
+ void log(VM& vm, ExecState* exec, const Packet&);
+
+ void update(VM&, ExecState*);
+
+ // Expects this signature: (const Frame& frame) -> bool. Return true to keep iterating. Return false to stop iterating.
+ // Note that this only works right with inlining disabled, but that's OK since for now we
+ // disable inlining when the inspector is attached. It would be easy to make this work with
+ // inlining, and would mostly require that we can request that StackVisitor doesn't skip tail
+ // frames.
+ template<typename Functor>
+ void iterate(VM&, ExecState*, const Functor&);
+
+ void visitChildren(SlotVisitor&);
+ void reset();
+
+ // JIT support.
+ Packet* log() const { return m_log; }
+ unsigned logSize() const { return m_logSize; }
+ Packet** addressOfLogCursor() { return &m_logCursor; }
+ Packet* logEnd() { return m_logEnd; }
+
+ void dump(PrintStream&) const;
+
+ JS_EXPORT_PRIVATE JSArray* functionsOnStack(ExecState*);
+
+private:
+ friend class LLIntOffsetsExtractor;
+
+ Packet* m_log { nullptr };
+ unsigned m_logSize { 0 };
+ Packet* m_logCursor { nullptr };
+ Packet* m_logEnd { nullptr };
+
+ Vector<Frame> m_stack;
+};
+
+} // namespace JSC
+
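The JIT-support accessors above (log(), logSize(), addressOfLogCursor(), logEnd()) exist so that emitted code can append packets directly at the cursor and only fall back to the runtime when the buffer is full. A rough standalone sketch of that append-until-full-then-process discipline follows; ToyPacket, ToyLog, and process() are hypothetical stand-ins, the real slow path being ShadowChicken::log() and update().

#include <cstdio>

struct ToyPacket {
    const void* frame;
    const void* callee; // nullptr plays the role of the tail-call marker here
};

struct ToyLog {
    static constexpr unsigned logSize = 4;
    ToyPacket entries[logSize];
    ToyPacket* cursor = entries;
    ToyPacket* end = entries + logSize;

    void append(const ToyPacket& packet)
    {
        if (cursor == end)
            process(); // where JSC would reconcile the log into the shadow stack
        *cursor++ = packet;
    }

    void process()
    {
        for (ToyPacket* p = entries; p < cursor; ++p)
            std::printf("packet frame=%p callee=%p%s\n", p->frame, p->callee, p->callee ? "" : " (tail)");
        cursor = entries; // reset the cursor once the packets are consumed
    }
};

int main()
{
    ToyLog log;
    int frameA, frameB;
    log.append({ &frameA, &frameA }); // prologue-style packet
    log.append({ &frameA, nullptr }); // tail-call-style packet
    log.append({ &frameB, &frameB });
    log.append({ &frameB, &frameB });
    log.append({ &frameA, &frameA }); // buffer is full, so this forces a process() first
    log.process();
    return 0;
}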
diff --git a/Source/JavaScriptCore/interpreter/ShadowChickenInlines.h b/Source/JavaScriptCore/interpreter/ShadowChickenInlines.h
new file mode 100644
index 000000000..88d3dd326
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/ShadowChickenInlines.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include "JSCInlines.h"
+#include "ShadowChicken.h"
+
+namespace JSC {
+
+template<typename Functor>
+void ShadowChicken::iterate(VM& vm, ExecState* exec, const Functor& functor)
+{
+ DeferGC deferGC(exec->vm().heap);
+
+ update(vm, exec);
+
+ for (unsigned i = m_stack.size(); i--;) {
+ if (!functor(m_stack[i]))
+ break;
+ }
+}
+
+} // namespace JSC
+
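iterate() above walks the shadow stack from the most recent frame downwards and stops as soon as the functor returns false. A tiny standalone sketch of that traversal contract, with a plain std::vector standing in for m_stack and ints standing in for frames:

#include <cstdio>
#include <vector>

// Walk from the newest entry (back of the vector) to the oldest, stopping when
// the functor asks to, mirroring the iterate() contract above.
template<typename Functor>
void iterateTopDown(const std::vector<int>& stack, const Functor& functor)
{
    for (size_t i = stack.size(); i--;) {
        if (!functor(stack[i]))
            break;
    }
}

int main()
{
    std::vector<int> callees { 10, 20, 30, 40 };
    iterateTopDown(callees, [] (int callee) {
        std::printf("callee %d\n", callee);
        return callee != 20; // stop once we have seen 20
    });
    return 0;
}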
diff --git a/Source/JavaScriptCore/interpreter/StackVisitor.cpp b/Source/JavaScriptCore/interpreter/StackVisitor.cpp
index d922e7f8f..aae6daa78 100644
--- a/Source/JavaScriptCore/interpreter/StackVisitor.cpp
+++ b/Source/JavaScriptCore/interpreter/StackVisitor.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013, 2015-2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,42 +26,88 @@
#include "config.h"
#include "StackVisitor.h"
-#include "Arguments.h"
-#include "CallFrameInlines.h"
-#include "Executable.h"
+#include "ClonedArguments.h"
+#include "DebuggerPrimitives.h"
+#include "InlineCallFrame.h"
#include "Interpreter.h"
-#include "Operations.h"
-#include <wtf/DataLog.h>
+#include "JSCInlines.h"
+#include "JSWebAssemblyCallee.h"
+#include <wtf/text/StringBuilder.h>
namespace JSC {
StackVisitor::StackVisitor(CallFrame* startFrame)
{
m_frame.m_index = 0;
- readFrame(startFrame);
+ m_frame.m_isWasmFrame = false;
+ CallFrame* topFrame;
+ if (startFrame) {
+ m_frame.m_VMEntryFrame = startFrame->vm().topVMEntryFrame;
+ topFrame = startFrame->vm().topCallFrame;
+
+ if (topFrame && static_cast<void*>(m_frame.m_VMEntryFrame) == static_cast<void*>(topFrame)) {
+ topFrame = vmEntryRecord(m_frame.m_VMEntryFrame)->m_prevTopCallFrame;
+ m_frame.m_VMEntryFrame = vmEntryRecord(m_frame.m_VMEntryFrame)->m_prevTopVMEntryFrame;
+ }
+ } else {
+ m_frame.m_VMEntryFrame = 0;
+ topFrame = 0;
+ }
+ m_frame.m_callerIsVMEntryFrame = false;
+ readFrame(topFrame);
+
+ // Find the frame the caller wants to start unwinding from.
+ while (m_frame.callFrame() && m_frame.callFrame() != startFrame)
+ gotoNextFrame();
}
void StackVisitor::gotoNextFrame()
{
+ m_frame.m_index++;
#if ENABLE(DFG_JIT)
if (m_frame.isInlinedFrame()) {
InlineCallFrame* inlineCallFrame = m_frame.inlineCallFrame();
- CodeOrigin* callerCodeOrigin = &inlineCallFrame->caller;
- readInlinedFrame(m_frame.callFrame(), callerCodeOrigin);
-
- } else
+ CodeOrigin* callerCodeOrigin = inlineCallFrame->getCallerSkippingTailCalls();
+ if (!callerCodeOrigin) {
+ while (inlineCallFrame) {
+ readInlinedFrame(m_frame.callFrame(), &inlineCallFrame->directCaller);
+ inlineCallFrame = m_frame.inlineCallFrame();
+ }
+ m_frame.m_VMEntryFrame = m_frame.m_CallerVMEntryFrame;
+ readFrame(m_frame.callerFrame());
+ } else
+ readInlinedFrame(m_frame.callFrame(), callerCodeOrigin);
+ return;
+ }
#endif // ENABLE(DFG_JIT)
- readFrame(m_frame.callerFrame());
+ m_frame.m_VMEntryFrame = m_frame.m_CallerVMEntryFrame;
+ readFrame(m_frame.callerFrame());
+}
+
+void StackVisitor::unwindToMachineCodeBlockFrame()
+{
+#if ENABLE(DFG_JIT)
+ if (m_frame.isInlinedFrame()) {
+ CodeOrigin codeOrigin = m_frame.inlineCallFrame()->directCaller;
+ while (codeOrigin.inlineCallFrame)
+ codeOrigin = codeOrigin.inlineCallFrame->directCaller;
+ readNonInlinedFrame(m_frame.callFrame(), &codeOrigin);
+ }
+#endif
}
void StackVisitor::readFrame(CallFrame* callFrame)
{
- ASSERT(!callFrame->isVMEntrySentinel());
if (!callFrame) {
m_frame.setToEnd();
return;
}
+ if (callFrame->callee()->isAnyWasmCallee(callFrame->vm())) {
+ readNonInlinedFrame(callFrame);
+ return;
+ }
+
#if !ENABLE(DFG_JIT)
readNonInlinedFrame(callFrame);
@@ -81,7 +127,7 @@ void StackVisitor::readFrame(CallFrame* callFrame)
return;
}
- unsigned index = callFrame->locationAsCodeOriginIndex();
+ CallSiteIndex index = callFrame->callSiteIndex();
ASSERT(codeBlock->canGetCodeOrigin(index));
if (!codeBlock->canGetCodeOrigin(index)) {
// See assertion above. In release builds, we try to protect ourselves
@@ -104,13 +150,26 @@ void StackVisitor::readNonInlinedFrame(CallFrame* callFrame, CodeOrigin* codeOri
{
m_frame.m_callFrame = callFrame;
m_frame.m_argumentCountIncludingThis = callFrame->argumentCountIncludingThis();
- m_frame.m_callerFrame = callFrame->callerFrameSkippingVMEntrySentinel();
- m_frame.m_callee = callFrame->callee();
- m_frame.m_scope = callFrame->scope();
- m_frame.m_codeBlock = callFrame->codeBlock();
- m_frame.m_bytecodeOffset = !m_frame.codeBlock() ? 0
- : codeOrigin ? codeOrigin->bytecodeIndex
- : callFrame->locationAsBytecodeOffset();
+ m_frame.m_CallerVMEntryFrame = m_frame.m_VMEntryFrame;
+ m_frame.m_callerFrame = callFrame->callerFrame(m_frame.m_CallerVMEntryFrame);
+ m_frame.m_callerIsVMEntryFrame = m_frame.m_CallerVMEntryFrame != m_frame.m_VMEntryFrame;
+ m_frame.m_isWasmFrame = false;
+
+ JSCell* callee = callFrame->callee();
+ m_frame.m_callee = callee;
+
+ if (callee->isAnyWasmCallee(*callee->vm())) {
+ m_frame.m_isWasmFrame = true;
+ m_frame.m_codeBlock = nullptr;
+ m_frame.m_bytecodeOffset = 0;
+ } else {
+ m_frame.m_codeBlock = callFrame->codeBlock();
+ m_frame.m_bytecodeOffset = !m_frame.codeBlock() ? 0
+ : codeOrigin ? codeOrigin->bytecodeIndex
+ : callFrame->bytecodeOffset();
+
+ }
+
#if ENABLE(DFG_JIT)
m_frame.m_inlineCallFrame = 0;
#endif
@@ -127,7 +186,7 @@ static int inlinedFrameOffset(CodeOrigin* codeOrigin)
void StackVisitor::readInlinedFrame(CallFrame* callFrame, CodeOrigin* codeOrigin)
{
ASSERT(codeOrigin);
- ASSERT(!callFrame->isVMEntrySentinel());
+ m_frame.m_isWasmFrame = false;
int frameOffset = inlinedFrameOffset(codeOrigin);
bool isInlined = !!frameOffset;
@@ -136,14 +195,15 @@ void StackVisitor::readInlinedFrame(CallFrame* callFrame, CodeOrigin* codeOrigin
m_frame.m_callFrame = callFrame;
m_frame.m_inlineCallFrame = inlineCallFrame;
- m_frame.m_argumentCountIncludingThis = inlineCallFrame->arguments.size();
- m_frame.m_codeBlock = inlineCallFrame->baselineCodeBlock();
+ if (inlineCallFrame->argumentCountRegister.isValid())
+ m_frame.m_argumentCountIncludingThis = callFrame->r(inlineCallFrame->argumentCountRegister.offset()).unboxedInt32();
+ else
+ m_frame.m_argumentCountIncludingThis = inlineCallFrame->arguments.size();
+ m_frame.m_codeBlock = inlineCallFrame->baselineCodeBlock.get();
m_frame.m_bytecodeOffset = codeOrigin->bytecodeIndex;
JSFunction* callee = inlineCallFrame->calleeForCallFrame(callFrame);
- m_frame.m_scope = callee->scope();
m_frame.m_callee = callee;
- ASSERT(m_frame.scope());
ASSERT(m_frame.callee());
// The callerFrame just needs to be non-null to indicate that we
@@ -158,14 +218,24 @@ void StackVisitor::readInlinedFrame(CallFrame* callFrame, CodeOrigin* codeOrigin
}
#endif // ENABLE(DFG_JIT)
+bool StackVisitor::Frame::isWasmFrame() const
+{
+ return m_isWasmFrame;
+}
+
StackVisitor::Frame::CodeType StackVisitor::Frame::codeType() const
{
- if (!isJSFrame())
+ if (isWasmFrame())
+ return CodeType::Wasm;
+
+ if (!codeBlock())
return CodeType::Native;
switch (codeBlock()->codeType()) {
case EvalCode:
return CodeType::Eval;
+ case ModuleCode:
+ return CodeType::Module;
case FunctionCode:
return CodeType::Function;
case GlobalCode:
@@ -175,50 +245,88 @@ StackVisitor::Frame::CodeType StackVisitor::Frame::codeType() const
return CodeType::Global;
}
-String StackVisitor::Frame::functionName()
+RegisterAtOffsetList* StackVisitor::Frame::calleeSaveRegisters()
+{
+ if (isInlinedFrame())
+ return nullptr;
+
+#if ENABLE(JIT) && NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
+
+#if ENABLE(WEBASSEMBLY)
+ if (isWasmFrame()) {
+ if (JSCell* callee = this->callee()) {
+ if (JSWebAssemblyCallee* wasmCallee = jsDynamicCast<JSWebAssemblyCallee*>(*callee->vm(), callee))
+ return wasmCallee->calleeSaveRegisters();
+ // Other wasm callees (e.g, stubs) don't use callee save registers, so nothing needs
+ // to be restored for them.
+ }
+
+ return nullptr;
+ }
+#endif // ENABLE(WEBASSEMBLY)
+
+ if (CodeBlock* codeBlock = this->codeBlock())
+ return codeBlock->calleeSaveRegisters();
+
+#endif // ENABLE(JIT) && NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
+
+ return nullptr;
+}
+
+String StackVisitor::Frame::functionName() const
{
String traceLine;
- JSObject* callee = this->callee();
+ JSCell* callee = this->callee();
switch (codeType()) {
+ case CodeType::Wasm:
+ traceLine = ASCIILiteral("wasm code");
+ break;
case CodeType::Eval:
- traceLine = "eval code";
+ traceLine = ASCIILiteral("eval code");
+ break;
+ case CodeType::Module:
+ traceLine = ASCIILiteral("module code");
break;
case CodeType::Native:
if (callee)
- traceLine = getCalculatedDisplayName(callFrame(), callee).impl();
+ traceLine = getCalculatedDisplayName(callFrame()->vm(), jsCast<JSObject*>(callee)).impl();
break;
case CodeType::Function:
- traceLine = getCalculatedDisplayName(callFrame(), callee).impl();
+ traceLine = getCalculatedDisplayName(callFrame()->vm(), jsCast<JSObject*>(callee)).impl();
break;
case CodeType::Global:
- traceLine = "global code";
+ traceLine = ASCIILiteral("global code");
break;
}
return traceLine.isNull() ? emptyString() : traceLine;
}
-String StackVisitor::Frame::sourceURL()
+String StackVisitor::Frame::sourceURL() const
{
String traceLine;
switch (codeType()) {
case CodeType::Eval:
+ case CodeType::Module:
case CodeType::Function:
case CodeType::Global: {
- String sourceURL = codeBlock()->ownerExecutable()->sourceURL();
+ String sourceURL = codeBlock()->ownerScriptExecutable()->sourceURL();
if (!sourceURL.isEmpty())
traceLine = sourceURL.impl();
break;
}
case CodeType::Native:
- traceLine = "[native code]";
+ traceLine = ASCIILiteral("[native code]");
+ break;
+ case CodeType::Wasm:
+ traceLine = ASCIILiteral("[wasm code]");
break;
}
return traceLine.isNull() ? emptyString() : traceLine;
}
-String StackVisitor::Frame::toString()
+String StackVisitor::Frame::toString() const
{
StringBuilder traceBuild;
String functionName = this->functionName();
@@ -228,7 +336,7 @@ String StackVisitor::Frame::toString()
if (!functionName.isEmpty())
traceBuild.append('@');
traceBuild.append(sourceURL);
- if (isJSFrame()) {
+ if (hasLineAndColumnInfo()) {
unsigned line = 0;
unsigned column = 0;
computeLineAndColumn(line, column);
@@ -241,49 +349,39 @@ String StackVisitor::Frame::toString()
return traceBuild.toString().impl();
}
-Arguments* StackVisitor::Frame::createArguments()
+intptr_t StackVisitor::Frame::sourceID()
+{
+ if (CodeBlock* codeBlock = this->codeBlock())
+ return codeBlock->ownerScriptExecutable()->sourceID();
+ return noSourceID;
+}
+
+ClonedArguments* StackVisitor::Frame::createArguments()
{
ASSERT(m_callFrame);
CallFrame* physicalFrame = m_callFrame;
- VM& vm = physicalFrame->vm();
- Arguments* arguments;
+ ClonedArguments* arguments;
+ ArgumentsMode mode;
+ if (Options::useFunctionDotArguments())
+ mode = ArgumentsMode::Cloned;
+ else
+ mode = ArgumentsMode::FakeValues;
#if ENABLE(DFG_JIT)
if (isInlinedFrame()) {
ASSERT(m_inlineCallFrame);
- arguments = Arguments::create(vm, physicalFrame, m_inlineCallFrame);
- arguments->tearOff(physicalFrame, m_inlineCallFrame);
+ arguments = ClonedArguments::createWithInlineFrame(physicalFrame, physicalFrame, m_inlineCallFrame, mode);
} else
#endif
- {
- arguments = Arguments::create(vm, physicalFrame);
- arguments->tearOff(physicalFrame);
- }
+ arguments = ClonedArguments::createWithMachineFrame(physicalFrame, physicalFrame, mode);
return arguments;
}
-Arguments* StackVisitor::Frame::existingArguments()
+bool StackVisitor::Frame::hasLineAndColumnInfo() const
{
- if (codeBlock()->codeType() != FunctionCode)
- return 0;
- if (!codeBlock()->usesArguments())
- return 0;
-
- VirtualRegister reg;
-
-#if ENABLE(DFG_JIT)
- if (isInlinedFrame())
- reg = inlineCallFrame()->argumentsRegister;
- else
-#endif // ENABLE(DFG_JIT)
- reg = codeBlock()->argumentsRegister();
-
- JSValue result = callFrame()->r(unmodifiedArgumentsRegister(reg).offset()).jsValue();
- if (!result)
- return 0;
- return jsCast<Arguments*>(result);
+ return !!codeBlock();
}
-void StackVisitor::Frame::computeLineAndColumn(unsigned& line, unsigned& column)
+void StackVisitor::Frame::computeLineAndColumn(unsigned& line, unsigned& column) const
{
CodeBlock* codeBlock = this->codeBlock();
if (!codeBlock) {
@@ -299,11 +397,14 @@ void StackVisitor::Frame::computeLineAndColumn(unsigned& line, unsigned& column)
unsigned divotColumn = 0;
retrieveExpressionInfo(divot, unusedStartOffset, unusedEndOffset, divotLine, divotColumn);
- line = divotLine + codeBlock->ownerExecutable()->lineNo();
+ line = divotLine + codeBlock->ownerScriptExecutable()->firstLine();
column = divotColumn + (divotLine ? 1 : codeBlock->firstLineColumnOffset());
+
+ if (codeBlock->ownerScriptExecutable()->hasOverrideLineNumber())
+ line = codeBlock->ownerScriptExecutable()->overrideLineNumber();
}
-void StackVisitor::Frame::retrieveExpressionInfo(int& divot, int& startOffset, int& endOffset, unsigned& line, unsigned& column)
+void StackVisitor::Frame::retrieveExpressionInfo(int& divot, int& startOffset, int& endOffset, unsigned& line, unsigned& column) const
{
CodeBlock* codeBlock = this->codeBlock();
codeBlock->unlinkedCodeBlock()->expressionRangeForBytecodeOffset(bytecodeOffset(), divot, startOffset, endOffset, line, column);
@@ -316,159 +417,89 @@ void StackVisitor::Frame::setToEnd()
#if ENABLE(DFG_JIT)
m_inlineCallFrame = 0;
#endif
+ m_isWasmFrame = false;
}
-#ifndef NDEBUG
-
-static const char* jitTypeName(JITCode::JITType jitType)
+void StackVisitor::Frame::dump(PrintStream& out, Indenter indent) const
{
- switch (jitType) {
- case JITCode::None: return "None";
- case JITCode::HostCallThunk: return "HostCallThunk";
- case JITCode::InterpreterThunk: return "InterpreterThunk";
- case JITCode::BaselineJIT: return "BaselineJIT";
- case JITCode::DFGJIT: return "DFGJIT";
- case JITCode::FTLJIT: return "FTLJIT";
- }
- return "<unknown>";
+ dump(out, indent, [] (PrintStream&) { });
}
-static void printIndents(int levels)
+void StackVisitor::Frame::dump(PrintStream& out, Indenter indent, std::function<void(PrintStream&)> prefix) const
{
- while (levels--)
- dataLogFString(" ");
-}
-
-static void printif(int indentLevels, const char* format, ...)
-{
- va_list argList;
- va_start(argList, format);
-
- if (indentLevels)
- printIndents(indentLevels);
-
-#if COMPILER(CLANG) || (COMPILER(GCC) && GCC_VERSION_AT_LEAST(4, 6, 0))
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wformat-nonliteral"
-#pragma GCC diagnostic ignored "-Wmissing-format-attribute"
-#endif
-
- WTF::dataLogFV(format, argList);
-
-#if COMPILER(CLANG) || (COMPILER(GCC) && GCC_VERSION_AT_LEAST(4, 6, 0))
-#pragma GCC diagnostic pop
-#endif
-
- va_end(argList);
-}
-
-void StackVisitor::Frame::print(int indentLevel)
-{
- int i = indentLevel;
-
if (!this->callFrame()) {
- printif(i, "frame 0x0\n");
+ out.print(indent, "frame 0x0\n");
return;
}
CodeBlock* codeBlock = this->codeBlock();
- printif(i, "frame %p {\n", this->callFrame());
+ out.print(indent);
+ prefix(out);
+ out.print("frame ", RawPointer(this->callFrame()), " {\n");
- CallFrame* callFrame = m_callFrame;
- CallFrame* callerFrame = this->callerFrame();
- void* returnPC = callFrame->hasReturnPC() ? callFrame->returnPC().value() : nullptr;
+ {
+ indent++;
+
+ CallFrame* callFrame = m_callFrame;
+ CallFrame* callerFrame = this->callerFrame();
+ void* returnPC = callFrame->hasReturnPC() ? callFrame->returnPC().value() : nullptr;
- printif(i, " name '%s'\n", functionName().utf8().data());
- printif(i, " sourceURL '%s'\n", sourceURL().utf8().data());
- printif(i, " isVMEntrySentinel %d\n", callerFrame->isVMEntrySentinel());
+ out.print(indent, "name: ", functionName(), "\n");
+ out.print(indent, "sourceURL: ", sourceURL(), "\n");
+ bool isInlined = false;
#if ENABLE(DFG_JIT)
- printif(i, " isInlinedFrame %d\n", isInlinedFrame());
- if (isInlinedFrame())
- printif(i, " InlineCallFrame %p\n", m_inlineCallFrame);
+ isInlined = isInlinedFrame();
+ out.print(indent, "isInlinedFrame: ", isInlinedFrame(), "\n");
+ if (isInlinedFrame())
+ out.print(indent, "InlineCallFrame: ", RawPointer(m_inlineCallFrame), "\n");
#endif
- printif(i, " callee %p\n", callee());
- printif(i, " returnPC %p\n", returnPC);
- printif(i, " callerFrame %p\n", callerFrame);
- unsigned locationRawBits = callFrame->locationAsRawBits();
- printif(i, " rawLocationBits %u 0x%x\n", locationRawBits, locationRawBits);
- printif(i, " codeBlock %p\n", codeBlock);
- if (codeBlock) {
- JITCode::JITType jitType = codeBlock->jitType();
- if (callFrame->hasLocationAsBytecodeOffset()) {
- unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
- printif(i, " bytecodeOffset %u %p / %zu\n", bytecodeOffset, reinterpret_cast<void*>(bytecodeOffset), codeBlock->instructions().size());
+ out.print(indent, "callee: ", RawPointer(callee()), "\n");
+ out.print(indent, "returnPC: ", RawPointer(returnPC), "\n");
+ out.print(indent, "callerFrame: ", RawPointer(callerFrame), "\n");
+ unsigned locationRawBits = callFrame->callSiteAsRawBits();
+ out.print(indent, "rawLocationBits: ", static_cast<uintptr_t>(locationRawBits),
+ " ", RawPointer(reinterpret_cast<void*>(locationRawBits)), "\n");
+ out.print(indent, "codeBlock: ", RawPointer(codeBlock));
+ if (codeBlock)
+ out.print(" ", *codeBlock);
+ out.print("\n");
+ if (codeBlock && !isInlined) {
+ indent++;
+
+ if (callFrame->callSiteBitsAreBytecodeOffset()) {
+ unsigned bytecodeOffset = callFrame->bytecodeOffset();
+ out.print(indent, "bytecodeOffset: ", bytecodeOffset, " of ", codeBlock->instructions().size(), "\n");
#if ENABLE(DFG_JIT)
- } else {
- unsigned codeOriginIndex = callFrame->locationAsCodeOriginIndex();
- printif(i, " codeOriginIdex %u %p / %zu\n", codeOriginIndex, reinterpret_cast<void*>(codeOriginIndex), codeBlock->codeOrigins().size());
+ } else {
+ out.print(indent, "hasCodeOrigins: ", codeBlock->hasCodeOrigins(), "\n");
+ if (codeBlock->hasCodeOrigins()) {
+ CallSiteIndex callSiteIndex = callFrame->callSiteIndex();
+ out.print(indent, "callSiteIndex: ", callSiteIndex.bits(), " of ", codeBlock->codeOrigins().size(), "\n");
+
+ JITCode::JITType jitType = codeBlock->jitType();
+ if (jitType != JITCode::FTLJIT) {
+ JITCode* jitCode = codeBlock->jitCode().get();
+ out.print(indent, "jitCode: ", RawPointer(jitCode),
+ " start ", RawPointer(jitCode->start()),
+ " end ", RawPointer(jitCode->end()), "\n");
+ }
+ }
#endif
+ }
+ unsigned line = 0;
+ unsigned column = 0;
+ computeLineAndColumn(line, column);
+ out.print(indent, "line: ", line, "\n");
+ out.print(indent, "column: ", column, "\n");
+
+ indent--;
}
- unsigned line = 0;
- unsigned column = 0;
- computeLineAndColumn(line, column);
- printif(i, " line %d\n", line);
- printif(i, " column %d\n", column);
- printif(i, " jitType %d <%s> isOptimizingJIT %d\n", jitType, jitTypeName(jitType), JITCode::isOptimizingJIT(jitType));
-#if ENABLE(DFG_JIT)
- printif(i, " hasCodeOrigins %d\n", codeBlock->hasCodeOrigins());
- if (codeBlock->hasCodeOrigins()) {
- JITCode* jitCode = codeBlock->jitCode().get();
- printif(i, " jitCode %p start %p end %p\n", jitCode, jitCode->start(), jitCode->end());
- }
-#endif
+ out.print(indent, "vmEntryFrame: ", RawPointer(vmEntryFrame()), "\n");
+ indent--;
}
- printif(i, "}\n");
+ out.print(indent, "}\n");
}
-#endif // NDEBUG
-
} // namespace JSC
-
-#ifndef NDEBUG
-using JSC::StackVisitor;
-
-// For debugging use
-JS_EXPORT_PRIVATE void debugPrintCallFrame(JSC::CallFrame*);
-JS_EXPORT_PRIVATE void debugPrintStack(JSC::CallFrame* topCallFrame);
-
-class DebugPrintFrameFunctor {
-public:
- enum Action {
- PrintOne,
- PrintAll
- };
-
- DebugPrintFrameFunctor(Action action)
- : m_action(action)
- {
- }
-
- StackVisitor::Status operator()(StackVisitor& visitor)
- {
- visitor->print(2);
- return m_action == PrintAll ? StackVisitor::Continue : StackVisitor::Done;
- }
-
-private:
- Action m_action;
-};
-
-void debugPrintCallFrame(JSC::CallFrame* callFrame)
-{
- if (!callFrame)
- return;
- DebugPrintFrameFunctor functor(DebugPrintFrameFunctor::PrintOne);
- callFrame->iterate(functor);
-}
-
-void debugPrintStack(JSC::CallFrame* topCallFrame)
-{
- if (!topCallFrame)
- return;
- DebugPrintFrameFunctor functor(DebugPrintFrameFunctor::PrintAll);
- topCallFrame->iterate(functor);
-}
-
-#endif // !NDEBUG
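The NDEBUG-only debugPrintCallFrame/debugPrintStack helpers removed above relied on the old Frame::print(); with the lambda-friendly visit() and the new Frame::dump(), roughly equivalent ad-hoc debugging can be written inline. This is a hedged sketch only, assuming the usual JSC/WTF headers and WTF::dataFile() as the PrintStream; debugDumpStack is a hypothetical name, not part of this patch.

// Sketch: dump every frame reachable from topCallFrame.
static void debugDumpStack(JSC::CallFrame* topCallFrame)
{
    if (!topCallFrame)
        return;
    JSC::StackVisitor::visit(topCallFrame, [] (JSC::StackVisitor& visitor) -> JSC::StackVisitor::Status {
        visitor->dump(WTF::dataFile()); // default Indenter; pass one to indent nested output
        return JSC::StackVisitor::Continue;
    });
}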
diff --git a/Source/JavaScriptCore/interpreter/StackVisitor.h b/Source/JavaScriptCore/interpreter/StackVisitor.h
index 990a226b3..81a9c7b74 100644
--- a/Source/JavaScriptCore/interpreter/StackVisitor.h
+++ b/Source/JavaScriptCore/interpreter/StackVisitor.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013, 2015-2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -23,9 +23,11 @@
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef StackVisitor_h
-#define StackVisitor_h
+#pragma once
+#include "VMEntryRecord.h"
+#include <functional>
+#include <wtf/Indenter.h>
#include <wtf/text/WTFString.h>
namespace JSC {
@@ -33,13 +35,13 @@ namespace JSC {
struct CodeOrigin;
struct InlineCallFrame;
-class Arguments;
class CodeBlock;
class ExecState;
+class JSCell;
class JSFunction;
-class JSObject;
-class JSScope;
+class ClonedArguments;
class Register;
+class RegisterAtOffsetList;
typedef ExecState CallFrame;
@@ -51,58 +53,70 @@ public:
Global,
Eval,
Function,
- Native
+ Module,
+ Native,
+ Wasm
};
size_t index() const { return m_index; }
size_t argumentCountIncludingThis() const { return m_argumentCountIncludingThis; }
+ bool callerIsVMEntryFrame() const { return m_callerIsVMEntryFrame; }
CallFrame* callerFrame() const { return m_callerFrame; }
- JSObject* callee() const { return m_callee; }
- JSScope* scope() const { return m_scope; }
+ JSCell* callee() const { return m_callee; }
CodeBlock* codeBlock() const { return m_codeBlock; }
unsigned bytecodeOffset() const { return m_bytecodeOffset; }
+ InlineCallFrame* inlineCallFrame() const {
#if ENABLE(DFG_JIT)
- InlineCallFrame* inlineCallFrame() const { return m_inlineCallFrame; }
+ return m_inlineCallFrame;
+#else
+ return nullptr;
#endif
+ }
- bool isJSFrame() const { return !!codeBlock(); }
-#if ENABLE(DFG_JIT)
- bool isInlinedFrame() const { return !!m_inlineCallFrame; }
-#endif
+ bool isNativeFrame() const { return !codeBlock() && !isWasmFrame(); }
+ bool isInlinedFrame() const { return !!inlineCallFrame(); }
+ bool isWasmFrame() const;
+
+ JS_EXPORT_PRIVATE String functionName() const;
+ JS_EXPORT_PRIVATE String sourceURL() const;
+ JS_EXPORT_PRIVATE String toString() const;
- JS_EXPORT_PRIVATE String functionName();
- JS_EXPORT_PRIVATE String sourceURL();
- JS_EXPORT_PRIVATE String toString();
+ intptr_t sourceID();
CodeType codeType() const;
- JS_EXPORT_PRIVATE void computeLineAndColumn(unsigned& line, unsigned& column);
+ bool hasLineAndColumnInfo() const;
+ JS_EXPORT_PRIVATE void computeLineAndColumn(unsigned& line, unsigned& column) const;
- Arguments* createArguments();
- Arguments* existingArguments();
+ RegisterAtOffsetList* calleeSaveRegisters();
+
+ ClonedArguments* createArguments();
+ VMEntryFrame* vmEntryFrame() const { return m_VMEntryFrame; }
CallFrame* callFrame() const { return m_callFrame; }
-#ifndef NDEBUG
- JS_EXPORT_PRIVATE void print(int indentLevel);
-#endif
+ void dump(PrintStream&, Indenter = Indenter()) const;
+ void dump(PrintStream&, Indenter, std::function<void(PrintStream&)> prefix) const;
private:
Frame() { }
~Frame() { }
- void retrieveExpressionInfo(int& divot, int& startOffset, int& endOffset, unsigned& line, unsigned& column);
+ void retrieveExpressionInfo(int& divot, int& startOffset, int& endOffset, unsigned& line, unsigned& column) const;
void setToEnd();
- size_t m_index;
- size_t m_argumentCountIncludingThis;
- CallFrame* m_callerFrame;
- JSObject* m_callee;
- JSScope* m_scope;
- CodeBlock* m_codeBlock;
- unsigned m_bytecodeOffset;
#if ENABLE(DFG_JIT)
InlineCallFrame* m_inlineCallFrame;
#endif
CallFrame* m_callFrame;
+ VMEntryFrame* m_VMEntryFrame;
+ VMEntryFrame* m_CallerVMEntryFrame;
+ CallFrame* m_callerFrame;
+ JSCell* m_callee;
+ CodeBlock* m_codeBlock;
+ size_t m_index;
+ size_t m_argumentCountIncludingThis;
+ unsigned m_bytecodeOffset;
+ bool m_callerIsVMEntryFrame : 1;
+ bool m_isWasmFrame : 1;
friend class StackVisitor;
};
@@ -113,10 +127,10 @@ public:
};
// StackVisitor::visit() expects a Functor that implements the following method:
- // Status operator()(StackVisitor&);
+ // Status operator()(StackVisitor&) const;
template <typename Functor>
- static void visit(CallFrame* startFrame, Functor& functor)
+ static void visit(CallFrame* startFrame, const Functor& functor)
{
StackVisitor visitor(startFrame);
while (visitor->callFrame()) {
@@ -129,6 +143,7 @@ public:
Frame& operator*() { return m_frame; }
ALWAYS_INLINE Frame* operator->() { return &m_frame; }
+ void unwindToMachineCodeBlockFrame();
private:
JS_EXPORT_PRIVATE StackVisitor(CallFrame* startFrame);
@@ -144,7 +159,30 @@ private:
Frame m_frame;
};
-} // namespace JSC
+class CallerFunctor {
+public:
+ CallerFunctor()
+ : m_hasSkippedFirstFrame(false)
+ , m_callerFrame(0)
+ {
+ }
+
+ CallFrame* callerFrame() const { return m_callerFrame; }
-#endif // StackVisitor_h
+ StackVisitor::Status operator()(StackVisitor& visitor) const
+ {
+ if (!m_hasSkippedFirstFrame) {
+ m_hasSkippedFirstFrame = true;
+ return StackVisitor::Continue;
+ }
+ m_callerFrame = visitor->callFrame();
+ return StackVisitor::Done;
+ }
+
+private:
+ mutable bool m_hasSkippedFirstFrame;
+ mutable CallFrame* m_callerFrame;
+};
+
+} // namespace JSC
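CallerFunctor above packages the common "skip the current frame, report the next one" pattern for use with visit(). A hedged usage sketch; callerOf is a hypothetical helper, not part of this patch.

// Sketch: find the frame that called exec, if any.
static JSC::CallFrame* callerOf(JSC::CallFrame* exec)
{
    JSC::CallerFunctor findCaller;
    JSC::StackVisitor::visit(exec, findCaller);
    return findCaller.callerFrame(); // null when there is no caller frame
}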
diff --git a/Source/JavaScriptCore/interpreter/VMEntryRecord.h b/Source/JavaScriptCore/interpreter/VMEntryRecord.h
new file mode 100644
index 000000000..f36c2bed4
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/VMEntryRecord.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2014, 2016 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include "GPRInfo.h"
+
+namespace JSC {
+
+struct VMEntryFrame;
+class ExecState;
+class VM;
+
+struct VMEntryRecord {
+ /*
+     * This record is stored in a vmEntryTo{JavaScript,Host} allocated frame. It is allocated on the stack
+     * after the callee-save registers, where local variables would go.
+ */
+ VM* m_vm;
+ ExecState* m_prevTopCallFrame;
+ VMEntryFrame* m_prevTopVMEntryFrame;
+
+#if ENABLE(JIT) && NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
+ intptr_t calleeSaveRegistersBuffer[NUMBER_OF_CALLEE_SAVES_REGISTERS];
+#endif
+
+ ExecState* prevTopCallFrame() { return m_prevTopCallFrame; }
+ SUPPRESS_ASAN ExecState* unsafePrevTopCallFrame() { return m_prevTopCallFrame; }
+
+ VMEntryFrame* prevTopVMEntryFrame() { return m_prevTopVMEntryFrame; }
+ SUPPRESS_ASAN VMEntryFrame* unsafePrevTopVMEntryFrame() { return m_prevTopVMEntryFrame; }
+};
+
+extern "C" VMEntryRecord* vmEntryRecord(VMEntryFrame*);
+
+struct VMEntryFrame {
+#if ENABLE(JIT) && NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
+ static ptrdiff_t vmEntryRecordOffset()
+ {
+ VMEntryFrame* fakeVMEntryFrame = reinterpret_cast<VMEntryFrame*>(0x1000);
+ VMEntryRecord* record = vmEntryRecord(fakeVMEntryFrame);
+ return static_cast<ptrdiff_t>(
+ reinterpret_cast<char*>(record) - reinterpret_cast<char*>(fakeVMEntryFrame));
+ }
+
+ static ptrdiff_t calleeSaveRegistersBufferOffset()
+ {
+ return vmEntryRecordOffset() + OBJECT_OFFSETOF(VMEntryRecord, calleeSaveRegistersBuffer);
+ }
+#endif
+};
+
+} // namespace JSC
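Each VM entry leaves a VMEntryRecord on the stack, and the records chain backwards through m_prevTopVMEntryFrame; that chain is how StackVisitor climbs out of one entry frame into the previous one. A hedged sketch of walking the chain, assuming vm.topVMEntryFrame as the starting point, as StackVisitor's constructor uses above; countVMEntryFrames is a hypothetical helper.

// Sketch: count how many nested VM entries are live on this thread's stack.
static unsigned countVMEntryFrames(JSC::VM& vm)
{
    unsigned count = 0;
    for (JSC::VMEntryFrame* entryFrame = vm.topVMEntryFrame; entryFrame;
            entryFrame = JSC::vmEntryRecord(entryFrame)->prevTopVMEntryFrame())
        count++;
    return count;
}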
diff --git a/Source/JavaScriptCore/interpreter/VMInspector.cpp b/Source/JavaScriptCore/interpreter/VMInspector.cpp
deleted file mode 100644
index fbb49413d..000000000
--- a/Source/JavaScriptCore/interpreter/VMInspector.cpp
+++ /dev/null
@@ -1,572 +0,0 @@
-/*
- * Copyright (C) 2012 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "VMInspector.h"
-
-#if ENABLE(VMINSPECTOR)
-
-#include <wtf/ASCIICType.h>
-#include <wtf/text/WTFString.h>
-
-namespace JSC {
-
-const char* VMInspector::getTypeName(JSValue value)
-{
- if (value.isInt32())
- return "<Int32>";
- if (value.isBoolean())
- return "<Boolean>";
- if (value.isNull())
- return "<Empty>";
- if (value.isUndefined())
- return "<Undefined>";
- if (value.isCell())
- return "<Cell>";
- if (value.isEmpty())
- return "<Empty>";
- return "";
-}
-
-void VMInspector::dumpFrame0(CallFrame* frame)
-{
- dumpFrame(frame, 0, 0, 0, 0);
-}
-
-void VMInspector::dumpFrame(CallFrame* frame, const char* prefix,
- const char* funcName, const char* file, int line)
-{
- int frameCount = VMInspector::countFrames(frame);
- if (frameCount < 0)
- return;
-
- Instruction* vPC = 0;
- if (frame->codeBlock())
- vPC = frame->currentVPC();
-
- #define CAST reinterpret_cast
-
- if (prefix)
- printf("%s ", prefix);
-
- printf("frame [%d] %p { cb %p:%s, retPC %p:%s, scope %p:%s, callee %p:%s, callerFrame %p:%s, argc %d, vPC %p }",
- frameCount, frame,
- CAST<void*>(frame[JSStack::CodeBlock].payload()),
- getTypeName(frame[JSStack::CodeBlock].jsValue()),
- CAST<void*>(frame[JSStack::ReturnPC].payload()),
- getTypeName(frame[JSStack::ReturnPC].jsValue()),
- CAST<void*>(frame[JSStack::ScopeChain].payload()),
- getTypeName(frame[JSStack::ScopeChain].jsValue()),
- CAST<void*>(frame[JSStack::Callee].payload()),
- getTypeName(frame[JSStack::Callee].jsValue()),
- CAST<void*>(frame[JSStack::CallerFrame].callFrame()),
- getTypeName(frame[JSStack::CallerFrame].jsValue()),
- frame[JSStack::ArgumentCount].payload(),
- vPC);
-
- if (funcName || file || (line >= 0)) {
- printf(" @");
- if (funcName)
- printf(" %s", funcName);
- if (file)
- printf(" %s", file);
- if (line >= 0)
- printf(":%d", line);
- }
- printf("\n");
-}
-
-int VMInspector::countFrames(CallFrame* frame)
-{
- int count = -1;
- while (frame && !frame->isVMEntrySentinel()) {
- count++;
- frame = frame->callerFrame();
- }
- return count;
-}
-
-
-//============================================================================
-// class FormatPrinter
-// - implements functionality to support fprintf.
-//
-// The FormatPrinter classes do the real formatting and printing.
-// By default, the superclass FormatPrinter will print to stdout (printf).
-// Each of the subclass will implement the other ...printf() options.
-// The subclasses are:
-//
-// FileFormatPrinter - fprintf
-// StringFormatPrinter - sprintf
-// StringNFormatPrinter - snprintf
-
-class FormatPrinter {
-public:
- virtual ~FormatPrinter() { }
-
- void print(const char* format, va_list args);
-
-protected:
- // Low level printers:
- bool printArg(const char* format, ...);
- virtual bool printArg(const char* format, va_list args);
-
- // JS type specific printers:
- void printWTFString(va_list args, bool verbose);
-};
-
-
-// The public print() function is the real workhorse behind the printf
-// family of functions. print() deciphers the % formatting, translate them
-// to primitive formats, and dispatches to underlying printArg() functions
-// to do the printing.
-//
-// The non-public internal printArg() function is virtual and is responsible
-// for handling the variations between printf, fprintf, sprintf, and snprintf.
-
-void FormatPrinter::print(const char* format, va_list args)
-{
- const char* p = format;
- const char* errorStr;
-
- // buffer is only used for 2 purposes:
- // 1. To temporarily hold a copy of normal chars (not needing formatting)
- // to be passed to printArg() and printed.
- //
- // The incoming format string may contain a string of normal chars much
- // longer than 128, but we handle this by breaking them out to 128 chars
- // fragments and printing each fragment before re-using the buffer to
- // load up the next fragment.
- //
- // 2. To hold a single "%..." format to be passed to printArg() to process
- // a single va_arg.
-
- char buffer[129]; // 128 chars + null terminator.
- char* end = &buffer[sizeof(buffer) - 1];
- const char* startOfFormatSpecifier = 0;
-
- while (true) {
- char c = *p++;
- char* curr = buffer;
-
- // Print leading normal chars:
- while (c != '\0' && c != '%') {
- *curr++ = c;
- if (curr == end) {
- // Out of buffer space. Flush the fragment, and start over.
- *curr = '\0';
- bool success = printArg("%s", buffer);
- if (!success) {
- errorStr = buffer;
- goto handleError;
- }
- curr = buffer;
- }
- c = *p++;
- }
- // If we have stuff in the buffer, flush the fragment:
- if (curr != buffer) {
- ASSERT(curr < end + 1);
- *curr = '\0';
- bool success = printArg("%s", buffer);
- if (!success) {
- errorStr = buffer;
- goto handleError;
- }
- }
-
- // End if there are not more chars to print:
- if (c == '\0')
- break;
-
-        // If we get here, we must have seen a '%':
- startOfFormatSpecifier = p - 1;
- ASSERT(*startOfFormatSpecifier == '%');
- c = *p++;
-
- // Check for "%%" case:
- if (c == '%') {
- bool success = printArg("%c", '%');
- if (!success) {
- errorStr = p - 2;
- goto handleError;
- }
- continue;
- }
-
- // Check for JS (%J<x>) formatting extensions:
- if (c == 'J') {
- bool verbose = false;
-
- c = *p++;
- if (UNLIKELY(c == '\0')) {
- errorStr = p - 2; // Rewind to % in "%J\0"
- goto handleError;
- }
-
- if (c == '+') {
- verbose = true;
- c= *p++;
- if (UNLIKELY(c == '\0')) {
- errorStr = p - 3; // Rewind to % in "%J+\0"
- goto handleError;
- }
- }
-
- switch (c) {
- // %Js - WTF::String*
- case 's': {
- printWTFString(args, verbose);
- continue;
- }
- } // END switch.
-
- // Check for non-JS extensions:
- } else if (c == 'b') {
- int value = va_arg(args, int);
- printArg("%s", value ? "TRUE" : "FALSE");
- continue;
- }
-
- // If we didn't handle the format in one of the above cases,
- // rewind p and let the standard formatting check handle it
- // if possible:
- p = startOfFormatSpecifier;
- ASSERT(*p == '%');
-
- // Check for standard formatting:
- // A format specifier always starts with a % and ends with some
- // alphabet. We'll do the simple thing and scan until the next
- // alphabet, or the end of string.
-
- // In the following, we're going to use buffer as storage for a copy
- // of a single format specifier. Hence, conceptually, we can think of
- // 'buffer' as synonymous with 'argFormat' here:
-
-#define ABORT_IF_FORMAT_TOO_LONG(curr) \
- do { \
- if (UNLIKELY(curr >= end)) \
- goto formatTooLong; \
- } while (false)
-
- curr = buffer;
- *curr++ = *p++; // Output the first % in the format specifier.
- c = *p++; // Grab the next char in the format specifier.
-
- // Checks for leading modifiers e.g. "%-d":
- // 0, -, ' ', +, '\''
- if (c == '0' || c == '-' || c == ' ' || c == '+' || c == '\'' || c == '#') {
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr++ = c;
- c = *p++;
- }
-
- // Checks for decimal digit field width modifiers e.g. "%2f":
- while (c >= '0' && c <= '9') {
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr++ = c;
- c = *p++;
- }
-
- // Checks for '.' e.g. "%2.f":
- if (c == '.') {
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr++ = c;
- c = *p++;
-
- // Checks for decimal digit precision modifiers e.g. "%.2f":
- while (c >= '0' && c <= '9') {
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr++ = c;
- c = *p++;
- }
- }
-
- // Checks for the modifier <m> where <m> can be:
- // l, h, j, t, z
- // e.g. "%ld"
- if (c == 'l' || c == 'h' || c == 'j' || c == 't' || c == 'z' || c == 'L') {
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr++ = c;
- char prevChar = c;
- c = *p++;
-
- // Checks for the modifier ll or hh in %<x><m>:
- if ((prevChar == 'l' || prevChar == 'h') && c == prevChar) {
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr++ = c;
- c = *p++;
- }
- }
-
- // Checks for %<x> where <x> can be:
- // d, i, n, o, u, x, X
- // But hey, we're just going to do the simple thing and allow any
- // alphabet. The user is expected to pass correct format specifiers.
- // We won't do any format checking here. We'll just pass it on, and the
- // underlying ...printf() implementation may do the needed checking
- // at its discretion.
- while (c != '\0' && !isASCIIAlpha(c)) {
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr++ = c;
- c = *p++;
- }
-
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr++ = c;
- if (c == '\0') {
- // Uh oh. Bad format. We should have gotten an alphabet instead.
- // Print the supposed format as a string instead:
- errorStr = buffer;
- goto handleError;
- }
-
- // Otherwise, we have the alpha that terminates the format.
- // Terminate the buffer (i.e. argFormat) string:
- ASSERT(isASCIIAlpha(c));
- ABORT_IF_FORMAT_TOO_LONG(curr);
- *curr = '\0';
-
- bool success = printArg(buffer, args);
- if (!success) {
- errorStr = buffer;
- goto handleError;
- }
- }
-#undef ABORT_IF_FORMAT_TOO_LONG
-
- return;
-
-formatTooLong:
- // Print the error string:
- ASSERT(!!startOfFormatSpecifier);
- p = startOfFormatSpecifier;
- ASSERT(p >= format);
- printArg("ERROR @ Format too long at \"%s\"\n", p);
- return;
-
-handleError:
- // We've got an error. Can't do any more work. Print an error message if
- // possible and then just return.
-
- // The errorStr may be pointing into the middle of buffer, or the original
- // format string. Move the string to buffer for consistency, and also so
- // that we can strip it of newlines below.
- if (errorStr != buffer) {
- size_t length = strlen(errorStr);
- if (length > sizeof(buffer) - 1)
- length = sizeof(buffer) - 1;
- memmove(buffer, errorStr, length);
- buffer[length] = '\0'; // Terminate the moved error string.
- }
- // Strip the newlines:
- char* cp = buffer;
- while (*cp) {
- if (*cp == '\n' || *cp == '\r')
- *cp = ' ';
- cp++;
- }
- // Print the error string:
- printArg("ERROR @ \"%s\"\n", buffer);
-}
-
-
-bool FormatPrinter::printArg(const char* format, ...)
-{
- va_list args;
- va_start(args, format);
- bool success = printArg(format, args);
- va_end(args);
- return success;
-}
-
-bool FormatPrinter::printArg(const char* format, va_list args)
-{
- int count = ::vprintf(format, args);
- return (count >= 0); // Fail if less than 0 chars printed.
-}
-
-
-// %Js - WTF::String*
-// verbose mode prints: WTF::String "<your string>"
-void FormatPrinter::printWTFString(va_list args, bool verbose)
-{
- const String* str = va_arg(args, const String*);
-
- // Print verbose header if appropriate:
- if (verbose)
- printArg("WTF::String \"");
-
- // Print the string itself:
- if (!str->isEmpty()) {
- if (str->is8Bit()) {
- const LChar* chars = str->characters8();
- printArg("%s", reinterpret_cast<const char*>(chars));
- } else {
-            const UChar* chars = str->characters16();
-            // Note: this cast assumes wchar_t is 16 bits wide. On platforms
-            // where wchar_t is 32 bits, "%S" will not read UChar data correctly.
-            printArg("%S", reinterpret_cast<const wchar_t*>(chars));
- }
- }
-
- // Print verbose footer if appropriate:
- if (verbose)
- printArg("\"");
-}
-
-
-//============================================================================
-// class FileFormatPrinter
-// - implements functionality to support fprintf.
-
-class FileFormatPrinter: public FormatPrinter {
-public:
- FileFormatPrinter(FILE*);
-private:
- virtual bool printArg(const char* format, va_list args);
-
- FILE* m_file;
-};
-
-FileFormatPrinter::FileFormatPrinter(FILE* file)
- : m_file(file)
-{
-}
-
-bool FileFormatPrinter::printArg(const char* format, va_list args)
-{
- int count = ::vfprintf(m_file, format, args);
- return (count >= 0); // Fail if less than 0 chars printed.
-}
-
-
-//============================================================================
-// class StringFormatPrinter
-// - implements functionality to support sprintf.
-
-class StringFormatPrinter: public FormatPrinter {
-public:
- StringFormatPrinter(char* buffer);
-private:
- virtual bool printArg(const char* format, va_list args);
-
- char* m_buffer;
-};
-
-StringFormatPrinter::StringFormatPrinter(char* buffer)
- : m_buffer(buffer)
-{
-}
-
-bool StringFormatPrinter::printArg(const char* format, va_list args)
-{
-    int count = ::vsprintf(m_buffer, format, args);
-    if (count < 0)
-        return false; // Fail: vsprintf reported an error.
-    m_buffer += count; // Advance past the characters just written.
-    return true;
-}
-
-
-//============================================================================
-// class StringNFormatPrinter
-// - implements functionality to support snprintf.
-
-class StringNFormatPrinter: public FormatPrinter {
-public:
- StringNFormatPrinter(char* buffer, size_t);
-private:
- virtual bool printArg(const char* format, va_list args);
-
- char* m_buffer;
- size_t m_size;
-};
-
-
-StringNFormatPrinter::StringNFormatPrinter(char* buffer, size_t size)
- : m_buffer(buffer)
- , m_size(size)
-{
-}
-
-bool StringNFormatPrinter::printArg(const char* format, va_list args)
-{
- if (m_size > 0) {
- int count = ::vsnprintf(m_buffer, m_size, format, args);
-
-        // vsnprintf returns a negative value on error; otherwise it returns
-        // the number of characters the fully formatted output would have had,
-        // so a non-negative count that is >= m_size means the output was
-        // truncated to fit.
-        bool success = (count >= 0);
-        if (static_cast<size_t>(count) >= m_size) {
-            // If count >= size, we didn't have enough buffer space. Clamp to
-            // the space we actually have.
-            count = m_size;
-        }
-
- // Adjust the buffer to what's left if appropriate:
- if (success) {
- m_buffer += count;
- m_size -= count;
- }
- return success;
- }
- // No more room to print. Declare it a fail:
- return false;
-}
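The truncation handling above relies on the standard vsnprintf contract: the return value reports the length the fully formatted output would have had, not the number of characters actually stored, so a non-negative return that is greater than or equal to the buffer size signals truncation. A small self-contained illustration of that contract (plain snprintf, nothing JSC-specific assumed):

    #include <cstddef>
    #include <cstdio>

    int main()
    {
        char buf[8];
        // "hello world" needs 11 characters plus the terminating NUL,
        // but buf can hold only 7 characters plus the NUL.
        int count = std::snprintf(buf, sizeof(buf), "%s", "hello world");

        // count is 11 (the untruncated length); buf now holds "hello w".
        bool truncated = count >= 0 && static_cast<std::size_t>(count) >= sizeof(buf);
        std::printf("buf=\"%s\" count=%d truncated=%d\n", buf, count, truncated);
        return 0;
    }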
-
-
-//============================================================================
-// VMInspector printf family of methods:
-
-void VMInspector::fprintf(FILE* file, const char* format, ...)
-{
- va_list args;
- va_start(args, format);
- FileFormatPrinter(file).print(format, args);
- va_end(args);
-}
-
-void VMInspector::printf(const char* format, ...)
-{
- va_list args;
- va_start(args, format);
- FormatPrinter().print(format, args);
- va_end(args);
-}
-
-void VMInspector::sprintf(char* buffer, const char* format, ...)
-{
- va_list args;
- va_start(args, format);
- StringFormatPrinter(buffer).print(format, args);
- va_end(args);
-}
-
-void VMInspector::snprintf(char* buffer, size_t size, const char* format, ...)
-{
- va_list args;
- va_start(args, format);
- StringNFormatPrinter(buffer, size).print(format, args);
- va_end(args);
-}
-
-} // namespace JSC
-
-#endif // ENABLE(VMINSPECTOR)
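Taken together, these wrappers give VMInspector a printf-style debugging facade over stdout, an arbitrary FILE*, and caller-supplied buffers. A hedged usage sketch, assuming a build with ENABLE_VMINSPECTOR enabled; the helper dumpName and its variables are illustrative only, not taken from the sources:

    #include "VMInspector.h"
    #include <wtf/text/WTFString.h>

    static void dumpName(const WTF::String& name)
    {
        char buf[128];

        // %Js consumes a WTF::String*; %b consumes an int and prints TRUE/FALSE.
        JSC::VMInspector::printf("name=\"%Js\" empty=%b\n", &name, name.isEmpty());

        // The same format machinery, writing into a bounded buffer and to a FILE*.
        JSC::VMInspector::snprintf(buf, sizeof(buf), "name=\"%Js\"\n", &name);
        JSC::VMInspector::fprintf(stderr, "%s", buf);
    }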
diff --git a/Source/JavaScriptCore/interpreter/VMInspector.h b/Source/JavaScriptCore/interpreter/VMInspector.h
deleted file mode 100644
index 6623068dc..000000000
--- a/Source/JavaScriptCore/interpreter/VMInspector.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (C) 2012 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef VMInspector_h
-#define VMInspector_h
-
-#define ENABLE_VMINSPECTOR 0
-
-#include "CallFrame.h"
-#include "JSCJSValue.h"
-#include <stdarg.h>
-#include <stdio.h>
-#include <wtf/text/WTFString.h>
-
-namespace JSC {
-
-#if ENABLE(VMINSPECTOR)
-
-class VMInspector {
-public:
- static JS_EXPORT_PRIVATE const char* getTypeName(JSValue);
- static JS_EXPORT_PRIVATE void dumpFrame0(CallFrame*);
- static JS_EXPORT_PRIVATE void dumpFrame(CallFrame*, const char* prefix = 0, const char* funcName = 0, const char* file = 0, int line = -1);
- static JS_EXPORT_PRIVATE int countFrames(CallFrame*);
-
- // Special family of ...printf() functions that support, in addition to the
- // standard % formats (e.g. %d, %s, etc), the following extra JSC formatting
- // options, %J<x>, where <x> consists of:
- //
- // + - verbose mode modifier.
- // Used in combination with other options. Must come after the %J.
- // s - WTF::String*
- //
- // Examples of usage:
- //
- // WTF::String str("My WTF String");
- //
- // // Printing the string. Will print:
- // // The wtf string says: "My WTF String" and is NOT EMPTY.
- //
- // VMInspector::printf("The wtf string says: \"%Js\" and is %s\n",
- // &str, str.isEmpty()?"EMPTY":"NOT EMPTY");
- //
- // // Printing the string with verbose mode. Will print:
- // // <WTF::String "My WTF String">
- //
- // VMInspector::printf("<%J+s>\n", &str);
- //
- // Also added some convenience non-JS formats:
- //
-    //   %b - boolean (va_arg will read an int).
-    //        Prints TRUE if non-zero, else prints FALSE.
-    //
-    //   Caution: the user is expected to pass arguments that correctly match
-    //   the corresponding % formatting.
-
- static JS_EXPORT_PRIVATE void fprintf(FILE*, const char* format, ...);
- static JS_EXPORT_PRIVATE void printf(const char* format, ...);
- static JS_EXPORT_PRIVATE void sprintf(char*, const char* format, ...);
- static JS_EXPORT_PRIVATE void snprintf(char*, size_t, const char* format, ...);
-};
-
-#endif // ENABLE(VMINSPECTOR)
-
-} // namespace JSC
-
-#endif // VMInspector_h