author    Lorry Tar Creator <lorry-tar-importer@lorry>    2017-06-27 06:07:23 +0000
committer Lorry Tar Creator <lorry-tar-importer@lorry>    2017-06-27 06:07:23 +0000
commit    1bf1084f2b10c3b47fd1a588d85d21ed0eb41d0c (patch)
tree      46dcd36c86e7fbc6e5df36deb463b33e9967a6f7 /Source/JavaScriptCore/runtime/VM.cpp
parent    32761a6cee1d0dee366b885b7b9c777e67885688 (diff)
download  WebKitGtk-tarball-master.tar.gz
Diffstat (limited to 'Source/JavaScriptCore/runtime/VM.cpp')
-rw-r--r--  Source/JavaScriptCore/runtime/VM.cpp  894
1 file changed, 519 insertions(+), 375 deletions(-)
diff --git a/Source/JavaScriptCore/runtime/VM.cpp b/Source/JavaScriptCore/runtime/VM.cpp
index d7e50ed61..a50f9db0a 100644
--- a/Source/JavaScriptCore/runtime/VM.cpp
+++ b/Source/JavaScriptCore/runtime/VM.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008, 2011, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -10,7 +10,7 @@
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
- * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
@@ -31,54 +31,94 @@
#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpoint.h"
-#include "CallFrameInlines.h"
+#include "BuiltinExecutables.h"
+#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
+#include "CommonSlowPaths.h"
+#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
-#include "DebuggerActivation.h"
+#include "Disassembler.h"
#include "ErrorInstance.h"
+#include "EvalCodeBlock.h"
+#include "Exception.h"
#include "FTLThunks.h"
+#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
+#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
+#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
+#include "InferredTypeTable.h"
#include "Interpreter.h"
+#include "JITCode.h"
+#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
-#include "JSActivation.h"
#include "JSArray.h"
+#include "JSCInlines.h"
+#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
+#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
-#include "JSNameScope.h"
-#include "JSNotAnObject.h"
+#include "JSMap.h"
#include "JSPromiseDeferred.h"
-#include "JSPromiseReaction.h"
-#include "JSPropertyNameIterator.h"
+#include "JSPropertyNameEnumerator.h"
+#include "JSScriptFetcher.h"
+#include "JSSourceCode.h"
+#include "JSTemplateRegistryKey.h"
+#include "JSWebAssembly.h"
#include "JSWithScope.h"
+#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
-#include "MapData.h"
+#include "ModuleProgramCodeBlock.h"
+#include "NativeStdFunctionCell.h"
#include "Nodes.h"
-#include "ParserArena.h"
+#include "Parser.h"
+#include "ProfilerDatabase.h"
+#include "ProgramCodeBlock.h"
+#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
+#include "RegisterAtOffsetList.h"
+#include "RuntimeType.h"
+#include "SamplingProfiler.h"
+#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
+#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
+#include "StructureInlines.h"
+#include "TypeProfiler.h"
+#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
+#include "VMEntryScope.h"
+#include "VMInspector.h"
+#include "Watchdog.h"
+#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
+#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
-#include <wtf/RetainPtr.h>
+#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
+#include <wtf/text/AtomicStringTable.h>
+#include <wtf/text/SymbolRegistry.h>
+
+#if !ENABLE(JIT)
+#include "CLoopStack.h"
+#include "CLoopStackInlines.h"
+#endif
#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
@@ -96,28 +136,6 @@ using namespace WTF;
namespace JSC {
-extern const HashTable arrayConstructorTable;
-extern const HashTable arrayPrototypeTable;
-extern const HashTable booleanPrototypeTable;
-extern const HashTable jsonTable;
-extern const HashTable dataViewTable;
-extern const HashTable dateTable;
-extern const HashTable dateConstructorTable;
-extern const HashTable errorPrototypeTable;
-extern const HashTable globalObjectTable;
-extern const HashTable numberConstructorTable;
-extern const HashTable numberPrototypeTable;
-JS_EXPORTDATA extern const HashTable objectConstructorTable;
-extern const HashTable privateNamePrototypeTable;
-extern const HashTable regExpTable;
-extern const HashTable regExpConstructorTable;
-extern const HashTable regExpPrototypeTable;
-extern const HashTable stringConstructorTable;
-#if ENABLE(PROMISES)
-extern const HashTable promisePrototypeTable;
-extern const HashTable promiseConstructorTable;
-#endif
-
// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
@@ -134,20 +152,6 @@ static bool enableAssembler(ExecutableAllocator& executableAllocator)
return false;
}
-#if USE(CF)
-#if COMPILER(GCC) && !COMPILER(CLANG)
- // FIXME: remove this once the EWS have been upgraded to LLVM.
- // Work around a bug of GCC with strict-aliasing.
- RetainPtr<CFStringRef> canUseJITKeyRetain = adoptCF(CFStringCreateWithCString(0 , "JavaScriptCoreUseJIT", kCFStringEncodingMacRoman));
- CFStringRef canUseJITKey = canUseJITKeyRetain.get();
-#else
- CFStringRef canUseJITKey = CFSTR("JavaScriptCoreUseJIT");
-#endif // COMPILER(GCC) && !COMPILER(CLANG)
- RetainPtr<CFTypeRef> canUseJIT = adoptCF(CFPreferencesCopyAppValue(canUseJITKey, kCFPreferencesCurrentApplication));
- if (canUseJIT)
- return kCFBooleanTrue == canUseJIT.get();
-#endif
-
#if USE(CF) || OS(UNIX)
char* canUseJITString = getenv("JavaScriptCoreUseJIT");
return !canUseJITString || atoi(canUseJITString);
@@ -163,35 +167,25 @@ VM::VM(VMType vmType, HeapType heapType)
, executableAllocator(*this)
#endif
, heap(this, heapType)
+ , auxiliarySpace("Auxiliary", heap, AllocatorAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary))
+ , cellSpace("JSCell", heap, AllocatorAttributes(DoesNotNeedDestruction, HeapCell::JSCell))
+ , destructibleCellSpace("Destructible JSCell", heap, AllocatorAttributes(NeedsDestruction, HeapCell::JSCell))
+ , stringSpace("JSString", heap)
+ , destructibleObjectSpace("JSDestructibleObject", heap)
+ , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap)
, vmType(vmType)
, clientData(0)
+ , topVMEntryFrame(nullptr)
, topCallFrame(CallFrame::noCaller())
- , arrayConstructorTable(adoptPtr(new HashTable(JSC::arrayConstructorTable)))
- , arrayPrototypeTable(adoptPtr(new HashTable(JSC::arrayPrototypeTable)))
- , booleanPrototypeTable(adoptPtr(new HashTable(JSC::booleanPrototypeTable)))
- , dataViewTable(adoptPtr(new HashTable(JSC::dataViewTable)))
- , dateTable(adoptPtr(new HashTable(JSC::dateTable)))
- , dateConstructorTable(adoptPtr(new HashTable(JSC::dateConstructorTable)))
- , errorPrototypeTable(adoptPtr(new HashTable(JSC::errorPrototypeTable)))
- , globalObjectTable(adoptPtr(new HashTable(JSC::globalObjectTable)))
- , jsonTable(adoptPtr(new HashTable(JSC::jsonTable)))
- , numberConstructorTable(adoptPtr(new HashTable(JSC::numberConstructorTable)))
- , numberPrototypeTable(adoptPtr(new HashTable(JSC::numberPrototypeTable)))
- , objectConstructorTable(adoptPtr(new HashTable(JSC::objectConstructorTable)))
- , privateNamePrototypeTable(adoptPtr(new HashTable(JSC::privateNamePrototypeTable)))
- , regExpTable(adoptPtr(new HashTable(JSC::regExpTable)))
- , regExpConstructorTable(adoptPtr(new HashTable(JSC::regExpConstructorTable)))
- , regExpPrototypeTable(adoptPtr(new HashTable(JSC::regExpPrototypeTable)))
- , stringConstructorTable(adoptPtr(new HashTable(JSC::stringConstructorTable)))
-#if ENABLE(PROMISES)
- , promisePrototypeTable(adoptPtr(new HashTable(JSC::promisePrototypeTable)))
- , promiseConstructorTable(adoptPtr(new HashTable(JSC::promiseConstructorTable)))
-#endif
- , identifierTable(vmType == Default ? wtfThreadData().currentIdentifierTable() : createIdentifierTable())
- , propertyNames(new CommonIdentifiers(this))
- , emptyList(new MarkedArgumentBuffer)
- , parserArena(adoptPtr(new ParserArena))
- , keywords(adoptPtr(new Keywords(*this)))
+ , topJSWebAssemblyInstance(nullptr)
+ , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
+ , propertyNames(nullptr)
+ , emptyList(new ArgList)
+ , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
+ , customGetterSetterFunctionMap(*this)
+ , stringCache(*this)
+ , symbolImplToSymbolMap(*this)
+ , prototypeMap(*this)
, interpreter(0)
, jsArrayClassInfo(JSArray::info())
, jsFinalObjectClassInfo(JSFinalObject::info())
@@ -201,8 +195,6 @@ VM::VM(VMType vmType, HeapType heapType)
#if ENABLE(REGEXP_TRACING)
, m_rtTraceList(new RTTraceList())
#endif
- , exclusiveThread(0)
- , m_newStringsSinceLastHashCons(0)
#if ENABLE(ASSEMBLER)
, m_canUseAssembler(enableAssembler(executableAllocator))
#endif
@@ -215,29 +207,29 @@ VM::VM(VMType vmType, HeapType heapType)
#if ENABLE(GC_VALIDATION)
, m_initializingObjectClass(0)
#endif
- , m_stackLimit(0)
-#if USE(SEPARATE_C_AND_JS_STACK)
- , m_jsStackLimit(0)
-#endif
- , m_inDefineOwnProperty(false)
- , m_codeCache(CodeCache::create())
- , m_enabledProfiler(nullptr)
+ , m_stackPointerAtVMEntry(0)
+ , m_codeCache(std::make_unique<CodeCache>())
+ , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
+ , m_typeProfilerEnabledCount(0)
+ , m_controlFlowProfilerEnabledCount(0)
+ , m_shadowChicken(std::make_unique<ShadowChicken>())
{
interpreter = new Interpreter(*this);
StackBounds stack = wtfThreadData().stack();
- setStackLimit(stack.recursionLimit());
+ updateSoftReservedZoneSize(Options::softReservedZoneSize());
+ setLastStackTop(stack.origin());
// Need to be careful to keep everything consistent here
JSLockHolder lock(this);
- IdentifierTable* existingEntryIdentifierTable = wtfThreadData().setCurrentIdentifierTable(identifierTable);
+ AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
+ propertyNames = new CommonIdentifiers(this);
structureStructure.set(*this, Structure::createStructure(*this));
structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
- debuggerActivationStructure.set(*this, DebuggerActivation::createStructure(*this, 0, jsNull()));
terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
- notAnObjectStructure.set(*this, JSNotAnObject::createStructure(*this, 0, jsNull()));
- propertyNameIteratorStructure.set(*this, JSPropertyNameIterator::createStructure(*this, 0, jsNull()));
- getterSetterStructure.set(*this, GetterSetter::createStructure(*this, 0, jsNull()));
+ propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
+ customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
+ scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
@@ -245,84 +237,158 @@ VM::VM(VMType vmType, HeapType heapType)
evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
+#if ENABLE(WEBASSEMBLY)
+ webAssemblyCalleeStructure.set(*this, JSWebAssemblyCallee::createStructure(*this, 0, jsNull()));
+ webAssemblyToJSCalleeStructure.set(*this, WebAssemblyToJSCallee::createStructure(*this, 0, jsNull()));
+ webAssemblyToJSCallee.set(*this, WebAssemblyToJSCallee::create(*this, webAssemblyToJSCalleeStructure.get()));
+#endif
+ moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
+ symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
+ fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
+ sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
+ scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
+ templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
- withScopeStructure.set(*this, JSWithScope::createStructure(*this, 0, jsNull()));
unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
+ unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
- mapDataStructure.set(*this, MapData::createStructure(*this, 0, jsNull()));
weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
+ inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
+ inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
+ inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
+ functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
+ exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
- promiseReactionStructure.set(*this, JSPromiseReaction::createStructure(*this, 0, jsNull()));
+ internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
+ programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
+ moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
+ evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
+ functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
+ hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
+ hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
+ hashMapImplSetStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKey>>::createStructure(*this, 0, jsNull()));
+ hashMapImplMapStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKeyValue>>::createStructure(*this, 0, jsNull()));
+
iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
+ nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
smallStrings.initializeCommonStrings(*this);
- wtfThreadData().setCurrentIdentifierTable(existingEntryIdentifierTable);
+ wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);
#if ENABLE(JIT)
- jitStubs = adoptPtr(new JITThunks());
+ jitStubs = std::make_unique<JITThunks>();
+ allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
#endif
+ arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();
#if ENABLE(FTL_JIT)
ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)
- interpreter->initialize(this->canUseJIT());
+ interpreter->initialize();
#if ENABLE(JIT)
initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif
heap.notifyIsSafeToCollect();
-
+
LLInt::Data::performAssertions(*this);
- if (Options::enableProfiler()) {
- m_perBytecodeProfiler = adoptPtr(new Profiler::Database(*this));
+ if (Options::useProfiler()) {
+ m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);
StringPrintStream pathOut;
-#if !OS(WINCE)
const char* profilerPath = getenv("JSC_PROFILER_PATH");
if (profilerPath)
pathOut.print(profilerPath, "/");
-#endif
pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
}
+ callFrameForCatch = nullptr;
+
#if ENABLE(DFG_JIT)
if (canUseJIT())
- dfgState = adoptPtr(new DFG::LongLivedState());
+ dfgState = std::make_unique<DFG::LongLivedState>();
#endif
// Initialize this last, as a free way of asserting that VM initialization itself
// won't use this.
m_typedArrayController = adoptRef(new SimpleTypedArrayController());
+
+ m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);
+
+ if (Options::useTypeProfiler())
+ enableTypeProfiler();
+ if (Options::useControlFlowProfiler())
+ enableControlFlowProfiler();
+#if ENABLE(SAMPLING_PROFILER)
+ if (Options::useSamplingProfiler()) {
+ setShouldBuildPCToCodeOriginMapping();
+ Ref<Stopwatch> stopwatch = Stopwatch::create();
+ stopwatch->start();
+ m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
+ if (Options::samplingProfilerPath())
+ m_samplingProfiler->registerForReportAtExit();
+ m_samplingProfiler->start();
+ }
+#endif // ENABLE(SAMPLING_PROFILER)
+
+ if (Options::alwaysGeneratePCToCodeOriginMap())
+ setShouldBuildPCToCodeOriginMapping();
+
+ if (Options::watchdog()) {
+ std::chrono::milliseconds timeoutMillis(Options::watchdog());
+ Watchdog& watchdog = ensureWatchdog();
+ watchdog.setTimeLimit(timeoutMillis);
+ }
+
+ VMInspector::instance().add(this);
}
VM::~VM()
{
+ VMInspector::instance().remove(this);
+
// Never GC, ever again.
heap.incrementDeferralDepth();
+
+#if ENABLE(SAMPLING_PROFILER)
+ if (m_samplingProfiler) {
+ m_samplingProfiler->reportDataToOptionFile();
+ m_samplingProfiler->shutdown();
+ }
+#endif // ENABLE(SAMPLING_PROFILER)
+#if ENABLE(JIT)
+ JITWorklist::instance()->completeAllForVM(*this);
+#endif // ENABLE(JIT)
+
#if ENABLE(DFG_JIT)
// Make sure concurrent compilations are done, but don't install them, since there is
// no point to doing so.
- if (worklist) {
- worklist->waitUntilAllPlansForVMAreReady(*this);
- worklist->removeAllReadyPlansForVM(*this);
+ for (unsigned i = DFG::numberOfWorklists(); i--;) {
+ if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
+ worklist->removeNonCompilingPlansForVM(*this);
+ worklist->waitUntilAllPlansForVMAreReady(*this);
+ worklist->removeAllReadyPlansForVM(*this);
+ }
}
#endif // ENABLE(DFG_JIT)
- // Clear this first to ensure that nobody tries to remove themselves from it.
- m_perBytecodeProfiler.clear();
+ waitForAsynchronousDisassembly();
+ // Clear this first to ensure that nobody tries to remove themselves from it.
+ m_perBytecodeProfiler = nullptr;
+
ASSERT(m_apiLock->currentThreadIsHoldingLock());
m_apiLock->willDestroyVM(this);
heap.lastChanceToFinalize();
@@ -332,33 +398,11 @@ VM::~VM()
interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif
- arrayPrototypeTable->deleteTable();
- arrayConstructorTable->deleteTable();
- booleanPrototypeTable->deleteTable();
- dataViewTable->deleteTable();
- dateTable->deleteTable();
- dateConstructorTable->deleteTable();
- errorPrototypeTable->deleteTable();
- globalObjectTable->deleteTable();
- jsonTable->deleteTable();
- numberConstructorTable->deleteTable();
- numberPrototypeTable->deleteTable();
- objectConstructorTable->deleteTable();
- privateNamePrototypeTable->deleteTable();
- regExpTable->deleteTable();
- regExpConstructorTable->deleteTable();
- regExpPrototypeTable->deleteTable();
- stringConstructorTable->deleteTable();
-#if ENABLE(PROMISES)
- promisePrototypeTable->deleteTable();
- promiseConstructorTable->deleteTable();
-#endif
-
delete emptyList;
delete propertyNames;
if (vmType != Default)
- deleteIdentifierTable(identifierTable);
+ delete m_atomicStringTable;
delete clientData;
delete m_regExpCache;
@@ -372,17 +416,22 @@ VM::~VM()
#endif
}
-PassRefPtr<VM> VM::createContextGroup(HeapType heapType)
+void VM::setLastStackTop(void* lastStackTop)
+{
+ m_lastStackTop = lastStackTop;
+}
+
+Ref<VM> VM::createContextGroup(HeapType heapType)
{
- return adoptRef(new VM(APIContextGroup, heapType));
+ return adoptRef(*new VM(APIContextGroup, heapType));
}
-PassRefPtr<VM> VM::create(HeapType heapType)
+Ref<VM> VM::create(HeapType heapType)
{
- return adoptRef(new VM(Default, heapType));
+ return adoptRef(*new VM(Default, heapType));
}
-PassRefPtr<VM> VM::createLeaked(HeapType heapType)
+Ref<VM> VM::createLeaked(HeapType heapType)
{
return create(heapType);
}
@@ -396,10 +445,8 @@ VM& VM::sharedInstance()
{
GlobalJSLock globalLock;
VM*& instance = sharedInstanceInternal();
- if (!instance) {
+ if (!instance)
instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
- instance->makeUsableFromMultipleThreads();
- }
return *instance;
}
@@ -409,6 +456,40 @@ VM*& VM::sharedInstanceInternal()
return sharedInstance;
}
+Watchdog& VM::ensureWatchdog()
+{
+ if (!m_watchdog) {
+ m_watchdog = adoptRef(new Watchdog());
+
+ // The LLINT peeks into the Watchdog object directly. In order to do that,
+ // the LLINT assumes that the internal shape of a std::unique_ptr is the
+ // same as a plain C++ pointer, and loads the address of Watchdog from it.
+ RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());
+
+ // And if we've previously compiled any functions, we need to revert
+ // them because they don't have the needed polling checks for the watchdog
+ // yet.
+ deleteAllCode(PreventCollectionAndDeleteAllCode);
+ }
+ return *m_watchdog;
+}
+
+HeapProfiler& VM::ensureHeapProfiler()
+{
+ if (!m_heapProfiler)
+ m_heapProfiler = std::make_unique<HeapProfiler>(*this);
+ return *m_heapProfiler;
+}
+
+#if ENABLE(SAMPLING_PROFILER)
+SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
+{
+ if (!m_samplingProfiler)
+ m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
+ return *m_samplingProfiler;
+}
+#endif // ENABLE(SAMPLING_PROFILER)
+
#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
@@ -417,18 +498,20 @@ static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
return charCodeAtThunkGenerator;
case CharAtIntrinsic:
return charAtThunkGenerator;
+ case Clz32Intrinsic:
+ return clz32ThunkGenerator;
case FromCharCodeIntrinsic:
return fromCharCodeThunkGenerator;
case SqrtIntrinsic:
return sqrtThunkGenerator;
- case PowIntrinsic:
- return powThunkGenerator;
case AbsIntrinsic:
return absThunkGenerator;
case FloorIntrinsic:
return floorThunkGenerator;
case CeilIntrinsic:
return ceilThunkGenerator;
+ case TruncIntrinsic:
+ return truncThunkGenerator;
case RoundIntrinsic:
return roundThunkGenerator;
case ExpIntrinsic:
@@ -437,37 +520,40 @@ static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
return logThunkGenerator;
case IMulIntrinsic:
return imulThunkGenerator;
- case ArrayIteratorNextKeyIntrinsic:
- return arrayIteratorNextKeyThunkGenerator;
- case ArrayIteratorNextValueIntrinsic:
- return arrayIteratorNextValueThunkGenerator;
+ case RandomIntrinsic:
+ return randomThunkGenerator;
+ case BoundThisNoArgsFunctionCallIntrinsic:
+ return boundThisNoArgsFunctionCallGenerator;
default:
- return 0;
+ return nullptr;
}
}
-NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
-{
- return jitStubs->hostFunctionStub(this, function, constructor);
-}
-NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic)
+#endif // ENABLE(JIT)
+
+NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
- ASSERT(canUseJIT());
- return jitStubs->hostFunctionStub(this, function, intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0, intrinsic);
+ return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}
-#else // !ENABLE(JIT)
-
-NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
+NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
+#if ENABLE(JIT)
+ if (canUseJIT()) {
+ return jitStubs->hostFunctionStub(
+ this, function, constructor,
+ intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
+ intrinsic, signature, name);
+ }
+#else // ENABLE(JIT)
+ UNUSED_PARAM(intrinsic);
+#endif // ENABLE(JIT)
return NativeExecutable::create(*this,
- MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), function,
- MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), constructor,
- NoIntrinsic);
+ adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
+ adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
+ NoIntrinsic, signature, name);
}
-#endif // !ENABLE(JIT)
-
VM::ClientData::~ClientData()
{
}
@@ -476,44 +562,36 @@ void VM::resetDateCache()
{
localTimeOffsetCache.reset();
cachedDateString = String();
- cachedDateStringValue = QNaN;
+ cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
dateInstanceCache.reset();
}
-void VM::startSampling()
-{
- interpreter->startSampling();
-}
-
-void VM::stopSampling()
-{
- interpreter->stopSampling();
-}
-
-void VM::prepareToDiscardCode()
+void VM::whenIdle(std::function<void()> callback)
{
-#if ENABLE(DFG_JIT)
- if (!worklist)
+ if (!entryScope) {
+ callback();
return;
-
- worklist->completeAllPlansForVM(*this);
-#endif
+ }
+
+ entryScope->addDidPopListener(callback);
}
-void VM::discardAllCode()
+void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
- prepareToDiscardCode();
- m_codeCache->clear();
- heap.deleteAllCompiledCode();
- heap.reportAbandonedObjectGraph();
+ whenIdle([=] () {
+ heap.deleteAllCodeBlocks(effort);
+ });
}
-void VM::dumpSampleData(ExecState* exec)
+void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
- interpreter->dumpSampleData(exec);
-#if ENABLE(ASSEMBLER)
- ExecutableAllocator::dumpProfile();
-#endif
+ whenIdle([=] () {
+ m_codeCache->clear();
+ m_regExpCache->deleteAllCode();
+ heap.deleteAllCodeBlocks(effort);
+ heap.deleteAllUnlinkedCodeBlocks(effort);
+ heap.reportAbandonedObjectGraph();
+ });
}
SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
@@ -529,213 +607,144 @@ void VM::clearSourceProviderCaches()
sourceProviderCacheMap.clear();
}
-struct StackPreservingRecompiler : public MarkedBlock::VoidFunctor {
- HashSet<FunctionExecutable*> currentlyExecutingFunctions;
- void operator()(JSCell* cell)
- {
- if (!cell->inherits(FunctionExecutable::info()))
- return;
- FunctionExecutable* executable = jsCast<FunctionExecutable*>(cell);
- if (currentlyExecutingFunctions.contains(executable))
- return;
- executable->clearCodeIfNotCompiling();
- }
-};
-
-void VM::releaseExecutableMemory()
+void VM::throwException(ExecState* exec, Exception* exception)
{
- prepareToDiscardCode();
-
- if (entryScope) {
- StackPreservingRecompiler recompiler;
- HeapIterationScope iterationScope(heap);
- HashSet<JSCell*> roots;
- heap.getConservativeRegisterRoots(roots);
- HashSet<JSCell*>::iterator end = roots.end();
- for (HashSet<JSCell*>::iterator ptr = roots.begin(); ptr != end; ++ptr) {
- ScriptExecutable* executable = 0;
- JSCell* cell = *ptr;
- if (cell->inherits(ScriptExecutable::info()))
- executable = static_cast<ScriptExecutable*>(*ptr);
- else if (cell->inherits(JSFunction::info())) {
- JSFunction* function = jsCast<JSFunction*>(*ptr);
- if (function->isHostFunction())
- continue;
- executable = function->jsExecutable();
- } else
- continue;
- ASSERT(executable->inherits(ScriptExecutable::info()));
- executable->unlinkCalls();
- if (executable->inherits(FunctionExecutable::info()))
- recompiler.currentlyExecutingFunctions.add(static_cast<FunctionExecutable*>(executable));
-
- }
- heap.objectSpace().forEachLiveCell<StackPreservingRecompiler>(iterationScope, recompiler);
+ if (Options::breakOnThrow()) {
+ CodeBlock* codeBlock = exec->codeBlock();
+ dataLog("Throwing exception in call frame ", RawPointer(exec), " for code block ");
+ if (codeBlock)
+ dataLog(*codeBlock, "\n");
+ else
+ dataLog("<nullptr>\n");
+ CRASH();
}
- m_regExpCache->invalidateCode();
- heap.collectAllGarbage();
-}
-static void appendSourceToError(CallFrame* callFrame, ErrorInstance* exception, unsigned bytecodeOffset)
-{
- exception->clearAppendSourceToMessage();
-
- if (!callFrame->codeBlock()->hasExpressionInfo())
- return;
-
- int startOffset = 0;
- int endOffset = 0;
- int divotPoint = 0;
- unsigned line = 0;
- unsigned column = 0;
-
- CodeBlock* codeBlock = callFrame->codeBlock();
- codeBlock->expressionRangeForBytecodeOffset(bytecodeOffset, divotPoint, startOffset, endOffset, line, column);
-
- int expressionStart = divotPoint - startOffset;
- int expressionStop = divotPoint + endOffset;
-
- const String& sourceString = codeBlock->source()->source();
- if (!expressionStop || expressionStart > static_cast<int>(sourceString.length()))
- return;
-
- VM* vm = &callFrame->vm();
- JSValue jsMessage = exception->getDirect(*vm, vm->propertyNames->message);
- if (!jsMessage || !jsMessage.isString())
- return;
-
- String message = asString(jsMessage)->value(callFrame);
-
- if (expressionStart < expressionStop)
- message = makeString(message, " (evaluating '", codeBlock->source()->getRange(expressionStart, expressionStop), "')");
- else {
- // No range information, so give a few characters of context.
- const StringImpl* data = sourceString.impl();
- int dataLength = sourceString.length();
- int start = expressionStart;
- int stop = expressionStart;
- // Get up to 20 characters of context to the left and right of the divot, clamping to the line.
- // Then strip whitespace.
- while (start > 0 && (expressionStart - start < 20) && (*data)[start - 1] != '\n')
- start--;
- while (start < (expressionStart - 1) && isStrWhiteSpace((*data)[start]))
- start++;
- while (stop < dataLength && (stop - expressionStart < 20) && (*data)[stop] != '\n')
- stop++;
- while (stop > expressionStart && isStrWhiteSpace((*data)[stop - 1]))
- stop--;
- message = makeString(message, " (near '...", codeBlock->source()->getRange(start, stop), "...')");
- }
-
- exception->putDirect(*vm, vm->propertyNames->message, jsString(vm, message));
+ ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());
+
+ interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);
+
+ setException(exception);
}
-
-JSValue VM::throwException(ExecState* exec, JSValue error)
+
+JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
- ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());
-
- Vector<StackFrame> stackTrace;
- interpreter->getStackTrace(stackTrace);
- m_exceptionStack = RefCountedArray<StackFrame>(stackTrace);
- m_exception = error;
-
- if (stackTrace.isEmpty() || !error.isObject())
- return error;
- JSObject* exception = asObject(error);
-
- StackFrame stackFrame;
- for (unsigned i = 0 ; i < stackTrace.size(); ++i) {
- stackFrame = stackTrace.at(i);
- if (stackFrame.bytecodeOffset)
- break;
- }
- unsigned bytecodeOffset = stackFrame.bytecodeOffset;
- if (!hasErrorInfo(exec, exception)) {
- // FIXME: We should only really be adding these properties to VM generated exceptions,
- // but the inspector currently requires these for all thrown objects.
- unsigned line;
- unsigned column;
- stackFrame.computeLineAndColumn(line, column);
- exception->putDirect(*this, Identifier(this, "line"), jsNumber(line), ReadOnly | DontDelete);
- exception->putDirect(*this, Identifier(this, "column"), jsNumber(column), ReadOnly | DontDelete);
- if (!stackFrame.sourceURL.isEmpty())
- exception->putDirect(*this, Identifier(this, "sourceURL"), jsString(this, stackFrame.sourceURL), ReadOnly | DontDelete);
- }
- if (exception->isErrorInstance() && static_cast<ErrorInstance*>(exception)->appendSourceToMessage()) {
- unsigned stackIndex = 0;
- CallFrame* callFrame;
- for (callFrame = exec; callFrame && !callFrame->codeBlock(); ) {
- stackIndex++;
- callFrame = callFrame->callerFrameSkippingVMEntrySentinel();
- }
- if (callFrame && callFrame->codeBlock()) {
- stackFrame = stackTrace.at(stackIndex);
- bytecodeOffset = stackFrame.bytecodeOffset;
- appendSourceToError(callFrame, static_cast<ErrorInstance*>(exception), bytecodeOffset);
- }
- }
+ VM& vm = exec->vm();
+ Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
+ if (!exception)
+ exception = Exception::create(*this, thrownValue);
- if (exception->hasProperty(exec, this->propertyNames->stack))
- return error;
-
- exception->putDirect(*this, propertyNames->stack, interpreter->stackTraceAsString(topCallFrame, stackTrace), DontEnum);
- return error;
+ throwException(exec, exception);
+ return JSValue(exception);
}
-
+
JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
return asObject(throwException(exec, JSValue(error)));
}
-void VM::getExceptionInfo(JSValue& exception, RefCountedArray<StackFrame>& exceptionStack)
-{
- exception = m_exception;
- exceptionStack = m_exceptionStack;
-}
-void VM::setExceptionInfo(JSValue& exception, RefCountedArray<StackFrame>& exceptionStack)
+
+void VM::setStackPointerAtVMEntry(void* sp)
{
- m_exception = exception;
- m_exceptionStack = exceptionStack;
+ m_stackPointerAtVMEntry = sp;
+ updateStackLimits();
}
-void VM::clearException()
+size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
- m_exception = JSValue();
+ size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
+ m_currentSoftReservedZoneSize = softReservedZoneSize;
+#if !ENABLE(JIT)
+ interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
+#endif
+
+ updateStackLimits();
+
+ return oldSoftReservedZoneSize;
}
-void VM:: clearExceptionStack()
+
+#if PLATFORM(WIN)
+// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
+// where the guard page is a barrier between committed and uncommitted memory.
+// When data from the guard page is read or written, the guard page is moved, and memory is committed.
+// This is how the system grows the stack.
+// When using the C stack on Windows we need to precommit the needed stack space.
+// Otherwise we might crash later if we access uncommitted stack memory.
+// This can happen if we allocate stack space larger than the page guard size (4K).
+// The system does not get the chance to move the guard page, and commit more memory,
+// and we crash if uncommitted memory is accessed.
+// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
+// when needed, see http://support.microsoft.com/kb/100775.
+// By touching every page up to the stack limit with a dummy operation,
+// we force the system to move the guard page, and commit memory.
+
+static void preCommitStackMemory(void* stackLimit)
{
- m_exceptionStack = RefCountedArray<StackFrame>();
+ const int pageSize = 4096;
+ for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
+ char ch = *p;
+ *p = ch;
+ }
}
+#endif
-void releaseExecutableMemory(VM& vm)
+inline void VM::updateStackLimits()
{
- vm.releaseExecutableMemory();
+#if PLATFORM(WIN)
+ void* lastSoftStackLimit = m_softStackLimit;
+#endif
+
+ size_t reservedZoneSize = Options::reservedZoneSize();
+ if (m_stackPointerAtVMEntry) {
+ ASSERT(wtfThreadData().stack().isGrowingDownward());
+ char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
+ m_softStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
+ m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
+ } else {
+ m_softStackLimit = wtfThreadData().stack().recursionLimit(m_currentSoftReservedZoneSize);
+ m_stackLimit = wtfThreadData().stack().recursionLimit(reservedZoneSize);
+ }
+
+#if PLATFORM(WIN)
+ // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
+ // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
+ // generated code which can allocate stack space that the C++ compiler does not know
+ // about. As such, we have to precommit that stack memory manually.
+ //
+ // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
+ // used exclusively by C++ code, and the C++ compiler will automatically commit the
+ // needed stack pages.
+ if (lastSoftStackLimit != m_softStackLimit)
+ preCommitStackMemory(m_softStackLimit);
+#endif
}
#if ENABLE(DFG_JIT)
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
- for (size_t i = 0; i < scratchBuffers.size(); i++) {
- ScratchBuffer* scratchBuffer = scratchBuffers[i];
+ for (auto* scratchBuffer : scratchBuffers) {
if (scratchBuffer->activeLength()) {
void* bufferStart = scratchBuffer->dataBuffer();
conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
}
}
}
+#endif
-DFG::Worklist* VM::ensureWorklist()
+void logSanitizeStack(VM* vm)
{
- if (!DFG::enableConcurrentJIT())
- return 0;
- if (!worklist)
- worklist = DFG::globalWorklist();
- return worklist.get();
+ if (Options::verboseSanitizeStack() && vm->topCallFrame) {
+ int dummy;
+ dataLog(
+ "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
+ ", current stack pointer at ", RawPointer(&dummy), ", in ",
+ pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
+ vm->topCallFrame->codeOrigin(), "\n");
+ }
}
-#endif
#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
+ gcProtect(regExp);
m_rtTraceList->add(regExp);
}
@@ -746,14 +755,16 @@ void VM::dumpRegExpTrace()
if (iter != m_rtTraceList->end()) {
dataLogF("\nRegExp Tracing\n");
- dataLogF(" match() matches\n");
- dataLogF("Regular Expression JIT Address calls found\n");
- dataLogF("----------------------------------------+----------------+----------+----------\n");
+ dataLogF("Regular Expression 8 Bit 16 Bit match() Matches Average\n");
+ dataLogF(" <Match only / Match> JIT Addr JIT Address calls found String len\n");
+ dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
unsigned reCount = 0;
- for (; iter != m_rtTraceList->end(); ++iter, ++reCount)
+ for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
(*iter)->printTraceData();
+ gcUnprotect(*iter);
+ }
dataLogF("%d Regular Expressions\n", reCount);
}
@@ -766,37 +777,170 @@ void VM::dumpRegExpTrace()
}
#endif
-void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
+WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
if (result.isNewEntry)
result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
- result.iterator->value->add(watchpoint);
+ return result.iterator->value.get();
+}
+
+void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
+{
+ ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}
void VM::addImpureProperty(const String& propertyName)
{
if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
- watchpointSet->fireAll();
+ watchpointSet->fireAll(*this, "Impure property added");
}
-class SetEnabledProfilerFunctor {
-public:
- bool operator()(CodeBlock* codeBlock)
- {
- if (codeBlock->jitType() == JITCode::DFGJIT)
- codeBlock->jettison();
- return false;
+static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
+{
+ bool needsToRecompile = false;
+ if (!counter) {
+ doEnableWork();
+ needsToRecompile = true;
}
-};
+ counter++;
+
+ return needsToRecompile;
+}
-void VM::setEnabledProfiler(LegacyProfiler* profiler)
+static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
- m_enabledProfiler = profiler;
- if (m_enabledProfiler) {
- SetEnabledProfilerFunctor functor;
- heap.forEachCodeBlock(functor);
+ RELEASE_ASSERT(counter > 0);
+ bool needsToRecompile = false;
+ counter--;
+ if (!counter) {
+ doDisableWork();
+ needsToRecompile = true;
}
+
+ return needsToRecompile;
+}
+
+bool VM::enableTypeProfiler()
+{
+ auto enableTypeProfiler = [this] () {
+ this->m_typeProfiler = std::make_unique<TypeProfiler>();
+ this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
+ };
+
+ return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}
+bool VM::disableTypeProfiler()
+{
+ auto disableTypeProfiler = [this] () {
+ this->m_typeProfiler.reset(nullptr);
+ this->m_typeProfilerLog.reset(nullptr);
+ };
+
+ return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
+}
+
+bool VM::enableControlFlowProfiler()
+{
+ auto enableControlFlowProfiler = [this] () {
+ this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
+ };
+
+ return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
+}
+
+bool VM::disableControlFlowProfiler()
+{
+ auto disableControlFlowProfiler = [this] () {
+ this->m_controlFlowProfiler.reset(nullptr);
+ };
+
+ return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
+}
+
+void VM::dumpTypeProfilerData()
+{
+ if (!typeProfiler())
+ return;
+
+ typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
+ typeProfiler()->dumpTypeProfilerData(*this);
+}
+
+void VM::queueMicrotask(JSGlobalObject* globalObject, Ref<Microtask>&& task)
+{
+ m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, WTFMove(task)));
+}
+
+void VM::drainMicrotasks()
+{
+ while (!m_microtaskQueue.isEmpty())
+ m_microtaskQueue.takeFirst()->run();
+}
+
+void QueuedTask::run()
+{
+ m_microtask->run(m_globalObject->globalExec());
+}
+
+void sanitizeStackForVM(VM* vm)
+{
+ logSanitizeStack(vm);
+#if !ENABLE(JIT)
+ vm->interpreter->cloopStack().sanitizeStack();
+#else
+ sanitizeStackForVMImpl(vm);
+#endif
+}
+
+size_t VM::committedStackByteCount()
+{
+#if ENABLE(JIT)
+ // When using the C stack, we don't know how many stack pages are actually
+ // committed. So, we use the current stack usage as an estimate.
+ ASSERT(wtfThreadData().stack().isGrowingDownward());
+ int8_t* current = reinterpret_cast<int8_t*>(&current);
+ int8_t* high = reinterpret_cast<int8_t*>(wtfThreadData().stack().origin());
+ return high - current;
+#else
+ return CLoopStack::committedByteCount();
+#endif
+}
+
+#if !ENABLE(JIT)
+bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
+{
+ return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
+}
+
+bool VM::isSafeToRecurseSoftCLoop() const
+{
+ return interpreter->cloopStack().isSafeToRecurse();
+}
+#endif // !ENABLE(JIT)
+
+#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
+void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
+{
+ if (!Options::validateExceptionChecks())
+ return;
+
+ if (UNLIKELY(m_needExceptionCheck)) {
+ auto throwDepth = m_simulatedThrowPointRecursionDepth;
+ auto& throwLocation = m_simulatedThrowPointLocation;
+
+ dataLog(
+ "ERROR: Unchecked JS exception:\n"
+ " This scope can throw a JS exception: ", throwLocation, "\n"
+ " (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
+ " But the exception was unchecked as of this scope: ", location, "\n"
+ " (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
+ "\n");
+
+ RELEASE_ASSERT(!m_needExceptionCheck);
+ }
+}
+#endif
+
} // namespace JSC