Diffstat (limited to 'Source/JavaScriptCore/heap/SlotVisitorInlines.h')
-rw-r--r--  Source/JavaScriptCore/heap/SlotVisitorInlines.h  239
1 file changed, 45 insertions, 194 deletions
diff --git a/Source/JavaScriptCore/heap/SlotVisitorInlines.h b/Source/JavaScriptCore/heap/SlotVisitorInlines.h
index ccd2e4ae1..06475a093 100644
--- a/Source/JavaScriptCore/heap/SlotVisitorInlines.h
+++ b/Source/JavaScriptCore/heap/SlotVisitorInlines.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2017 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -23,250 +23,101 @@
* THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef SlotVisitorInlines_h
-#define SlotVisitorInlines_h
+#pragma once
-#include "CopiedBlockInlines.h"
-#include "CopiedSpaceInlines.h"
-#include "Options.h"
#include "SlotVisitor.h"
#include "Weak.h"
#include "WeakInlines.h"
namespace JSC {
-ALWAYS_INLINE void SlotVisitor::append(JSValue* slot, size_t count)
+inline void SlotVisitor::appendUnbarriered(JSValue* slot, size_t count)
{
- for (size_t i = 0; i < count; ++i) {
- JSValue& value = slot[i];
- internalAppend(&value, value);
- }
+ for (size_t i = count; i--;)
+ appendUnbarriered(slot[i]);
}
-template<typename T>
-inline void SlotVisitor::appendUnbarrieredPointer(T** slot)
+inline void SlotVisitor::appendUnbarriered(JSCell* cell)
{
- ASSERT(slot);
- JSCell* cell = *slot;
- internalAppend(slot, cell);
-}
-
-ALWAYS_INLINE void SlotVisitor::append(JSValue* slot)
-{
- ASSERT(slot);
- internalAppend(slot, *slot);
-}
-
-ALWAYS_INLINE void SlotVisitor::appendUnbarrieredValue(JSValue* slot)
-{
- ASSERT(slot);
- internalAppend(slot, *slot);
-}
-
-ALWAYS_INLINE void SlotVisitor::append(JSCell** slot)
-{
- ASSERT(slot);
- internalAppend(slot, *slot);
+ appendUnbarriered(JSValue(cell));
}
template<typename T>
-ALWAYS_INLINE void SlotVisitor::appendUnbarrieredWeak(Weak<T>* weak)
-{
- ASSERT(weak);
- if (weak->get())
- internalAppend(0, weak->get());
-}
-
-ALWAYS_INLINE void SlotVisitor::internalAppend(void* from, JSValue value)
+inline void SlotVisitor::append(const Weak<T>& weak)
{
- if (!value || !value.isCell())
- return;
- internalAppend(from, value.asCell());
+ appendUnbarriered(weak.get());
}
-ALWAYS_INLINE void SlotVisitor::internalAppend(void* from, JSCell* cell)
-{
- ASSERT(!m_isCheckingForDefaultMarkViolation);
- if (!cell)
- return;
-#if ENABLE(ALLOCATION_LOGGING)
- dataLogF("JSC GC noticing reference from %p to %p.\n", from, cell);
-#else
- UNUSED_PARAM(from);
-#endif
-#if ENABLE(GC_VALIDATION)
- validate(cell);
-#endif
- if (Heap::testAndSetMarked(cell) || !cell->structure())
- return;
-
- m_bytesVisited += MarkedBlock::blockFor(cell)->cellSize();
-
- MARK_LOG_CHILD(*this, cell);
-
- unconditionallyAppend(cell);
-}
-
-ALWAYS_INLINE void SlotVisitor::unconditionallyAppend(JSCell* cell)
+template<typename T>
+inline void SlotVisitor::append(const WriteBarrierBase<T>& slot)
{
- ASSERT(Heap::isMarked(cell));
- m_visitCount++;
-
- // Should never attempt to mark something that is zapped.
- ASSERT(!cell->isZapped());
-
- m_stack.append(cell);
+ appendUnbarriered(slot.get());
}
-template<typename T> inline void SlotVisitor::append(WriteBarrierBase<T>* slot)
+template<typename T>
+inline void SlotVisitor::appendHidden(const WriteBarrierBase<T>& slot)
{
- internalAppend(slot, *slot->slot());
+ appendHidden(slot.get());
}
-template<typename Iterator> inline void SlotVisitor::append(Iterator begin, Iterator end)
+template<typename Iterator>
+inline void SlotVisitor::append(Iterator begin, Iterator end)
{
for (auto it = begin; it != end; ++it)
- append(&*it);
-}
-
-ALWAYS_INLINE void SlotVisitor::appendValues(WriteBarrierBase<Unknown>* barriers, size_t count)
-{
- append(barriers->slot(), count);
+ append(*it);
}
-inline void SlotVisitor::addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester)
+inline void SlotVisitor::appendValues(const WriteBarrierBase<Unknown>* barriers, size_t count)
{
- m_shared.m_weakReferenceHarvesters.addThreadSafe(weakReferenceHarvester);
+ for (size_t i = 0; i < count; ++i)
+ append(barriers[i]);
}
-inline void SlotVisitor::addUnconditionalFinalizer(UnconditionalFinalizer* unconditionalFinalizer)
+inline void SlotVisitor::appendValuesHidden(const WriteBarrierBase<Unknown>* barriers, size_t count)
{
- m_shared.m_unconditionalFinalizers.addThreadSafe(unconditionalFinalizer);
+ for (size_t i = 0; i < count; ++i)
+ appendHidden(barriers[i]);
}
-inline void SlotVisitor::addOpaqueRoot(void* root)
+inline void SlotVisitor::reportExtraMemoryVisited(size_t size)
{
-#if ENABLE(PARALLEL_GC)
- if (Options::numberOfGCMarkers() == 1) {
- // Put directly into the shared HashSet.
- m_shared.m_opaqueRoots.add(root);
- return;
+ if (m_isFirstVisit) {
+ heap()->reportExtraMemoryVisited(size);
+ m_nonCellVisitCount += size;
}
- // Put into the local set, but merge with the shared one every once in
- // a while to make sure that the local sets don't grow too large.
- mergeOpaqueRootsIfProfitable();
- m_opaqueRoots.add(root);
-#else
- m_opaqueRoots.add(root);
-#endif
}
-inline bool SlotVisitor::containsOpaqueRoot(void* root)
+#if ENABLE(RESOURCE_USAGE)
+inline void SlotVisitor::reportExternalMemoryVisited(size_t size)
{
- ASSERT(!m_isInParallelMode);
-#if ENABLE(PARALLEL_GC)
- ASSERT(m_opaqueRoots.isEmpty());
- return m_shared.m_opaqueRoots.contains(root);
-#else
- return m_opaqueRoots.contains(root);
-#endif
+ if (m_isFirstVisit)
+ heap()->reportExternalMemoryVisited(size);
}
-
-inline TriState SlotVisitor::containsOpaqueRootTriState(void* root)
-{
- if (m_opaqueRoots.contains(root))
- return TrueTriState;
- MutexLocker locker(m_shared.m_opaqueRootsLock);
- if (m_shared.m_opaqueRoots.contains(root))
- return TrueTriState;
- return MixedTriState;
-}
-
-inline int SlotVisitor::opaqueRootCount()
-{
- ASSERT(!m_isInParallelMode);
-#if ENABLE(PARALLEL_GC)
- ASSERT(m_opaqueRoots.isEmpty());
- return m_shared.m_opaqueRoots.size();
-#else
- return m_opaqueRoots.size();
#endif
-}
-inline void SlotVisitor::mergeOpaqueRootsIfNecessary()
-{
- if (m_opaqueRoots.isEmpty())
- return;
- mergeOpaqueRoots();
-}
-
-inline void SlotVisitor::mergeOpaqueRootsIfProfitable()
-{
- if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold())
- return;
- mergeOpaqueRoots();
-}
-
-inline void SlotVisitor::donate()
+inline Heap* SlotVisitor::heap() const
{
- ASSERT(m_isInParallelMode);
- if (Options::numberOfGCMarkers() == 1)
- return;
-
- donateKnownParallel();
+ return &m_heap;
}
-inline void SlotVisitor::donateAndDrain()
+inline VM& SlotVisitor::vm()
{
- donate();
- drain();
+ return *m_heap.m_vm;
}
-inline void SlotVisitor::copyLater(JSCell* owner, CopyToken token, void* ptr, size_t bytes)
+inline const VM& SlotVisitor::vm() const
{
- ASSERT(bytes);
- CopiedBlock* block = CopiedSpace::blockFor(ptr);
- if (block->isOversize()) {
- m_shared.m_copiedSpace->pin(block);
- return;
- }
-
- SpinLockHolder locker(&block->workListLock());
- if (heap()->operationInProgress() == FullCollection || block->shouldReportLiveBytes(locker, owner)) {
- m_bytesCopied += bytes;
- block->reportLiveBytes(locker, owner, token, bytes);
- }
-}
-
-inline void SlotVisitor::reportExtraMemoryUsage(JSCell* owner, size_t size)
-{
-#if ENABLE(GGC)
- // We don't want to double-count the extra memory that was reported in previous collections.
- if (heap()->operationInProgress() == EdenCollection && MarkedBlock::blockFor(owner)->isRemembered(owner))
- return;
-#else
- UNUSED_PARAM(owner);
-#endif
-
- size_t* counter = &m_shared.m_vm->heap.m_extraMemoryUsage;
-
-#if ENABLE(COMPARE_AND_SWAP)
- for (;;) {
- size_t oldSize = *counter;
- if (WTF::weakCompareAndSwapSize(counter, oldSize, oldSize + size))
- return;
- }
-#else
- (*counter) += size;
-#endif
+ return *m_heap.m_vm;
}
-inline Heap* SlotVisitor::heap() const
+template<typename Func>
+IterationStatus SlotVisitor::forEachMarkStack(const Func& func)
{
- return &sharedData().m_vm->heap;
+ if (func(m_collectorStack) == IterationStatus::Done)
+ return IterationStatus::Done;
+ if (func(m_mutatorStack) == IterationStatus::Done)
+ return IterationStatus::Done;
+ return IterationStatus::Continue;
}
} // namespace JSC
-
-#endif // SlotVisitorInlines_h
-
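
For context on the API change in this patch: the old pointer-taking overloads (append(WriteBarrierBase<T>*), appendUnbarrieredValue(JSValue*), appendUnbarrieredWeak(Weak<T>*)) are replaced by reference-taking append()/appendUnbarriered() overloads that all funnel into a single appendUnbarriered(JSValue) path. Below is a minimal sketch of how a visitChildren() implementation would call the new signatures. The class and member names (MyObject, m_cachedString, m_unbarrieredValue) are hypothetical and not part of this patch, and the ClassInfo/Structure boilerplate a real JSC cell class needs is omitted.

#include "JSCInlines.h"

namespace JSC {

// Hypothetical cell type used only to illustrate the new SlotVisitor calls.
class MyObject final : public JSObject {
public:
    using Base = JSObject;

    static void visitChildren(JSCell* cell, SlotVisitor& visitor)
    {
        MyObject* thisObject = jsCast<MyObject*>(cell);
        Base::visitChildren(thisObject, visitor);

        // Before this patch: visitor.append(&thisObject->m_cachedString);
        // After: the WriteBarrier is passed by const reference.
        visitor.append(thisObject->m_cachedString);

        // Values held without a write barrier go through appendUnbarriered().
        visitor.appendUnbarriered(thisObject->m_unbarrieredValue);
    }

private:
    WriteBarrier<JSString> m_cachedString;
    JSValue m_unbarrieredValue; // Illustrative only; most fields would use WriteBarrier.
};

} // namespace JSC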