Unreviewed, rolling out r161540.

http://trac.webkit.org/changeset/161540
https://bugs.webkit.org/show_bug.cgi?id=126704

Caused assertion failures on multiple tests (Requested by ap
on #webkit).

Source/JavaScriptCore:

* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::visitAggregate):
* bytecode/CodeBlock.h:
(JSC::CodeBlockSet::mark):
* dfg/DFGOperations.cpp:
* heap/CodeBlockSet.cpp:
(JSC::CodeBlockSet::add):
(JSC::CodeBlockSet::traceMarked):
* heap/CodeBlockSet.h:
* heap/CopiedBlockInlines.h:
(JSC::CopiedBlock::reportLiveBytes):
* heap/CopiedSpace.cpp:
* heap/CopiedSpace.h:
* heap/Heap.cpp:
(JSC::Heap::Heap):
(JSC::Heap::didAbandon):
(JSC::Heap::markRoots):
(JSC::Heap::copyBackingStores):
(JSC::Heap::collectAllGarbage):
(JSC::Heap::collect):
(JSC::Heap::didAllocate):
* heap/Heap.h:
(JSC::Heap::shouldCollect):
(JSC::Heap::isCollecting):
(JSC::Heap::isWriteBarrierEnabled):
(JSC::Heap::writeBarrier):
* heap/HeapOperation.h:
* heap/MarkStack.cpp:
(JSC::MarkStackArray::~MarkStackArray):
* heap/MarkStack.h:
* heap/MarkedAllocator.cpp:
(JSC::MarkedAllocator::isPagedOut):
(JSC::MarkedAllocator::tryAllocateHelper):
(JSC::MarkedAllocator::addBlock):
(JSC::MarkedAllocator::removeBlock):
* heap/MarkedAllocator.h:
(JSC::MarkedAllocator::MarkedAllocator):
(JSC::MarkedAllocator::reset):
* heap/MarkedBlock.cpp:
* heap/MarkedBlock.h:
(JSC::MarkedBlock::lastChanceToFinalize):
(JSC::MarkedBlock::didConsumeEmptyFreeList):
(JSC::MarkedBlock::clearMarks):
* heap/MarkedSpace.cpp:
(JSC::MarkedSpace::~MarkedSpace):
(JSC::MarkedSpace::resetAllocators):
(JSC::MarkedSpace::visitWeakSets):
(JSC::MarkedSpace::reapWeakSets):
* heap/MarkedSpace.h:
(JSC::ClearMarks::operator()):
(JSC::MarkedSpace::clearMarks):
* heap/SlotVisitor.cpp:
(JSC::SlotVisitor::~SlotVisitor):
* heap/SlotVisitor.h:
(JSC::SlotVisitor::sharedData):
* heap/SlotVisitorInlines.h:
(JSC::SlotVisitor::internalAppend):
(JSC::SlotVisitor::copyLater):
(JSC::SlotVisitor::reportExtraMemoryUsage):
* jit/Repatch.cpp:
* runtime/JSGenericTypedArrayViewInlines.h:
(JSC::JSGenericTypedArrayView<Adaptor>::visitChildren):
* runtime/JSPropertyNameIterator.h:
(JSC::StructureRareData::setEnumerationCache):
* runtime/JSString.cpp:
(JSC::JSString::visitChildren):
* runtime/StructureRareDataInlines.h:
(JSC::StructureRareData::setPreviousID):
(JSC::StructureRareData::setObjectToStringValue):
* runtime/WeakMapData.cpp:
(JSC::WeakMapData::visitChildren):

Source/WTF:

* wtf/Bitmap.h:
(WTF::Bitmap<size, WordType>::count):

git-svn-id: http://svn.webkit.org/repository/webkit/trunk@161557 268f45cc-cd09-0410-ab3c-d52691b4dbfc
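
Context for the diff below: r161540 introduced generational-GC plumbing — separate EdenCollection/FullCollection heap operations, per-cycle allocation accounting (m_bytesAllocatedThisCycle, m_maxEdenSize), and a remembered set fed by write barriers — and this rollout restores the single Collection mode with one m_bytesAllocatedLimit. The following is a rough, free-standing sketch of the eden-sizing policy being removed; the names mirror fields visible in the hunks below, not the real JSC Heap interface:

#include <cstddef>

// Illustrative sketch only; hypothetical stand-in for the rolled-out logic.
enum class HeapOperation { NoOperation, EdenCollection, FullCollection };

struct GenerationalSizing {
    size_t bytesAllocatedThisCycle { 0 }; // cleared after every collection
    size_t maxEdenSize { 0 };             // eden budget: maxHeapSize - live bytes
    size_t maxHeapSize { 0 };
    size_t sizeAfterLastCollect { 0 };
    bool shouldDoFullCollection { false };

    // Mirrors Heap::shouldCollect() in the rolled-out code: trigger a
    // collection once this cycle's allocations exhaust the eden budget.
    bool shouldCollect() const { return bytesAllocatedThisCycle > maxEdenSize; }

    // Mirrors the resizing at the end of Heap::collect(): a full collection
    // recomputes the heap budget; an eden collection grows it by the bytes
    // that survived, and requests a full collection when eden would drop
    // below one third of the heap.
    void didCollect(HeapOperation op, size_t currentHeapSize)
    {
        if (op == HeapOperation::FullCollection) {
            maxHeapSize = 2 * currentHeapSize; // stand-in for proportionalHeapSize()
            shouldDoFullCollection = false;
        } else {
            maxHeapSize += currentHeapSize - sizeAfterLastCollect;
            if (double(maxHeapSize - currentHeapSize) / double(maxHeapSize) < 1.0 / 3.0)
                shouldDoFullCollection = true;
        }
        maxEdenSize = maxHeapSize - currentHeapSize;
        sizeAfterLastCollect = currentHeapSize;
        bytesAllocatedThisCycle = 0;
    }
};

The restored code instead keeps a single allocation limit, recomputed after every (always full) collection, as the Heap.cpp and Heap.h hunks show.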
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.cpp b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
--- a/Source/JavaScriptCore/bytecode/CodeBlock.cpp
+++ b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
@@ -1954,15 +1954,15 @@ void CodeBlock::visitAggregate(SlotVisitor& visitor)
     if (CodeBlock* otherBlock = specialOSREntryBlockOrNull())
         otherBlock->visitAggregate(visitor);
 
-    visitor.reportExtraMemoryUsage(ownerExecutable(), sizeof(CodeBlock));
+    visitor.reportExtraMemoryUsage(sizeof(CodeBlock));
     if (m_jitCode)
-        visitor.reportExtraMemoryUsage(ownerExecutable(), m_jitCode->size());
+        visitor.reportExtraMemoryUsage(m_jitCode->size());
     if (m_instructions.size()) {
         // Divide by refCount() because m_instructions points to something that is shared
         // by multiple CodeBlocks, and we only want to count it towards the heap size once.
         // Having each CodeBlock report only its proportional share of the size is one way
         // of accomplishing this.
-        visitor.reportExtraMemoryUsage(ownerExecutable(), m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
+        visitor.reportExtraMemoryUsage(m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
     }
 
     visitor.append(&m_unlinkedCode);
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.h b/Source/JavaScriptCore/bytecode/CodeBlock.h
--- a/Source/JavaScriptCore/bytecode/CodeBlock.h
+++ b/Source/JavaScriptCore/bytecode/CodeBlock.h
@@ -1269,7 +1269,6 @@ inline void CodeBlockSet::mark(void* candidateCodeBlock)
         return;
 
     (*iter)->m_mayBeExecuting = true;
-    m_currentlyExecuting.append(static_cast<CodeBlock*>(candidateCodeBlock));
 }
 
 } // namespace JSC
diff --git a/Source/JavaScriptCore/dfg/DFGOperations.cpp b/Source/JavaScriptCore/dfg/DFGOperations.cpp
--- a/Source/JavaScriptCore/dfg/DFGOperations.cpp
+++ b/Source/JavaScriptCore/dfg/DFGOperations.cpp
@@ -850,7 +850,6 @@ char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitial
     NativeCallFrameTracer tracer(&vm, exec);
 
     ASSERT(!object->structure()->outOfLineCapacity());
-    DeferGC deferGC(vm.heap);
     Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
     object->setButterflyWithoutChangingStructure(vm, result);
     return reinterpret_cast<char*>(result);
@@ -861,7 +860,6 @@ char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState*
     VM& vm = exec->vm();
     NativeCallFrameTracer tracer(&vm, exec);
 
-    DeferGC deferGC(vm.heap);
     Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
     object->setButterflyWithoutChangingStructure(vm, result);
     return reinterpret_cast<char*>(result);
diff --git a/Source/JavaScriptCore/heap/CodeBlockSet.cpp b/Source/JavaScriptCore/heap/CodeBlockSet.cpp
--- a/Source/JavaScriptCore/heap/CodeBlockSet.cpp
+++ b/Source/JavaScriptCore/heap/CodeBlockSet.cpp
@@ -45,8 +45,7 @@ CodeBlockSet::~CodeBlockSet()
 
 void CodeBlockSet::add(PassRefPtr<CodeBlock> codeBlock)
 {
-    CodeBlock* block = codeBlock.leakRef();
-    bool isNewEntry = m_set.add(block).isNewEntry;
+    bool isNewEntry = m_set.add(codeBlock.leakRef()).isNewEntry;
     ASSERT_UNUSED(isNewEntry, isNewEntry);
 }
 
@@ -102,16 +101,9 @@ void CodeBlockSet::traceMarked(SlotVisitor& visitor)
         CodeBlock* codeBlock = *iter;
         if (!codeBlock->m_mayBeExecuting)
             continue;
-        codeBlock->ownerExecutable()->visitChildren(codeBlock->ownerExecutable(), visitor);
+        codeBlock->visitAggregate(visitor);
     }
 }
 
-void CodeBlockSet::rememberCurrentlyExecutingCodeBlocks(Heap* heap)
-{
-    for (size_t i = 0; i < m_currentlyExecuting.size(); ++i)
-        heap->addToRememberedSet(m_currentlyExecuting[i]->ownerExecutable());
-    m_currentlyExecuting.clear();
-}
-
 } // namespace JSC
diff --git a/Source/JavaScriptCore/heap/CodeBlockSet.h b/Source/JavaScriptCore/heap/CodeBlockSet.h
--- a/Source/JavaScriptCore/heap/CodeBlockSet.h
+++ b/Source/JavaScriptCore/heap/CodeBlockSet.h
@@ -30,12 +30,10 @@
 #include <wtf/Noncopyable.h>
 #include <wtf/PassRefPtr.h>
 #include <wtf/RefPtr.h>
-#include <wtf/Vector.h>
 
 namespace JSC {
 
 class CodeBlock;
-class Heap;
 class SlotVisitor;
 
 // CodeBlockSet tracks all CodeBlocks. Every CodeBlock starts out with one
@@ -67,16 +65,11 @@ public:
     // mayBeExecuting.
     void traceMarked(SlotVisitor&);
 
-    // Add all currently executing CodeBlocks to the remembered set to be
-    // re-scanned during the next collection.
-    void rememberCurrentlyExecutingCodeBlocks(Heap*);
-
 private:
     // This is not a set of RefPtr<CodeBlock> because we need to be able to find
    // arbitrary bogus pointers. I could have written a thingy that had peek types
     // and all, but that seemed like overkill.
     HashSet<CodeBlock* > m_set;
-    Vector<CodeBlock*> m_currentlyExecuting;
 };
 
 } // namespace JSC
diff --git a/Source/JavaScriptCore/heap/CopiedBlockInlines.h b/Source/JavaScriptCore/heap/CopiedBlockInlines.h
--- a/Source/JavaScriptCore/heap/CopiedBlockInlines.h
+++ b/Source/JavaScriptCore/heap/CopiedBlockInlines.h
@@ -42,9 +42,6 @@ inline void CopiedBlock::reportLiveBytes(JSCell* owner, CopyToken token, unsigne
 #endif
     m_liveBytes += bytes;
 
-    if (isPinned())
-        return;
-
     if (!shouldEvacuate()) {
         pin();
         return;
diff --git a/Source/JavaScriptCore/heap/CopiedSpace.cpp b/Source/JavaScriptCore/heap/CopiedSpace.cpp
--- a/Source/JavaScriptCore/heap/CopiedSpace.cpp
+++ b/Source/JavaScriptCore/heap/CopiedSpace.cpp
@@ -316,17 +316,4 @@ bool CopiedSpace::isPagedOut(double deadline)
         || isBlockListPagedOut(deadline, &m_oversizeBlocks);
 }
 
-void CopiedSpace::didStartFullCollection()
-{
-    ASSERT(heap()->operationInProgress() == FullCollection);
-    ASSERT(m_fromSpace->isEmpty());
-
-    for (CopiedBlock* block = m_toSpace->head(); block; block = block->next())
-        block->didSurviveGC();
-
-    for (CopiedBlock* block = m_oversizeBlocks.head(); block; block = block->next())
-        block->didSurviveGC();
-}
-
 } // namespace JSC
diff --git a/Source/JavaScriptCore/heap/CopiedSpace.h b/Source/JavaScriptCore/heap/CopiedSpace.h
--- a/Source/JavaScriptCore/heap/CopiedSpace.h
+++ b/Source/JavaScriptCore/heap/CopiedSpace.h
@@ -60,8 +60,6 @@ public:
     CopiedAllocator& allocator() { return m_allocator; }
 
-    void didStartFullCollection();
-
     void startedCopying();
     void doneCopying();
     bool isInCopyPhase() { return m_inCopyingPhase; }
@@ -82,8 +80,6 @@ public:
     static CopiedBlock* blockFor(void*);
 
     Heap* heap() const { return m_heap; }
 
 private:
     static bool isOversize(size_t);
diff --git a/Source/JavaScriptCore/heap/Heap.cpp b/Source/JavaScriptCore/heap/Heap.cpp
--- a/Source/JavaScriptCore/heap/Heap.cpp
+++ b/Source/JavaScriptCore/heap/Heap.cpp
@@ -253,11 +253,9 @@ Heap::Heap(VM* vm, HeapType heapType)
     , m_ramSize(ramSize())
     , m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize))
     , m_sizeAfterLastCollect(0)
-    , m_bytesAllocatedThisCycle(0)
-    , m_bytesAbandonedThisCycle(0)
-    , m_maxEdenSize(m_minBytesPerCycle)
-    , m_maxHeapSize(m_minBytesPerCycle)
-    , m_shouldDoFullCollection(false)
+    , m_bytesAllocatedLimit(m_minBytesPerCycle)
+    , m_bytesAllocated(0)
+    , m_bytesAbandoned(0)
     , m_totalBytesVisited(0)
     , m_totalBytesCopied(0)
     , m_operationInProgress(NoOperation)
@@ -271,7 +269,7 @@ Heap::Heap(VM* vm, HeapType heapType)
     , m_copyVisitor(m_sharedData)
     , m_handleSet(vm)
     , m_isSafeToCollect(false)
-    , m_writeBarrierBuffer(256)
+    , m_writeBarrierBuffer(128)
     , m_vm(vm)
     , m_lastGCLength(0)
     , m_lastCodeDiscardTime(WTF::monotonicallyIncreasingTime())
@@ -334,8 +332,8 @@ void Heap::reportAbandonedObjectGraph()
 void Heap::didAbandon(size_t bytes)
 {
     if (m_activityCallback)
-        m_activityCallback->didAllocate(m_bytesAllocatedThisCycle + m_bytesAbandonedThisCycle);
-    m_bytesAbandonedThisCycle += bytes;
+        m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
+    m_bytesAbandoned += bytes;
 }
 
 void Heap::protect(JSValue k)
@@ -489,9 +487,6 @@ void Heap::markRoots()
     visitor.setup();
     HeapRootVisitor heapRootVisitor(visitor);
 
-    Vector<const JSCell*> rememberedSet(m_slotVisitor.markStack().size());
-    m_slotVisitor.markStack().fillVector(rememberedSet);
-
     {
         ParallelModeEnabler enabler(visitor);
@@ -595,14 +590,6 @@ void Heap::markRoots()
         }
     }
 
-    {
-        GCPHASE(ClearRememberedSet);
-        for (unsigned i = 0; i < rememberedSet.size(); ++i) {
-            const JSCell* cell = rememberedSet[i];
-            MarkedBlock::blockFor(cell)->clearRemembered(cell);
-        }
-    }
-
     GCCOUNTER(VisitedValueCount, visitor.visitCount());
 
     m_sharedData.didFinishMarking();
@@ -614,14 +601,8 @@ void Heap::markRoots()
     MARK_LOG_MESSAGE2("\nNumber of live Objects after full GC %lu, took %.6f secs\n", visitCount, WTF::monotonicallyIncreasingTime() - gcStartTime);
 #endif
 
-    if (m_operationInProgress == EdenCollection) {
-        m_totalBytesVisited += visitor.bytesVisited();
-        m_totalBytesCopied += visitor.bytesCopied();
-    } else {
-        ASSERT(m_operationInProgress == FullCollection);
-        m_totalBytesVisited = visitor.bytesVisited();
-        m_totalBytesCopied = visitor.bytesCopied();
-    }
+    m_totalBytesVisited = visitor.bytesVisited();
+    m_totalBytesCopied = visitor.bytesCopied();
 #if ENABLE(PARALLEL_GC)
     m_totalBytesVisited += m_sharedData.childBytesVisited();
     m_totalBytesCopied += m_sharedData.childBytesCopied();
@@ -634,12 +615,8 @@ void Heap::markRoots()
     m_sharedData.reset();
 }
 
-template <HeapOperation collectionType>
 void Heap::copyBackingStores()
 {
-    if (collectionType == EdenCollection)
-        return;
-
     m_storageSpace.startedCopying();
     if (m_storageSpace.shouldDoCopyPhase()) {
         m_sharedData.didStartCopying();
@@ -650,7 +627,7 @@ void Heap::copyBackingStores()
         // before signaling that the phase is complete.
         m_storageSpace.doneCopying();
         m_sharedData.didFinishCopying();
-    } else
+    } else 
         m_storageSpace.doneCopying();
 }
@@ -746,23 +723,11 @@ void Heap::deleteUnmarkedCompiledCode()
     m_jitStubRoutines.deleteUnmarkedJettisonedStubRoutines();
 }
 
-void Heap::addToRememberedSet(const JSCell* cell)
-{
-    ASSERT(cell);
-    ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
-    ASSERT(isMarked(cell));
-    if (isInRememberedSet(cell))
-        return;
-    MarkedBlock::blockFor(cell)->setRemembered(cell);
-    m_slotVisitor.unconditionallyAppend(const_cast<JSCell*>(cell));
-}
-
 void Heap::collectAllGarbage()
 {
     if (!m_isSafeToCollect)
         return;
 
-    m_shouldDoFullCollection = true;
     collect();
 
     SamplingRegion samplingRegion("Garbage Collection: Sweeping");
@@ -799,28 +764,9 @@ void Heap::collect()
         RecursiveAllocationScope scope(*this);
         m_vm->prepareToDiscardCode();
     }
 
-    bool isFullCollection = m_shouldDoFullCollection;
-    if (isFullCollection) {
-        m_operationInProgress = FullCollection;
-        m_slotVisitor.clearMarkStack();
-        m_shouldDoFullCollection = false;
-        if (Options::logGC())
-            dataLog("FullCollection, ");
-    } else {
-#if ENABLE(GGC)
-        m_operationInProgress = EdenCollection;
-        if (Options::logGC())
-            dataLog("EdenCollection, ");
-#else
-        m_operationInProgress = FullCollection;
-        m_slotVisitor.clearMarkStack();
-        if (Options::logGC())
-            dataLog("FullCollection, ");
-#endif
-    }
-    if (m_operationInProgress == FullCollection)
-        m_extraMemoryUsage = 0;
+    m_operationInProgress = Collection;
+    m_extraMemoryUsage = 0;
 
     if (m_activityCallback)
         m_activityCallback->willCollect();
@@ -834,16 +780,6 @@ void Heap::collect()
     {
         GCPHASE(StopAllocation);
         m_objectSpace.stopAllocating();
-        if (m_operationInProgress == FullCollection)
-            m_storageSpace.didStartFullCollection();
-    }
-
-    {
-        GCPHASE(FlushWriteBarrierBuffer);
-        if (m_operationInProgress == EdenCollection)
-            m_writeBarrierBuffer.flush(*this);
-        else
-            m_writeBarrierBuffer.reset();
     }
 
     markRoots();
@@ -860,16 +796,13 @@ void Heap::collect()
         m_arrayBuffers.sweep();
     }
 
-    if (m_operationInProgress == FullCollection) {
+    {
         m_blockSnapshot.resize(m_objectSpace.blocks().set().size());
         MarkedBlockSnapshotFunctor functor(m_blockSnapshot);
         m_objectSpace.forEachBlock(functor);
     }
 
-    if (m_operationInProgress == FullCollection)
-        copyBackingStores<FullCollection>();
-    else
-        copyBackingStores<EdenCollection>();
+    copyBackingStores();
 
     {
         GCPHASE(FinalizeUnconditionalFinalizers);
@@ -886,15 +819,8 @@ void Heap::collect()
         m_vm->clearSourceProviderCaches();
     }
 
-    if (m_operationInProgress == FullCollection)
-        m_sweeper->startSweeping(m_blockSnapshot);
-
-    {
-        GCPHASE(AddCurrentlyExecutingCodeBlocksToRememberedSet);
-        m_codeBlocks.rememberCurrentlyExecutingCodeBlocks(this);
-    }
-
-    m_bytesAbandonedThisCycle = 0;
+    m_sweeper->startSweeping(m_blockSnapshot);
+    m_bytesAbandoned = 0;
 
     {
         GCPHASE(ResetAllocators);
@@ -905,32 +831,21 @@ void Heap::collect()
     if (Options::gcMaxHeapSize() && currentHeapSize > Options::gcMaxHeapSize())
         HeapStatistics::exitWithFailure();
 
-    if (m_operationInProgress == FullCollection) {
-        // To avoid pathological GC churn in very small and very large heaps, we set
-        // the new allocation limit based on the current size of the heap, with a
-        // fixed minimum.
-        m_maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
-        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
-    } else {
-        ASSERT(currentHeapSize >= m_sizeAfterLastCollect);
-        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
-        double edenToOldGenerationRatio = (double)m_maxEdenSize / (double)m_maxHeapSize;
-        double minEdenToOldGenerationRatio = 1.0 / 3.0;
-        if (edenToOldGenerationRatio < minEdenToOldGenerationRatio)
-            m_shouldDoFullCollection = true;
-        m_maxHeapSize += currentHeapSize - m_sizeAfterLastCollect;
-        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
-    }
-
     m_sizeAfterLastCollect = currentHeapSize;
-    m_bytesAllocatedThisCycle = 0;
+
+    // To avoid pathological GC churn in very small and very large heaps, we set
+    // the new allocation limit based on the current size of the heap, with a
+    // fixed minimum.
+    size_t maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
+    m_bytesAllocatedLimit = maxHeapSize - currentHeapSize;
+    m_bytesAllocated = 0;
     double lastGCEndTime = WTF::monotonicallyIncreasingTime();
     m_lastGCLength = lastGCEndTime - lastGCStartTime;
 
     if (Options::recordGCPauseTimes())
         HeapStatistics::recordGCPauseTime(lastGCStartTime, lastGCEndTime);
-    RELEASE_ASSERT(m_operationInProgress == EdenCollection || m_operationInProgress == FullCollection);
+    RELEASE_ASSERT(m_operationInProgress == Collection);
     m_operationInProgress = NoOperation;
     JAVASCRIPTCORE_GC_END();
@@ -948,6 +863,10 @@ void Heap::collect()
         double after = currentTimeMS();
         dataLog(after - before, " ms, ", currentHeapSize / 1024, " kb]\n");
     }
+
+#if ENABLE(ALLOCATION_LOGGING)
+    dataLogF("JSC GC finishing collection.\n");
+#endif
 }
@@ -997,8 +916,8 @@ void Heap::setGarbageCollectionTimerEnabled(bool enable)
 void Heap::didAllocate(size_t bytes)
 {
     if (m_activityCallback)
-        m_activityCallback->didAllocate(m_bytesAllocatedThisCycle + m_bytesAbandonedThisCycle);
-    m_bytesAllocatedThisCycle += bytes;
+        m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
+    m_bytesAllocated += bytes;
 }
 
 bool Heap::isValidAllocation(size_t)
@@ -1075,15 +994,6 @@ void Heap::decrementDeferralDepthAndGCIfNeeded()
     collectIfNecessaryOrDefer();
 }
 
-void Heap::writeBarrier(const JSCell* from)
-{
-    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
-    if (!from || !isMarked(from))
-        return;
-    Heap* heap = Heap::heap(from);
-    heap->addToRememberedSet(from);
-}
-
 void Heap::flushWriteBarrierBuffer(JSCell* cell)
 {
 #if ENABLE(GGC)
diff --git a/Source/JavaScriptCore/heap/Heap.h b/Source/JavaScriptCore/heap/Heap.h
--- a/Source/JavaScriptCore/heap/Heap.h
+++ b/Source/JavaScriptCore/heap/Heap.h
@@ -94,17 +94,11 @@ namespace JSC {
         static bool testAndSetMarked(const void*);
         static void setMarked(const void*);
 
-        JS_EXPORT_PRIVATE void addToRememberedSet(const JSCell*);
-        bool isInRememberedSet(const JSCell* cell) const
-        {
-            ASSERT(cell);
-            ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
-            return MarkedBlock::blockFor(cell)->isRemembered(cell);
-        }
-
         static bool isWriteBarrierEnabled();
-        JS_EXPORT_PRIVATE static void writeBarrier(const JSCell*);
+        static void writeBarrier(const JSCell*);
         static void writeBarrier(const JSCell*, JSValue);
         static void writeBarrier(const JSCell*, JSCell*);
+        static uint8_t* addressOfCardFor(JSCell*);
 
         WriteBarrierBuffer& writeBarrierBuffer() { return m_writeBarrierBuffer; }
         void flushWriteBarrierBuffer(JSCell*);
@@ -126,7 +120,6 @@ namespace JSC {
         // true if collection is in progress
         inline bool isCollecting();
-        inline HeapOperation operationInProgress() { return m_operationInProgress; }
         // true if an allocation or collection is in progress
         inline bool isBusy();
@@ -243,7 +236,6 @@ namespace JSC {
         void markRoots();
         void markProtectedObjects(HeapRootVisitor&);
         void markTempSortVectors(HeapRootVisitor&);
-        template <HeapOperation collectionType>
         void copyBackingStores();
         void harvestWeakReferences();
         void finalizeUnconditionalFinalizers();
@@ -265,11 +257,10 @@ namespace JSC {
         const size_t m_minBytesPerCycle;
         size_t m_sizeAfterLastCollect;
 
-        size_t m_bytesAllocatedThisCycle;
-        size_t m_bytesAbandonedThisCycle;
-        size_t m_maxEdenSize;
-        size_t m_maxHeapSize;
-        bool m_shouldDoFullCollection;
+        size_t m_bytesAllocatedLimit;
+        size_t m_bytesAllocated;
+        size_t m_bytesAbandoned;
         size_t m_totalBytesVisited;
         size_t m_totalBytesCopied;
@@ -280,8 +271,6 @@ namespace JSC {
         GCIncomingRefCountedSet<ArrayBuffer> m_arrayBuffers;
         size_t m_extraMemoryUsage;
 
-        HashSet<const JSCell*> m_copyingRememberedSet;
-
         ProtectCountSet m_protectedValues;
         Vector<Vector<ValueStringPair, 0, UnsafeVectorOverflow>* > m_tempSortingVectors;
         OwnPtr<HashSet<MarkedArgumentBuffer*>> m_markListSet;
@@ -333,8 +322,8 @@ namespace JSC {
         if (isDeferred())
             return false;
         if (Options::gcMaxHeapSize())
-            return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
-        return m_bytesAllocatedThisCycle > m_maxEdenSize && m_isSafeToCollect && m_operationInProgress == NoOperation;
+            return m_bytesAllocated > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
+        return m_bytesAllocated > m_bytesAllocatedLimit && m_isSafeToCollect && m_operationInProgress == NoOperation;
     }
 
     bool Heap::isBusy()
@@ -344,7 +333,7 @@ namespace JSC {
 
     bool Heap::isCollecting()
     {
-        return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
+        return m_operationInProgress == Collection;
     }
 
     inline Heap* Heap::heap(const JSCell* cell)
@@ -381,33 +370,26 @@ namespace JSC {
     inline bool Heap::isWriteBarrierEnabled()
     {
-#if ENABLE(WRITE_BARRIER_PROFILING) || ENABLE(GGC)
+#if ENABLE(WRITE_BARRIER_PROFILING)
         return true;
 #else
         return false;
 #endif
     }
 
-    inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
+    inline void Heap::writeBarrier(const JSCell*)
     {
 #if ENABLE(WRITE_BARRIER_PROFILING)
         WriteBarrierCounters::countWriteBarrier();
 #endif
-        if (!from || !isMarked(from))
-            return;
-        if (!to || isMarked(to))
-            return;
-        Heap::heap(from)->addToRememberedSet(from);
     }
 
-    inline void Heap::writeBarrier(const JSCell* from, JSValue to)
+    inline void Heap::writeBarrier(const JSCell*, JSCell*)
+    {
+        WriteBarrierCounters::countWriteBarrier();
+    }
+
+    inline void Heap::writeBarrier(const JSCell*, JSValue)
     {
 #if ENABLE(WRITE_BARRIER_PROFILING)
         WriteBarrierCounters::countWriteBarrier();
 #endif
-        if (!to.isCell())
-            return;
-        writeBarrier(from, to.asCell());
     }
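
The hunk above restores barriers that only do profiling bookkeeping. For comparison, the generational barrier deleted from Heap.cpp and Heap.h above filtered for old-to-new stores and recorded the source cell in a remembered set. A minimal sketch of that pattern, using hypothetical stand-in types rather than JSC's JSCell and mark bits:

#include <unordered_set>

// Hypothetical types; only the barrier's shape is taken from the deleted
// Heap::writeBarrier/addToRememberedSet lines above.
struct Cell {
    bool marked { false };     // survived a previous full collection: "old"
    bool remembered { false }; // already queued for re-scanning
};

struct RememberedSet {
    std::unordered_set<Cell*> cells;

    // Old-to-new barrier: when a marked (old) cell is written a pointer to
    // an unmarked (new) cell, remember the old cell so the next eden
    // collection re-scans it instead of tracing the whole old generation.
    void writeBarrier(Cell* from, Cell* to)
    {
        if (!from || !from->marked)
            return; // stores into new objects need no barrier
        if (!to || to->marked)
            return; // old-to-old edges were already traced
        if (from->remembered)
            return;
        from->remembered = true;
        cells.insert(from);
    }
};

The early-outs keep the common case (no barrier needed) cheap, which is what makes such a check affordable on every pointer store.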