Unreviewed, rolling out r161540.

http://trac.webkit.org/changeset/161540
https://bugs.webkit.org/show_bug.cgi?id=126704

Caused assertion failures on multiple tests (Requested by ap
on #webkit).

Source/JavaScriptCore:

* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::visitAggregate):
* bytecode/CodeBlock.h:
(JSC::CodeBlockSet::mark):
* dfg/DFGOperations.cpp:
* heap/CodeBlockSet.cpp:
(JSC::CodeBlockSet::add):
(JSC::CodeBlockSet::traceMarked):
* heap/CodeBlockSet.h:
* heap/CopiedBlockInlines.h:
(JSC::CopiedBlock::reportLiveBytes):
* heap/CopiedSpace.cpp:
* heap/CopiedSpace.h:
* heap/Heap.cpp:
(JSC::Heap::Heap):
(JSC::Heap::didAbandon):
(JSC::Heap::markRoots):
(JSC::Heap::copyBackingStores):
(JSC::Heap::collectAllGarbage):
(JSC::Heap::collect):
(JSC::Heap::didAllocate):
* heap/Heap.h:
(JSC::Heap::shouldCollect):
(JSC::Heap::isCollecting):
(JSC::Heap::isWriteBarrierEnabled):
(JSC::Heap::writeBarrier):
* heap/HeapOperation.h:
* heap/MarkStack.cpp:
(JSC::MarkStackArray::~MarkStackArray):
* heap/MarkStack.h:
* heap/MarkedAllocator.cpp:
(JSC::MarkedAllocator::isPagedOut):
(JSC::MarkedAllocator::tryAllocateHelper):
(JSC::MarkedAllocator::addBlock):
(JSC::MarkedAllocator::removeBlock):
* heap/MarkedAllocator.h:
(JSC::MarkedAllocator::MarkedAllocator):
(JSC::MarkedAllocator::reset):
* heap/MarkedBlock.cpp:
* heap/MarkedBlock.h:
(JSC::MarkedBlock::lastChanceToFinalize):
(JSC::MarkedBlock::didConsumeEmptyFreeList):
(JSC::MarkedBlock::clearMarks):
* heap/MarkedSpace.cpp:
(JSC::MarkedSpace::~MarkedSpace):
(JSC::MarkedSpace::resetAllocators):
(JSC::MarkedSpace::visitWeakSets):
(JSC::MarkedSpace::reapWeakSets):
* heap/MarkedSpace.h:
(JSC::ClearMarks::operator()):
(JSC::MarkedSpace::clearMarks):
* heap/SlotVisitor.cpp:
(JSC::SlotVisitor::~SlotVisitor):
* heap/SlotVisitor.h:
(JSC::SlotVisitor::sharedData):
* heap/SlotVisitorInlines.h:
(JSC::SlotVisitor::internalAppend):
(JSC::SlotVisitor::copyLater):
(JSC::SlotVisitor::reportExtraMemoryUsage):
* jit/Repatch.cpp:
* runtime/JSGenericTypedArrayViewInlines.h:
(JSC::JSGenericTypedArrayView<Adaptor>::visitChildren):
* runtime/JSPropertyNameIterator.h:
(JSC::StructureRareData::setEnumerationCache):
* runtime/JSString.cpp:
(JSC::JSString::visitChildren):
* runtime/StructureRareDataInlines.h:
(JSC::StructureRareData::setPreviousID):
(JSC::StructureRareData::setObjectToStringValue):
* runtime/WeakMapData.cpp:
(JSC::WeakMapData::visitChildren):

Source/WTF:

* wtf/Bitmap.h:
(WTF::WordType>::count):

git-svn-id: http://svn.webkit.org/repository/webkit/trunk@161557 268f45cc-cd09-0410-ab3c-d52691b4dbfc
parent cb8504d4
2014-01-09 Commit Queue <commit-queue@webkit.org>
Unreviewed, rolling out r161540.
http://trac.webkit.org/changeset/161540
https://bugs.webkit.org/show_bug.cgi?id=126704
Caused assertion failures on multiple tests (Requested by ap
on #webkit).
* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::visitAggregate):
* bytecode/CodeBlock.h:
(JSC::CodeBlockSet::mark):
* dfg/DFGOperations.cpp:
* heap/CodeBlockSet.cpp:
(JSC::CodeBlockSet::add):
(JSC::CodeBlockSet::traceMarked):
* heap/CodeBlockSet.h:
* heap/CopiedBlockInlines.h:
(JSC::CopiedBlock::reportLiveBytes):
* heap/CopiedSpace.cpp:
* heap/CopiedSpace.h:
* heap/Heap.cpp:
(JSC::Heap::Heap):
(JSC::Heap::didAbandon):
(JSC::Heap::markRoots):
(JSC::Heap::copyBackingStores):
(JSC::Heap::collectAllGarbage):
(JSC::Heap::collect):
(JSC::Heap::didAllocate):
* heap/Heap.h:
(JSC::Heap::shouldCollect):
(JSC::Heap::isCollecting):
(JSC::Heap::isWriteBarrierEnabled):
(JSC::Heap::writeBarrier):
* heap/HeapOperation.h:
* heap/MarkStack.cpp:
(JSC::MarkStackArray::~MarkStackArray):
* heap/MarkStack.h:
* heap/MarkedAllocator.cpp:
(JSC::MarkedAllocator::isPagedOut):
(JSC::MarkedAllocator::tryAllocateHelper):
(JSC::MarkedAllocator::addBlock):
(JSC::MarkedAllocator::removeBlock):
* heap/MarkedAllocator.h:
(JSC::MarkedAllocator::MarkedAllocator):
(JSC::MarkedAllocator::reset):
* heap/MarkedBlock.cpp:
* heap/MarkedBlock.h:
(JSC::MarkedBlock::lastChanceToFinalize):
(JSC::MarkedBlock::didConsumeEmptyFreeList):
(JSC::MarkedBlock::clearMarks):
* heap/MarkedSpace.cpp:
(JSC::MarkedSpace::~MarkedSpace):
(JSC::MarkedSpace::resetAllocators):
(JSC::MarkedSpace::visitWeakSets):
(JSC::MarkedSpace::reapWeakSets):
* heap/MarkedSpace.h:
(JSC::ClearMarks::operator()):
(JSC::MarkedSpace::clearMarks):
* heap/SlotVisitor.cpp:
(JSC::SlotVisitor::~SlotVisitor):
* heap/SlotVisitor.h:
(JSC::SlotVisitor::sharedData):
* heap/SlotVisitorInlines.h:
(JSC::SlotVisitor::internalAppend):
(JSC::SlotVisitor::copyLater):
(JSC::SlotVisitor::reportExtraMemoryUsage):
* jit/Repatch.cpp:
* runtime/JSGenericTypedArrayViewInlines.h:
(JSC::JSGenericTypedArrayView<Adaptor>::visitChildren):
* runtime/JSPropertyNameIterator.h:
(JSC::StructureRareData::setEnumerationCache):
* runtime/JSString.cpp:
(JSC::JSString::visitChildren):
* runtime/StructureRareDataInlines.h:
(JSC::StructureRareData::setPreviousID):
(JSC::StructureRareData::setObjectToStringValue):
* runtime/WeakMapData.cpp:
(JSC::WeakMapData::visitChildren):
2014-01-09  Andreas Kling  <akling@apple.com>
Shrink WatchpointSet.
......
...@@ -1954,15 +1954,15 @@ void CodeBlock::visitAggregate(SlotVisitor& visitor) ...@@ -1954,15 +1954,15 @@ void CodeBlock::visitAggregate(SlotVisitor& visitor)
if (CodeBlock* otherBlock = specialOSREntryBlockOrNull()) if (CodeBlock* otherBlock = specialOSREntryBlockOrNull())
otherBlock->visitAggregate(visitor); otherBlock->visitAggregate(visitor);
visitor.reportExtraMemoryUsage(ownerExecutable(), sizeof(CodeBlock)); visitor.reportExtraMemoryUsage(sizeof(CodeBlock));
if (m_jitCode) if (m_jitCode)
visitor.reportExtraMemoryUsage(ownerExecutable(), m_jitCode->size()); visitor.reportExtraMemoryUsage(m_jitCode->size());
if (m_instructions.size()) { if (m_instructions.size()) {
// Divide by refCount() because m_instructions points to something that is shared // Divide by refCount() because m_instructions points to something that is shared
// by multiple CodeBlocks, and we only want to count it towards the heap size once. // by multiple CodeBlocks, and we only want to count it towards the heap size once.
// Having each CodeBlock report only its proportional share of the size is one way // Having each CodeBlock report only its proportional share of the size is one way
// of accomplishing this. // of accomplishing this.
visitor.reportExtraMemoryUsage(ownerExecutable(), m_instructions.size() * sizeof(Instruction) / m_instructions.refCount()); visitor.reportExtraMemoryUsage(m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
} }
visitor.append(&m_unlinkedCode); visitor.append(&m_unlinkedCode);
......
...@@ -1269,7 +1269,6 @@ inline void CodeBlockSet::mark(void* candidateCodeBlock) ...@@ -1269,7 +1269,6 @@ inline void CodeBlockSet::mark(void* candidateCodeBlock)
return; return;
(*iter)->m_mayBeExecuting = true; (*iter)->m_mayBeExecuting = true;
m_currentlyExecuting.append(static_cast<CodeBlock*>(candidateCodeBlock));
} }
} // namespace JSC } // namespace JSC
......
...@@ -850,7 +850,6 @@ char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitial ...@@ -850,7 +850,6 @@ char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitial
NativeCallFrameTracer tracer(&vm, exec); NativeCallFrameTracer tracer(&vm, exec);
ASSERT(!object->structure()->outOfLineCapacity()); ASSERT(!object->structure()->outOfLineCapacity());
DeferGC deferGC(vm.heap);
Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity); Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
object->setButterflyWithoutChangingStructure(vm, result); object->setButterflyWithoutChangingStructure(vm, result);
return reinterpret_cast<char*>(result); return reinterpret_cast<char*>(result);
...@@ -861,7 +860,6 @@ char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* ...@@ -861,7 +860,6 @@ char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState*
VM& vm = exec->vm(); VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec); NativeCallFrameTracer tracer(&vm, exec);
DeferGC deferGC(vm.heap);
Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize); Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
object->setButterflyWithoutChangingStructure(vm, result); object->setButterflyWithoutChangingStructure(vm, result);
return reinterpret_cast<char*>(result); return reinterpret_cast<char*>(result);
......
...@@ -45,8 +45,7 @@ CodeBlockSet::~CodeBlockSet() ...@@ -45,8 +45,7 @@ CodeBlockSet::~CodeBlockSet()
void CodeBlockSet::add(PassRefPtr<CodeBlock> codeBlock) void CodeBlockSet::add(PassRefPtr<CodeBlock> codeBlock)
{ {
CodeBlock* block = codeBlock.leakRef(); bool isNewEntry = m_set.add(codeBlock.leakRef()).isNewEntry;
bool isNewEntry = m_set.add(block).isNewEntry;
ASSERT_UNUSED(isNewEntry, isNewEntry); ASSERT_UNUSED(isNewEntry, isNewEntry);
} }
...@@ -102,16 +101,9 @@ void CodeBlockSet::traceMarked(SlotVisitor& visitor) ...@@ -102,16 +101,9 @@ void CodeBlockSet::traceMarked(SlotVisitor& visitor)
CodeBlock* codeBlock = *iter; CodeBlock* codeBlock = *iter;
if (!codeBlock->m_mayBeExecuting) if (!codeBlock->m_mayBeExecuting)
continue; continue;
codeBlock->ownerExecutable()->visitChildren(codeBlock->ownerExecutable(), visitor); codeBlock->visitAggregate(visitor);
} }
} }
void CodeBlockSet::rememberCurrentlyExecutingCodeBlocks(Heap* heap)
{
for (size_t i = 0; i < m_currentlyExecuting.size(); ++i)
heap->addToRememberedSet(m_currentlyExecuting[i]->ownerExecutable());
m_currentlyExecuting.clear();
}
} // namespace JSC } // namespace JSC
...@@ -30,12 +30,10 @@ ...@@ -30,12 +30,10 @@
#include <wtf/Noncopyable.h> #include <wtf/Noncopyable.h>
#include <wtf/PassRefPtr.h> #include <wtf/PassRefPtr.h>
#include <wtf/RefPtr.h> #include <wtf/RefPtr.h>
#include <wtf/Vector.h>
namespace JSC { namespace JSC {
class CodeBlock; class CodeBlock;
class Heap;
class SlotVisitor; class SlotVisitor;
// CodeBlockSet tracks all CodeBlocks. Every CodeBlock starts out with one // CodeBlockSet tracks all CodeBlocks. Every CodeBlock starts out with one
...@@ -67,16 +65,11 @@ public: ...@@ -67,16 +65,11 @@ public:
// mayBeExecuting. // mayBeExecuting.
void traceMarked(SlotVisitor&); void traceMarked(SlotVisitor&);
// Add all currently executing CodeBlocks to the remembered set to be
// re-scanned during the next collection.
void rememberCurrentlyExecutingCodeBlocks(Heap*);
private: private:
// This is not a set of RefPtr<CodeBlock> because we need to be able to find // This is not a set of RefPtr<CodeBlock> because we need to be able to find
// arbitrary bogus pointers. I could have written a thingy that had peek types // arbitrary bogus pointers. I could have written a thingy that had peek types
// and all, but that seemed like overkill. // and all, but that seemed like overkill.
HashSet<CodeBlock* > m_set; HashSet<CodeBlock* > m_set;
Vector<CodeBlock*> m_currentlyExecuting;
}; };
} // namespace JSC } // namespace JSC
......
...@@ -42,9 +42,6 @@ inline void CopiedBlock::reportLiveBytes(JSCell* owner, CopyToken token, unsigne ...@@ -42,9 +42,6 @@ inline void CopiedBlock::reportLiveBytes(JSCell* owner, CopyToken token, unsigne
#endif #endif
m_liveBytes += bytes; m_liveBytes += bytes;
if (isPinned())
return;
if (!shouldEvacuate()) { if (!shouldEvacuate()) {
pin(); pin();
return; return;
......
...@@ -316,17 +316,4 @@ bool CopiedSpace::isPagedOut(double deadline) ...@@ -316,17 +316,4 @@ bool CopiedSpace::isPagedOut(double deadline)
|| isBlockListPagedOut(deadline, &m_oversizeBlocks); || isBlockListPagedOut(deadline, &m_oversizeBlocks);
} }
void CopiedSpace::didStartFullCollection()
{
ASSERT(heap()->operationInProgress() == FullCollection);
ASSERT(m_fromSpace->isEmpty());
for (CopiedBlock* block = m_toSpace->head(); block; block = block->next())
block->didSurviveGC();
for (CopiedBlock* block = m_oversizeBlocks.head(); block; block = block->next())
block->didSurviveGC();
}
} // namespace JSC } // namespace JSC
...@@ -60,8 +60,6 @@ public: ...@@ -60,8 +60,6 @@ public:
CopiedAllocator& allocator() { return m_allocator; } CopiedAllocator& allocator() { return m_allocator; }
void didStartFullCollection();
void startedCopying(); void startedCopying();
void doneCopying(); void doneCopying();
bool isInCopyPhase() { return m_inCopyingPhase; } bool isInCopyPhase() { return m_inCopyingPhase; }
...@@ -82,8 +80,6 @@ public: ...@@ -82,8 +80,6 @@ public:
static CopiedBlock* blockFor(void*); static CopiedBlock* blockFor(void*);
Heap* heap() const { return m_heap; }
private: private:
static bool isOversize(size_t); static bool isOversize(size_t);
......
...@@ -253,11 +253,9 @@ Heap::Heap(VM* vm, HeapType heapType) ...@@ -253,11 +253,9 @@ Heap::Heap(VM* vm, HeapType heapType)
, m_ramSize(ramSize()) , m_ramSize(ramSize())
, m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize)) , m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize))
, m_sizeAfterLastCollect(0) , m_sizeAfterLastCollect(0)
, m_bytesAllocatedThisCycle(0) , m_bytesAllocatedLimit(m_minBytesPerCycle)
, m_bytesAbandonedThisCycle(0) , m_bytesAllocated(0)
, m_maxEdenSize(m_minBytesPerCycle) , m_bytesAbandoned(0)
, m_maxHeapSize(m_minBytesPerCycle)
, m_shouldDoFullCollection(false)
, m_totalBytesVisited(0) , m_totalBytesVisited(0)
, m_totalBytesCopied(0) , m_totalBytesCopied(0)
, m_operationInProgress(NoOperation) , m_operationInProgress(NoOperation)
...@@ -271,7 +269,7 @@ Heap::Heap(VM* vm, HeapType heapType) ...@@ -271,7 +269,7 @@ Heap::Heap(VM* vm, HeapType heapType)
, m_copyVisitor(m_sharedData) , m_copyVisitor(m_sharedData)
, m_handleSet(vm) , m_handleSet(vm)
, m_isSafeToCollect(false) , m_isSafeToCollect(false)
, m_writeBarrierBuffer(256) , m_writeBarrierBuffer(128)
, m_vm(vm) , m_vm(vm)
, m_lastGCLength(0) , m_lastGCLength(0)
, m_lastCodeDiscardTime(WTF::monotonicallyIncreasingTime()) , m_lastCodeDiscardTime(WTF::monotonicallyIncreasingTime())
...@@ -334,8 +332,8 @@ void Heap::reportAbandonedObjectGraph() ...@@ -334,8 +332,8 @@ void Heap::reportAbandonedObjectGraph()
void Heap::didAbandon(size_t bytes) void Heap::didAbandon(size_t bytes)
{ {
if (m_activityCallback) if (m_activityCallback)
m_activityCallback->didAllocate(m_bytesAllocatedThisCycle + m_bytesAbandonedThisCycle); m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
m_bytesAbandonedThisCycle += bytes; m_bytesAbandoned += bytes;
} }
void Heap::protect(JSValue k) void Heap::protect(JSValue k)
...@@ -489,9 +487,6 @@ void Heap::markRoots() ...@@ -489,9 +487,6 @@ void Heap::markRoots()
visitor.setup(); visitor.setup();
HeapRootVisitor heapRootVisitor(visitor); HeapRootVisitor heapRootVisitor(visitor);
Vector<const JSCell*> rememberedSet(m_slotVisitor.markStack().size());
m_slotVisitor.markStack().fillVector(rememberedSet);
{ {
ParallelModeEnabler enabler(visitor); ParallelModeEnabler enabler(visitor);
...@@ -595,14 +590,6 @@ void Heap::markRoots() ...@@ -595,14 +590,6 @@ void Heap::markRoots()
} }
} }
{
GCPHASE(ClearRememberedSet);
for (unsigned i = 0; i < rememberedSet.size(); ++i) {
const JSCell* cell = rememberedSet[i];
MarkedBlock::blockFor(cell)->clearRemembered(cell);
}
}
GCCOUNTER(VisitedValueCount, visitor.visitCount()); GCCOUNTER(VisitedValueCount, visitor.visitCount());
m_sharedData.didFinishMarking(); m_sharedData.didFinishMarking();
...@@ -614,14 +601,8 @@ void Heap::markRoots() ...@@ -614,14 +601,8 @@ void Heap::markRoots()
MARK_LOG_MESSAGE2("\nNumber of live Objects after full GC %lu, took %.6f secs\n", visitCount, WTF::monotonicallyIncreasingTime() - gcStartTime); MARK_LOG_MESSAGE2("\nNumber of live Objects after full GC %lu, took %.6f secs\n", visitCount, WTF::monotonicallyIncreasingTime() - gcStartTime);
#endif #endif
if (m_operationInProgress == EdenCollection) { m_totalBytesVisited = visitor.bytesVisited();
m_totalBytesVisited += visitor.bytesVisited(); m_totalBytesCopied = visitor.bytesCopied();
m_totalBytesCopied += visitor.bytesCopied();
} else {
ASSERT(m_operationInProgress == FullCollection);
m_totalBytesVisited = visitor.bytesVisited();
m_totalBytesCopied = visitor.bytesCopied();
}
#if ENABLE(PARALLEL_GC) #if ENABLE(PARALLEL_GC)
m_totalBytesVisited += m_sharedData.childBytesVisited(); m_totalBytesVisited += m_sharedData.childBytesVisited();
m_totalBytesCopied += m_sharedData.childBytesCopied(); m_totalBytesCopied += m_sharedData.childBytesCopied();
...@@ -634,12 +615,8 @@ void Heap::markRoots() ...@@ -634,12 +615,8 @@ void Heap::markRoots()
m_sharedData.reset(); m_sharedData.reset();
} }
template <HeapOperation collectionType>
void Heap::copyBackingStores() void Heap::copyBackingStores()
{ {
if (collectionType == EdenCollection)
return;
m_storageSpace.startedCopying(); m_storageSpace.startedCopying();
if (m_storageSpace.shouldDoCopyPhase()) { if (m_storageSpace.shouldDoCopyPhase()) {
m_sharedData.didStartCopying(); m_sharedData.didStartCopying();
...@@ -650,7 +627,7 @@ void Heap::copyBackingStores() ...@@ -650,7 +627,7 @@ void Heap::copyBackingStores()
// before signaling that the phase is complete. // before signaling that the phase is complete.
m_storageSpace.doneCopying(); m_storageSpace.doneCopying();
m_sharedData.didFinishCopying(); m_sharedData.didFinishCopying();
} else } else
m_storageSpace.doneCopying(); m_storageSpace.doneCopying();
} }
...@@ -746,23 +723,11 @@ void Heap::deleteUnmarkedCompiledCode() ...@@ -746,23 +723,11 @@ void Heap::deleteUnmarkedCompiledCode()
m_jitStubRoutines.deleteUnmarkedJettisonedStubRoutines(); m_jitStubRoutines.deleteUnmarkedJettisonedStubRoutines();
} }
void Heap::addToRememberedSet(const JSCell* cell)
{
ASSERT(cell);
ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
ASSERT(isMarked(cell));
if (isInRememberedSet(cell))
return;
MarkedBlock::blockFor(cell)->setRemembered(cell);
m_slotVisitor.unconditionallyAppend(const_cast<JSCell*>(cell));
}
void Heap::collectAllGarbage() void Heap::collectAllGarbage()
{ {
if (!m_isSafeToCollect) if (!m_isSafeToCollect)
return; return;
m_shouldDoFullCollection = true;
collect(); collect();
SamplingRegion samplingRegion("Garbage Collection: Sweeping"); SamplingRegion samplingRegion("Garbage Collection: Sweeping");
...@@ -799,28 +764,9 @@ void Heap::collect() ...@@ -799,28 +764,9 @@ void Heap::collect()
RecursiveAllocationScope scope(*this); RecursiveAllocationScope scope(*this);
m_vm->prepareToDiscardCode(); m_vm->prepareToDiscardCode();
} }
bool isFullCollection = m_shouldDoFullCollection; m_operationInProgress = Collection;
if (isFullCollection) { m_extraMemoryUsage = 0;
m_operationInProgress = FullCollection;
m_slotVisitor.clearMarkStack();
m_shouldDoFullCollection = false;
if (Options::logGC())
dataLog("FullCollection, ");
} else {
#if ENABLE(GGC)
m_operationInProgress = EdenCollection;
if (Options::logGC())
dataLog("EdenCollection, ");
#else
m_operationInProgress = FullCollection;
m_slotVisitor.clearMarkStack();
if (Options::logGC())
dataLog("FullCollection, ");
#endif
}
if (m_operationInProgress == FullCollection)
m_extraMemoryUsage = 0;
if (m_activityCallback) if (m_activityCallback)
m_activityCallback->willCollect(); m_activityCallback->willCollect();
...@@ -834,16 +780,6 @@ void Heap::collect() ...@@ -834,16 +780,6 @@ void Heap::collect()
{ {
GCPHASE(StopAllocation); GCPHASE(StopAllocation);
m_objectSpace.stopAllocating(); m_objectSpace.stopAllocating();
if (m_operationInProgress == FullCollection)
m_storageSpace.didStartFullCollection();
}
{
GCPHASE(FlushWriteBarrierBuffer);
if (m_operationInProgress == EdenCollection)
m_writeBarrierBuffer.flush(*this);
else
m_writeBarrierBuffer.reset();
} }
markRoots(); markRoots();
...@@ -860,16 +796,13 @@ void Heap::collect() ...@@ -860,16 +796,13 @@ void Heap::collect()
m_arrayBuffers.sweep(); m_arrayBuffers.sweep();
} }
if (m_operationInProgress == FullCollection) { {
m_blockSnapshot.resize(m_objectSpace.blocks().set().size()); m_blockSnapshot.resize(m_objectSpace.blocks().set().size());
MarkedBlockSnapshotFunctor functor(m_blockSnapshot); MarkedBlockSnapshotFunctor functor(m_blockSnapshot);
m_objectSpace.forEachBlock(functor); m_objectSpace.forEachBlock(functor);
} }
if (m_operationInProgress == FullCollection) copyBackingStores();
copyBackingStores<FullCollection>();
else
copyBackingStores<EdenCollection>();
{ {
GCPHASE(FinalizeUnconditionalFinalizers); GCPHASE(FinalizeUnconditionalFinalizers);
...@@ -886,15 +819,8 @@ void Heap::collect() ...@@ -886,15 +819,8 @@ void Heap::collect()
m_vm->clearSourceProviderCaches(); m_vm->clearSourceProviderCaches();
} }
if (m_operationInProgress == FullCollection) m_sweeper->startSweeping(m_blockSnapshot);
m_sweeper->startSweeping(m_blockSnapshot); m_bytesAbandoned = 0;
{
GCPHASE(AddCurrentlyExecutingCodeBlocksToRememberedSet);
m_codeBlocks.rememberCurrentlyExecutingCodeBlocks(this);
}
m_bytesAbandonedThisCycle = 0;
{ {
GCPHASE(ResetAllocators); GCPHASE(ResetAllocators);
...@@ -905,32 +831,21 @@ void Heap::collect() ...@@ -905,32 +831,21 @@ void Heap::collect()
if (Options::gcMaxHeapSize() && currentHeapSize > Options::gcMaxHeapSize()) if (Options::gcMaxHeapSize() && currentHeapSize > Options::gcMaxHeapSize())
HeapStatistics::exitWithFailure(); HeapStatistics::exitWithFailure();
if (m_operationInProgress == FullCollection) {
// To avoid pathological GC churn in very small and very large heaps, we set
// the new allocation limit based on the current size of the heap, with a
// fixed minimum.
m_maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
m_maxEdenSize = m_maxHeapSize - currentHeapSize;
} else {
ASSERT(currentHeapSize >= m_sizeAfterLastCollect);
m_maxEdenSize = m_maxHeapSize - currentHeapSize;
double edenToOldGenerationRatio = (double)m_maxEdenSize / (double)m_maxHeapSize;
double minEdenToOldGenerationRatio = 1.0 / 3.0;
if (edenToOldGenerationRatio < minEdenToOldGenerationRatio)
m_shouldDoFullCollection = true;
m_maxHeapSize += currentHeapSize - m_sizeAfterLastCollect;
m_maxEdenSize = m_maxHeapSize - currentHeapSize;
}
m_sizeAfterLastCollect = currentHeapSize; m_sizeAfterLastCollect = currentHeapSize;
m_bytesAllocatedThisCycle = 0; // To avoid pathological GC churn in very small and very large heaps, we set
// the new allocation limit based on the current size of the heap, with a
// fixed minimum.
size_t maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
m_bytesAllocatedLimit = maxHeapSize - currentHeapSize;
m_bytesAllocated = 0;
double lastGCEndTime = WTF::monotonicallyIncreasingTime(); double lastGCEndTime = WTF::monotonicallyIncreasingTime();
m_lastGCLength = lastGCEndTime - lastGCStartTime; m_lastGCLength = lastGCEndTime - lastGCStartTime;
if (Options::recordGCPauseTimes()) if (Options::recordGCPauseTimes())
HeapStatistics::recordGCPauseTime(lastGCStartTime, lastGCEndTime); HeapStatistics::recordGCPauseTime(lastGCStartTime, lastGCEndTime);
RELEASE_ASSERT(m_operationInProgress == EdenCollection || m_operationInProgress == FullCollection); RELEASE_ASSERT(m_operationInProgress == Collection);
m_operationInProgress = NoOperation; m_operationInProgress = NoOperation;
JAVASCRIPTCORE_GC_END(); JAVASCRIPTCORE_GC_END();
...@@ -948,6 +863,10 @@ void Heap::collect() ...@@ -948,6 +863,10 @@ void Heap::collect()
double after = currentTimeMS(); double after = currentTimeMS();
dataLog(after - before, " ms, ", currentHeapSize / 1024, " kb]\n"); dataLog(after - before, " ms, ", currentHeapSize / 1024, " kb]\n");
} }
#if ENABLE(ALLOCATION_LOGGING)
dataLogF("JSC GC finishing collection.\n");
#endif
} }
bool Heap::collectIfNecessaryOrDefer() bool Heap::collectIfNecessaryOrDefer()
...@@ -997,8 +916,8 @@ void Heap::setGarbageCollectionTimerEnabled(bool enable) ...@@ -997,8 +916,8 @@ void Heap::setGarbageCollectionTimerEnabled(bool enable)
void Heap::didAllocate(size_t bytes) void Heap::didAllocate(size_t bytes)
{ {
if (m_activityCallback) if (m_activityCallback)
m_activityCallback->didAllocate(m_bytesAllocatedThisCycle + m_bytesAbandonedThisCycle); m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
m_bytesAllocatedThisCycle += bytes; m_bytesAllocated += bytes;
} }
bool Heap::isValidAllocation(size_t) bool Heap::isValidAllocation(size_t)
...@@ -1075,15 +994,6 @@ void Heap::decrementDeferralDepthAndGCIfNeeded() ...@@ -1075,15 +994,6 @@ void Heap::decrementDeferralDepthAndGCIfNeeded()
collectIfNecessaryOrDefer(); collectIfNecessaryOrDefer();
} }
void Heap::writeBarrier(const JSCell* from)
{
ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
if (!from || !isMarked(from))