Changeset 161615 in webkit for trunk/Source/JavaScriptCore
- Timestamp: Jan 9, 2014, 6:28:27 PM
- Location: trunk/Source/JavaScriptCore
- Files: 29 edited
trunk/Source/JavaScriptCore/ChangeLog
r161585 → r161615

New entry added at the top of the ChangeLog (above the existing 2014-01-09 Joseph Pecoraro entry):

2014-01-07  Mark Hahnenberg  <[email protected]>

        Marking should be generational
        https://bugs.webkit.org/show_bug.cgi?id=126552

        Reviewed by Geoffrey Garen.

        Re-marking the same objects over and over is a waste of effort. This patch implements
        the sticky mark bit algorithm (along with our already-present write barriers) to reduce
        overhead during garbage collection caused by rescanning objects.

        There are now two collection modes, EdenCollection and FullCollection. EdenCollections
        only visit new objects or objects that were added to the remembered set by a write barrier.
        FullCollections are normal collections that visit all objects regardless of their
        generation.

        In this patch EdenCollections do not do anything in CopiedSpace. This will be fixed in
        https://bugs.webkit.org/show_bug.cgi?id=126555.

        * bytecode/CodeBlock.cpp:
        (JSC::CodeBlock::visitAggregate):
        * bytecode/CodeBlock.h:
        (JSC::CodeBlockSet::mark):
        * dfg/DFGOperations.cpp:
        * heap/CodeBlockSet.cpp:
        (JSC::CodeBlockSet::add):
        (JSC::CodeBlockSet::traceMarked):
        (JSC::CodeBlockSet::rememberCurrentlyExecutingCodeBlocks):
        * heap/CodeBlockSet.h:
        * heap/CopiedBlockInlines.h:
        (JSC::CopiedBlock::reportLiveBytes):
        * heap/CopiedSpace.cpp:
        (JSC::CopiedSpace::didStartFullCollection):
        * heap/CopiedSpace.h:
        (JSC::CopiedSpace::heap):
        * heap/Heap.cpp:
        (JSC::Heap::Heap):
        (JSC::Heap::didAbandon):
        (JSC::Heap::markRoots):
        (JSC::Heap::copyBackingStores):
        (JSC::Heap::addToRememberedSet):
        (JSC::Heap::collectAllGarbage):
        (JSC::Heap::collect):
        (JSC::Heap::didAllocate):
        (JSC::Heap::writeBarrier):
        * heap/Heap.h:
        (JSC::Heap::isInRememberedSet):
        (JSC::Heap::operationInProgress):
        (JSC::Heap::shouldCollect):
        (JSC::Heap::isCollecting):
        (JSC::Heap::isWriteBarrierEnabled):
        (JSC::Heap::writeBarrier):
        * heap/HeapOperation.h:
        * heap/MarkStack.cpp:
        (JSC::MarkStackArray::~MarkStackArray):
        (JSC::MarkStackArray::clear):
        (JSC::MarkStackArray::fillVector):
        * heap/MarkStack.h:
        * heap/MarkedAllocator.cpp:
        (JSC::isListPagedOut):
        (JSC::MarkedAllocator::isPagedOut):
        (JSC::MarkedAllocator::tryAllocateHelper):
        (JSC::MarkedAllocator::addBlock):
        (JSC::MarkedAllocator::removeBlock):
        (JSC::MarkedAllocator::reset):
        * heap/MarkedAllocator.h:
        (JSC::MarkedAllocator::MarkedAllocator):
        * heap/MarkedBlock.cpp:
        (JSC::MarkedBlock::clearMarks):
        (JSC::MarkedBlock::clearRememberedSet):
        (JSC::MarkedBlock::clearMarksWithCollectionType):
        (JSC::MarkedBlock::lastChanceToFinalize):
        * heap/MarkedBlock.h: Changed atomSize to 16 bytes because we have no objects smaller
        than 16 bytes. This is also to pay for the additional Bitmap for the remembered set.
        (JSC::MarkedBlock::didConsumeEmptyFreeList):
        (JSC::MarkedBlock::setRemembered):
        (JSC::MarkedBlock::clearRemembered):
        (JSC::MarkedBlock::atomicClearRemembered):
        (JSC::MarkedBlock::isRemembered):
        * heap/MarkedSpace.cpp:
        (JSC::MarkedSpace::~MarkedSpace):
        (JSC::MarkedSpace::resetAllocators):
        (JSC::MarkedSpace::visitWeakSets):
        (JSC::MarkedSpace::reapWeakSets):
        (JSC::VerifyMarked::operator()):
        (JSC::MarkedSpace::clearMarks):
        * heap/MarkedSpace.h:
        (JSC::ClearMarks::operator()):
        (JSC::ClearRememberedSet::operator()):
        (JSC::MarkedSpace::didAllocateInBlock):
        (JSC::MarkedSpace::clearRememberedSet):
        * heap/SlotVisitor.cpp:
        (JSC::SlotVisitor::~SlotVisitor):
        (JSC::SlotVisitor::clearMarkStack):
        * heap/SlotVisitor.h:
        (JSC::SlotVisitor::markStack):
        (JSC::SlotVisitor::sharedData):
        * heap/SlotVisitorInlines.h:
        (JSC::SlotVisitor::internalAppend):
        (JSC::SlotVisitor::unconditionallyAppend):
        (JSC::SlotVisitor::copyLater):
        (JSC::SlotVisitor::reportExtraMemoryUsage):
        (JSC::SlotVisitor::heap):
        * jit/Repatch.cpp:
        * runtime/JSGenericTypedArrayViewInlines.h:
        (JSC::JSGenericTypedArrayView<Adaptor>::visitChildren):
        * runtime/JSPropertyNameIterator.h:
        (JSC::StructureRareData::setEnumerationCache):
        * runtime/JSString.cpp:
        (JSC::JSString::visitChildren):
        * runtime/StructureRareDataInlines.h:
        (JSC::StructureRareData::setPreviousID):
        (JSC::StructureRareData::setObjectToStringValue):
        * runtime/WeakMapData.cpp:
        (JSC::WeakMapData::visitChildren):
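For orientation before the per-file hunks, here is a minimal sketch of the scheme the ChangeLog describes: a write barrier that records old (marked) objects when they gain a pointer to a new (unmarked) object, so an eden collection only re-scans the remembered set instead of the whole heap. Everything below (GCObject, MiniHeap, the per-object flags) is illustrative, not JSC's API; the real implementation uses per-block bitmaps and lives in the Heap.h, Heap.cpp, and MarkedBlock.h hunks that follow.

    #include <unordered_set>
    #include <vector>

    struct GCObject {
        bool marked = false;      // "sticky" mark bit: eden collections do not clear it
        bool remembered = false;  // true if already queued for re-scanning
        std::vector<GCObject*> children;
    };

    struct MiniHeap {
        std::unordered_set<GCObject*> rememberedSet;

        // Barrier run on every store of 'to' into a field of 'from'.
        void writeBarrier(GCObject* from, GCObject* to) {
            if (!from || !from->marked)
                return;                 // 'from' is new; the next eden GC visits it anyway
            if (to && to->marked)
                return;                 // old-to-old store: no unmarked object to find
            if (from->remembered)
                return;                 // already in the remembered set
            from->remembered = true;
            rememberedSet.insert(from); // re-scan 'from' during the next eden collection
        }
    };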
trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
r161557 → r161615

     otherBlock->visitAggregate(visitor);

-    visitor.reportExtraMemoryUsage(sizeof(CodeBlock));
+    visitor.reportExtraMemoryUsage(ownerExecutable(), sizeof(CodeBlock));
     if (m_jitCode)
-        visitor.reportExtraMemoryUsage(m_jitCode->size());
+        visitor.reportExtraMemoryUsage(ownerExecutable(), m_jitCode->size());
     if (m_instructions.size()) {
         // Divide by refCount() because m_instructions points to something that is shared
…
         // Having each CodeBlock report only its proportional share of the size is one way
         // of accomplishing this.
-        visitor.reportExtraMemoryUsage(m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
+        visitor.reportExtraMemoryUsage(ownerExecutable(), m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
     }
trunk/Source/JavaScriptCore/bytecode/CodeBlock.h
r161557 → r161615

     (*iter)->m_mayBeExecuting = true;
+#if ENABLE(GGC)
+    m_currentlyExecuting.append(static_cast<CodeBlock*>(candidateCodeBlock));
+#endif
 }
trunk/Source/JavaScriptCore/dfg/DFGOperations.cpp
r161557 → r161615

     ASSERT(!object->structure()->outOfLineCapacity());
+    DeferGC deferGC(vm.heap);
     Butterfly* result = object->growOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
     object->setButterflyWithoutChangingStructure(vm, result);
…
     NativeCallFrameTracer tracer(&vm, exec);

+    DeferGC deferGC(vm.heap);
     Butterfly* result = object->growOutOfLineStorage(vm, object->structure()->outOfLineCapacity(), newSize);
     object->setButterflyWithoutChangingStructure(vm, result);
trunk/Source/JavaScriptCore/heap/CodeBlockSet.cpp
r161557 → r161615

 void CodeBlockSet::add(PassRefPtr<CodeBlock> codeBlock)
 {
-    bool isNewEntry = m_set.add(codeBlock.leakRef()).isNewEntry;
+    CodeBlock* block = codeBlock.leakRef();
+    bool isNewEntry = m_set.add(block).isNewEntry;
     ASSERT_UNUSED(isNewEntry, isNewEntry);
 }
…
         if (!codeBlock->m_mayBeExecuting)
             continue;
-        codeBlock->visitAggregate(visitor);
+        codeBlock->ownerExecutable()->methodTable()->visitChildren(codeBlock->ownerExecutable(), visitor);
     }
 }
+
+void CodeBlockSet::rememberCurrentlyExecutingCodeBlocks(Heap* heap)
+{
+#if ENABLE(GGC)
+    for (size_t i = 0; i < m_currentlyExecuting.size(); ++i)
+        heap->addToRememberedSet(m_currentlyExecuting[i]->ownerExecutable());
+    m_currentlyExecuting.clear();
+#else
+    UNUSED_PARAM(heap);
+#endif // ENABLE(GGC)
+}
trunk/Source/JavaScriptCore/heap/CodeBlockSet.h
r161557 → r161615

 #include <wtf/PassRefPtr.h>
 #include <wtf/RefPtr.h>
+#include <wtf/Vector.h>

 namespace JSC {

 class CodeBlock;
+class Heap;
 class SlotVisitor;
…
     void traceMarked(SlotVisitor&);

+    // Add all currently executing CodeBlocks to the remembered set to be
+    // re-scanned during the next collection.
+    void rememberCurrentlyExecutingCodeBlocks(Heap*);
+
 private:
     // This is not a set of RefPtr<CodeBlock> because we need to be able to find
…
     // and all, but that seemed like overkill.
     HashSet<CodeBlock*> m_set;
+    Vector<CodeBlock*> m_currentlyExecuting;
 };
trunk/Source/JavaScriptCore/heap/CopiedBlockInlines.h
r161557 → r161615

     m_liveBytes += bytes;

+    if (isPinned())
+        return;
+
     if (!shouldEvacuate()) {
         pin();
trunk/Source/JavaScriptCore/heap/CopiedSpace.cpp
r161557 → r161615

 }

+void CopiedSpace::didStartFullCollection()
+{
+    ASSERT(heap()->operationInProgress() == FullCollection);
+
+    ASSERT(m_fromSpace->isEmpty());
+
+    for (CopiedBlock* block = m_toSpace->head(); block; block = block->next())
+        block->didSurviveGC();
+
+    for (CopiedBlock* block = m_oversizeBlocks.head(); block; block = block->next())
+        block->didSurviveGC();
+}
+
 } // namespace JSC
trunk/Source/JavaScriptCore/heap/CopiedSpace.h
r161557 → r161615

     CopiedAllocator& allocator() { return m_allocator; }

+    void didStartFullCollection();
+
     void startedCopying();
     void doneCopying();
…
     static CopiedBlock* blockFor(void*);
+
+    Heap* heap() const { return m_heap; }

 private:
trunk/Source/JavaScriptCore/heap/Heap.cpp
r161557 → r161615

     , m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize))
     , m_sizeAfterLastCollect(0)
-    , m_bytesAllocatedLimit(m_minBytesPerCycle)
-    , m_bytesAllocated(0)
-    , m_bytesAbandoned(0)
+    , m_bytesAllocatedThisCycle(0)
+    , m_bytesAbandonedThisCycle(0)
+    , m_maxEdenSize(m_minBytesPerCycle)
+    , m_maxHeapSize(m_minBytesPerCycle)
+    , m_shouldDoFullCollection(false)
     , m_totalBytesVisited(0)
     , m_totalBytesCopied(0)
…
     , m_handleSet(vm)
     , m_isSafeToCollect(false)
-    , m_writeBarrierBuffer(128)
+    , m_writeBarrierBuffer(256)
     , m_vm(vm)
     , m_lastGCLength(0)
…
 {
     if (m_activityCallback)
-        m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
-    m_bytesAbandoned += bytes;
+        m_activityCallback->didAllocate(m_bytesAllocatedThisCycle + m_bytesAbandonedThisCycle);
+    m_bytesAbandonedThisCycle += bytes;
 }
…
     visitor.setup();
     HeapRootVisitor heapRootVisitor(visitor);
+
+    Vector<const JSCell*> rememberedSet(m_slotVisitor.markStack().size());
+    m_slotVisitor.markStack().fillVector(rememberedSet);

     {
…
     }

+    {
+        GCPHASE(ClearRememberedSet);
+        for (unsigned i = 0; i < rememberedSet.size(); ++i) {
+            const JSCell* cell = rememberedSet[i];
+            MarkedBlock::blockFor(cell)->clearRemembered(cell);
+        }
+    }
+
     GCCOUNTER(VisitedValueCount, visitor.visitCount());
…
-    m_totalBytesVisited = visitor.bytesVisited();
-    m_totalBytesCopied = visitor.bytesCopied();
+    if (m_operationInProgress == EdenCollection) {
+        m_totalBytesVisited += visitor.bytesVisited();
+        m_totalBytesCopied += visitor.bytesCopied();
+    } else {
+        ASSERT(m_operationInProgress == FullCollection);
+        m_totalBytesVisited = visitor.bytesVisited();
+        m_totalBytesCopied = visitor.bytesCopied();
+    }
 #if ENABLE(PARALLEL_GC)
     m_totalBytesVisited += m_sharedData.childBytesVisited();
…
 }

+template <HeapOperation collectionType>
 void Heap::copyBackingStores()
 {
+    if (collectionType == EdenCollection)
+        return;
+
     m_storageSpace.startedCopying();
     if (m_storageSpace.shouldDoCopyPhase()) {
…
         m_storageSpace.doneCopying();
         m_sharedData.didFinishCopying();
     } else
         m_storageSpace.doneCopying();
 }
…
 }

+void Heap::addToRememberedSet(const JSCell* cell)
+{
+    ASSERT(cell);
+    ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
+    if (isInRememberedSet(cell))
+        return;
+    MarkedBlock::blockFor(cell)->setRemembered(cell);
+    m_slotVisitor.unconditionallyAppend(const_cast<JSCell*>(cell));
+}
+
 void Heap::collectAllGarbage()
 {
…
         return;

+    m_shouldDoFullCollection = true;
     collect();
…
         m_vm->prepareToDiscardCode();
     }

-    m_operationInProgress = Collection;
-    m_extraMemoryUsage = 0;
+    bool isFullCollection = m_shouldDoFullCollection;
+    if (isFullCollection) {
+        m_operationInProgress = FullCollection;
+        m_slotVisitor.clearMarkStack();
+        m_shouldDoFullCollection = false;
+        if (Options::logGC())
+            dataLog("FullCollection, ");
+    } else {
+#if ENABLE(GGC)
+        m_operationInProgress = EdenCollection;
+        if (Options::logGC())
+            dataLog("EdenCollection, ");
+#else
+        m_operationInProgress = FullCollection;
+        m_slotVisitor.clearMarkStack();
+        if (Options::logGC())
+            dataLog("FullCollection, ");
+#endif
+    }
+    if (m_operationInProgress == FullCollection)
+        m_extraMemoryUsage = 0;

     if (m_activityCallback)
…
         GCPHASE(StopAllocation);
         m_objectSpace.stopAllocating();
+        if (m_operationInProgress == FullCollection)
+            m_storageSpace.didStartFullCollection();
+    }
+
+    {
+        GCPHASE(FlushWriteBarrierBuffer);
+        if (m_operationInProgress == EdenCollection)
+            m_writeBarrierBuffer.flush(*this);
+        else
+            m_writeBarrierBuffer.reset();
     }
…
     }

-    {
+    if (m_operationInProgress == FullCollection) {
         m_blockSnapshot.resize(m_objectSpace.blocks().set().size());
         MarkedBlockSnapshotFunctor functor(m_blockSnapshot);
…
     }

-    copyBackingStores();
+    if (m_operationInProgress == FullCollection)
+        copyBackingStores<FullCollection>();
+    else
+        copyBackingStores<EdenCollection>();

…
     }

-    m_sweeper->startSweeping(m_blockSnapshot);
-    m_bytesAbandoned = 0;
+    if (m_operationInProgress == FullCollection)
+        m_sweeper->startSweeping(m_blockSnapshot);
+
+    {
+        GCPHASE(AddCurrentlyExecutingCodeBlocksToRememberedSet);
+        m_codeBlocks.rememberCurrentlyExecutingCodeBlocks(this);
+    }
+
+    m_bytesAbandonedThisCycle = 0;

…
         HeapStatistics::exitWithFailure();

+    if (m_operationInProgress == FullCollection) {
+        // To avoid pathological GC churn in very small and very large heaps, we set
+        // the new allocation limit based on the current size of the heap, with a
+        // fixed minimum.
+        m_maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
+        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
+    } else {
+        ASSERT(currentHeapSize >= m_sizeAfterLastCollect);
+        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
+        double edenToOldGenerationRatio = (double)m_maxEdenSize / (double)m_maxHeapSize;
+        double minEdenToOldGenerationRatio = 1.0 / 3.0;
+        if (edenToOldGenerationRatio < minEdenToOldGenerationRatio)
+            m_shouldDoFullCollection = true;
+        m_maxHeapSize += currentHeapSize - m_sizeAfterLastCollect;
+        m_maxEdenSize = m_maxHeapSize - currentHeapSize;
+    }
+
     m_sizeAfterLastCollect = currentHeapSize;

-    // To avoid pathological GC churn in very small and very large heaps, we set
-    // the new allocation limit based on the current size of the heap, with a
-    // fixed minimum.
-    size_t maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
-    m_bytesAllocatedLimit = maxHeapSize - currentHeapSize;
-
-    m_bytesAllocated = 0;
+    m_bytesAllocatedThisCycle = 0;
     double lastGCEndTime = WTF::monotonicallyIncreasingTime();
     m_lastGCLength = lastGCEndTime - lastGCStartTime;
…
     if (Options::recordGCPauseTimes())
         HeapStatistics::recordGCPauseTime(lastGCStartTime, lastGCEndTime);
-    RELEASE_ASSERT(m_operationInProgress == Collection);
+    RELEASE_ASSERT(m_operationInProgress == EdenCollection || m_operationInProgress == FullCollection);

     m_operationInProgress = NoOperation;
…
         dataLog(after - before, " ms, ", currentHeapSize / 1024, " kb]\n");
     }
-
-#if ENABLE(ALLOCATION_LOGGING)
-    dataLogF("JSC GC finishing collection.\n");
-#endif
 }
…
 {
     if (m_activityCallback)
-        m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
-    m_bytesAllocated += bytes;
+        m_activityCallback->didAllocate(m_bytesAllocatedThisCycle + m_bytesAbandonedThisCycle);
+    m_bytesAllocatedThisCycle += bytes;
 }
…
     decrementDeferralDepth();
     collectIfNecessaryOrDefer();
+}
+
+void Heap::writeBarrier(const JSCell* from)
+{
+    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
+    if (!from || !isMarked(from))
+        return;
+    Heap* heap = Heap::heap(from);
+    heap->addToRememberedSet(from);
 }
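To make the sizing policy in the hunk above concrete, a worked example with hypothetical numbers:

- After a FullCollection, suppose currentHeapSize is 40 MB and proportionalHeapSize() returns 60 MB. Then m_maxHeapSize = 60 MB and m_maxEdenSize = 60 - 40 = 20 MB, so the next collection triggers after roughly 20 MB of new allocation.
- Suppose that EdenCollection promotes 8 MB of survivors, leaving currentHeapSize = 48 MB. The else-branch first computes m_maxEdenSize = 60 - 48 = 12 MB, giving an eden-to-heap ratio of 12/60 = 0.2 < 1/3, so m_shouldDoFullCollection is set. It then grows the heap limit by the promoted bytes (m_maxHeapSize = 60 + 8 = 68 MB) and restores m_maxEdenSize to 68 - 48 = 20 MB for the one remaining eden cycle before the forced full collection.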
trunk/Source/JavaScriptCore/heap/Heap.h
r161557 → r161615

     static void setMarked(const void*);

+    JS_EXPORT_PRIVATE void addToRememberedSet(const JSCell*);
+    bool isInRememberedSet(const JSCell* cell) const
+    {
+        ASSERT(cell);
+        ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
+        return MarkedBlock::blockFor(cell)->isRemembered(cell);
+    }
     static bool isWriteBarrierEnabled();
-    static void writeBarrier(const JSCell*);
+    JS_EXPORT_PRIVATE static void writeBarrier(const JSCell*);
     static void writeBarrier(const JSCell*, JSValue);
     static void writeBarrier(const JSCell*, JSCell*);
-    static uint8_t* addressOfCardFor(JSCell*);

     WriteBarrierBuffer& writeBarrierBuffer() { return m_writeBarrierBuffer; }
…
     // true if collection is in progress
     inline bool isCollecting();
+    inline HeapOperation operationInProgress() { return m_operationInProgress; }
     // true if an allocation or collection is in progress
     inline bool isBusy();
…
     void markProtectedObjects(HeapRootVisitor&);
     void markTempSortVectors(HeapRootVisitor&);
+    template <HeapOperation collectionType>
     void copyBackingStores();
     void harvestWeakReferences();
…
     size_t m_sizeAfterLastCollect;

-    size_t m_bytesAllocatedLimit;
-    size_t m_bytesAllocated;
-    size_t m_bytesAbandoned;
-
+    size_t m_bytesAllocatedThisCycle;
+    size_t m_bytesAbandonedThisCycle;
+    size_t m_maxEdenSize;
+    size_t m_maxHeapSize;
+    bool m_shouldDoFullCollection;
     size_t m_totalBytesVisited;
     size_t m_totalBytesCopied;
…
     GCIncomingRefCountedSet<ArrayBuffer> m_arrayBuffers;
     size_t m_extraMemoryUsage;
+
+    HashSet<const JSCell*> m_copyingRememberedSet;

     ProtectCountSet m_protectedValues;
…
         return false;
     if (Options::gcMaxHeapSize())
-        return m_bytesAllocated > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
-    return m_bytesAllocated > m_bytesAllocatedLimit && m_isSafeToCollect && m_operationInProgress == NoOperation;
+        return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
+    return m_bytesAllocatedThisCycle > m_maxEdenSize && m_isSafeToCollect && m_operationInProgress == NoOperation;
 }
…
 bool Heap::isCollecting()
 {
-    return m_operationInProgress == Collection;
+    return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
 }
…
 inline bool Heap::isWriteBarrierEnabled()
 {
-#if ENABLE(WRITE_BARRIER_PROFILING)
+#if ENABLE(WRITE_BARRIER_PROFILING) || ENABLE(GGC)
     return true;
 #else
…
 }

-inline void Heap::writeBarrier(const JSCell*)
-{
+inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
+{
+#if ENABLE(WRITE_BARRIER_PROFILING)
     WriteBarrierCounters::countWriteBarrier();
-}
-
-inline void Heap::writeBarrier(const JSCell*, JSCell*)
-{
+#endif
+    if (!from || !isMarked(from))
+        return;
+    if (!to || isMarked(to))
+        return;
+    Heap::heap(from)->addToRememberedSet(from);
+}
+
+inline void Heap::writeBarrier(const JSCell* from, JSValue to)
+{
+#if ENABLE(WRITE_BARRIER_PROFILING)
     WriteBarrierCounters::countWriteBarrier();
-}
-
-inline void Heap::writeBarrier(const JSCell*, JSValue)
-{
-    WriteBarrierCounters::countWriteBarrier();
+#endif
+    if (!to.isCell())
+        return;
+    writeBarrier(from, to.asCell());
 }
trunk/Source/JavaScriptCore/heap/HeapOperation.h
r161557 → r161615

 namespace JSC {

-enum HeapOperation { NoOperation, Allocation, Collection };
+enum HeapOperation { NoOperation, Allocation, FullCollection, EdenCollection };

 } // namespace JSC
trunk/Source/JavaScriptCore/heap/MarkStack.cpp
r161557 → r161615

 MarkStackArray::~MarkStackArray()
 {
-    ASSERT(m_numberOfSegments == 1 && m_segments.size() == 1);
+    ASSERT(m_numberOfSegments == 1);
+    ASSERT(m_segments.size() == 1);
     m_blockAllocator.deallocate(MarkStackSegment::destroy(m_segments.removeHead()));
+    m_numberOfSegments--;
+    ASSERT(!m_numberOfSegments);
+    ASSERT(!m_segments.size());
+}
+
+void MarkStackArray::clear()
+{
+    if (!m_segments.head())
+        return;
+    MarkStackSegment* next;
+    for (MarkStackSegment* current = m_segments.head(); current->next(); current = next) {
+        next = current->next();
+        m_segments.remove(current);
+        m_blockAllocator.deallocate(MarkStackSegment::destroy(current));
+    }
+    m_top = 0;
+    m_numberOfSegments = 1;
+#if !ASSERT_DISABLED
+    m_segments.head()->m_top = 0;
+#endif
 }
…
 }

+void MarkStackArray::fillVector(Vector<const JSCell*>& vector)
+{
+    ASSERT(vector.size() == size());
+
+    MarkStackSegment* currentSegment = m_segments.head();
+    if (!currentSegment)
+        return;
+
+    unsigned count = 0;
+    for (unsigned i = 0; i < m_top; ++i) {
+        ASSERT(currentSegment->data()[i]);
+        vector[count++] = currentSegment->data()[i];
+    }
+
+    currentSegment = currentSegment->next();
+    while (currentSegment) {
+        for (unsigned i = 0; i < s_segmentCapacity; ++i) {
+            ASSERT(currentSegment->data()[i]);
+            vector[count++] = currentSegment->data()[i];
+        }
+        currentSegment = currentSegment->next();
+    }
+}
+
 } // namespace JSC
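Taken together, clear() drops every segment but the head and resets the stack to empty, while fillVector() copies every cell on the stack into a caller-sized vector without popping. Heap::markRoots (in the Heap.cpp hunk above) relies on this pairing: at the start of a collection the mark stack still holds the remembered set, which fillVector() snapshots so the per-cell remembered bits can be cleared once marking completes.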
trunk/Source/JavaScriptCore/heap/MarkStack.h
r161557 → r161615

 #include "HeapBlock.h"
 #include <wtf/StdLibExtras.h>
+#include <wtf/Vector.h>

 namespace JSC {
…
     bool isEmpty();

+    void fillVector(Vector<const JSCell*>&);
+    void clear();
+
 private:
     template <size_t size> struct CapacityFromSize {
trunk/Source/JavaScriptCore/heap/MarkedAllocator.cpp
r161557 → r161615

 namespace JSC {

-bool MarkedAllocator::isPagedOut(double deadline)
+static bool isListPagedOut(double deadline, DoublyLinkedList<MarkedBlock>& list)
 {
     unsigned itersSinceLastTimeCheck = 0;
-    MarkedBlock* block = m_blockList.head();
+    MarkedBlock* block = list.head();
     while (block) {
         block = block->next();
…
         }
     }
+    return false;
+}

+bool MarkedAllocator::isPagedOut(double deadline)
+{
+    if (isListPagedOut(deadline, m_blockList))
+        return true;
     return false;
 }
…
     DelayedReleaseScope delayedReleaseScope(*m_markedSpace);
     if (m_currentBlock) {
-        ASSERT(m_currentBlock == m_blocksToSweep);
+        ASSERT(m_currentBlock == m_nextBlockToSweep);
         m_currentBlock->didConsumeFreeList();
-        m_blocksToSweep = m_currentBlock->next();
+        m_nextBlockToSweep = m_currentBlock->next();
     }

-    for (MarkedBlock*& block = m_blocksToSweep; block; block = block->next()) {
+    MarkedBlock* next;
+    for (MarkedBlock*& block = m_nextBlockToSweep; block; block = next) {
+        next = block->next();
+
         MarkedBlock::FreeList freeList = block->sweep(MarkedBlock::SweepToFreeList);
+
         if (!freeList.head) {
             block->didConsumeEmptyFreeList();
+            m_blockList.remove(block);
+            m_blockList.push(block);
+            if (!m_lastFullBlock)
+                m_lastFullBlock = block;
             continue;
         }
…
         m_freeList.head = head->next;
         ASSERT(head);
+        m_markedSpace->didAllocateInBlock(m_currentBlock);
         return head;
     }
…
     m_blockList.append(block);
-    m_blocksToSweep = m_currentBlock = block;
+    m_nextBlockToSweep = m_currentBlock = block;
     m_freeList = block->sweep(MarkedBlock::SweepToFreeList);
     m_markedSpace->didAddBlock(block);
…
         m_freeList = MarkedBlock::FreeList();
     }
-    if (m_blocksToSweep == block)
-        m_blocksToSweep = m_blocksToSweep->next();
+    if (m_nextBlockToSweep == block)
+        m_nextBlockToSweep = m_nextBlockToSweep->next();
+
+    if (block == m_lastFullBlock)
+        m_lastFullBlock = m_lastFullBlock->prev();
+
     m_blockList.remove(block);
 }

+void MarkedAllocator::reset()
+{
+    m_lastActiveBlock = 0;
+    m_currentBlock = 0;
+    m_freeList = MarkedBlock::FreeList();
+    if (m_heap->operationInProgress() == FullCollection)
+        m_lastFullBlock = 0;
+
+    if (m_lastFullBlock)
+        m_nextBlockToSweep = m_lastFullBlock->next() ? m_lastFullBlock->next() : m_lastFullBlock;
+    else
+        m_nextBlockToSweep = m_blockList.head();
+}
+
 } // namespace JSC
trunk/Source/JavaScriptCore/heap/MarkedAllocator.h
r161557 → r161615

     MarkedBlock* m_currentBlock;
     MarkedBlock* m_lastActiveBlock;
-    MarkedBlock* m_blocksToSweep;
+    MarkedBlock* m_nextBlockToSweep;
+    MarkedBlock* m_lastFullBlock;
     DoublyLinkedList<MarkedBlock> m_blockList;
     size_t m_cellSize;
…
     : m_currentBlock(0)
     , m_lastActiveBlock(0)
-    , m_blocksToSweep(0)
+    , m_nextBlockToSweep(0)
+    , m_lastFullBlock(0)
     , m_cellSize(0)
     , m_destructorType(MarkedBlock::None)
…
 #endif
     return head;
 }
-
-inline void MarkedAllocator::reset()
-{
-    m_lastActiveBlock = 0;
-    m_currentBlock = 0;
-    m_freeList = MarkedBlock::FreeList();
-    m_blocksToSweep = m_blockList.head();
-}
trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp
r161557 → r161615

 }

+void MarkedBlock::clearMarks()
+{
+    if (heap()->operationInProgress() == JSC::EdenCollection)
+        this->clearMarksWithCollectionType<EdenCollection>();
+    else
+        this->clearMarksWithCollectionType<FullCollection>();
+}
+
+void MarkedBlock::clearRememberedSet()
+{
+    m_rememberedSet.clearAll();
+}
+
+template <HeapOperation collectionType>
+void MarkedBlock::clearMarksWithCollectionType()
+{
+    ASSERT(collectionType == FullCollection || collectionType == EdenCollection);
+    HEAP_LOG_BLOCK_STATE_TRANSITION(this);
+
+    ASSERT(m_state != New && m_state != FreeListed);
+    if (collectionType == FullCollection) {
+        m_marks.clearAll();
+        m_rememberedSet.clearAll();
+    }
+
+    // This will become true at the end of the mark phase. We set it now to
+    // avoid an extra pass to do so later.
+    m_state = Marked;
+}
+
+void MarkedBlock::lastChanceToFinalize()
+{
+    m_weakSet.lastChanceToFinalize();
+
+    clearNewlyAllocated();
+    clearMarksWithCollectionType<FullCollection>();
+    sweep();
+}
+
 MarkedBlock::FreeList MarkedBlock::resumeAllocating()
 {
trunk/Source/JavaScriptCore/heap/MarkedBlock.h
r161557 → r161615

 #include "HeapBlock.h"

+#include "HeapOperation.h"
 #include "WeakSet.h"
 #include <wtf/Bitmap.h>
…
 public:
-    static const size_t atomSize = 8; // bytes
+    static const size_t atomSize = 16; // bytes
     static const size_t atomShiftAmount = 4; // log_2(atomSize) FIXME: Change atomSize to 16.
     static const size_t blockSize = 64 * KB;
…
     FreeList resumeAllocating(); // Call this if you canonicalized a block for some non-collection related purpose.
     void didConsumeEmptyFreeList(); // Call this if you sweep a block, but the returned FreeList is empty.
+    void didSweepToNoAvail(); // Call this if you sweep a block and get an empty free list back.

     // Returns true if the "newly allocated" bitmap was non-null
     bool clearNewlyAllocated();
     void clearMarks();
+    void clearRememberedSet();
+    template <HeapOperation collectionType>
+    void clearMarksWithCollectionType();
+
     size_t markCount();
     bool isEmpty();
…
     void setMarked(const void*);
     void clearMarked(const void*);
+
+    void setRemembered(const void*);
+    void clearRemembered(const void*);
+    void atomicClearRemembered(const void*);
+    bool isRemembered(const void*);

     bool isNewlyAllocated(const void*);
…
     size_t m_endAtom; // This is a fuzzy end. Always test for < m_endAtom.
 #if ENABLE(PARALLEL_GC)
-    WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic> m_marks;
+    WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic, uint8_t> m_marks;
+    WTF::Bitmap<atomsPerBlock, WTF::BitmapAtomic, uint8_t> m_rememberedSet;
 #else
-    WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic> m_marks;
+    WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic, uint8_t> m_marks;
+    WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic, uint8_t> m_rememberedSet;
 #endif
     OwnPtr<WTF::Bitmap<atomsPerBlock>> m_newlyAllocated;
…
 }

-inline void MarkedBlock::lastChanceToFinalize()
-{
-    m_weakSet.lastChanceToFinalize();
-
-    clearNewlyAllocated();
-    clearMarks();
-    sweep();
-}
-
 inline MarkedAllocator* MarkedBlock::allocator() const
 {
…
     ASSERT(!m_newlyAllocated);
-#ifndef NDEBUG
-    for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell)
-        ASSERT(m_marks.get(i));
-#endif
     ASSERT(m_state == FreeListed);
     m_state = Marked;
 }

-inline void MarkedBlock::clearMarks()
-{
-    HEAP_LOG_BLOCK_STATE_TRANSITION(this);
-
-    ASSERT(m_state != New && m_state != FreeListed);
-    m_marks.clearAll();
-
-    // This will become true at the end of the mark phase. We set it now to
-    // avoid an extra pass to do so later.
-    m_state = Marked;
-}
-
 inline size_t MarkedBlock::markCount()
 {
…
 {
     return (reinterpret_cast<Bits>(p) - reinterpret_cast<Bits>(this)) / atomSize;
 }
+
+inline void MarkedBlock::setRemembered(const void* p)
+{
+    m_rememberedSet.set(atomNumber(p));
+}
+
+inline void MarkedBlock::clearRemembered(const void* p)
+{
+    m_rememberedSet.clear(atomNumber(p));
+}
+
+inline void MarkedBlock::atomicClearRemembered(const void* p)
+{
+    m_rememberedSet.concurrentTestAndClear(atomNumber(p));
+}
+
+inline bool MarkedBlock::isRemembered(const void* p)
+{
+    return m_rememberedSet.get(atomNumber(p));
+}
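A quick check of the atomSize change against the constants in this header: with blockSize = 64 * KB and atomSize = 16, a block holds 64 * 1024 / 16 = 4096 atoms, so each per-atom bitmap (m_marks and the new m_rememberedSet) occupies 4096 bits = 512 bytes, about 1.6% of the block for the pair. At the old atomSize of 8, the two bitmaps would have needed 2 KB per block; this is the overhead the ChangeLog says the larger atom size "pays for". Note that atomShiftAmount was already 4, i.e. log2(16), so the FIXME on that line is effectively resolved by this change.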
trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp
r161557 → r161615

     Free free(Free::FreeAll, this);
     forEachBlock(free);
+    ASSERT(!m_blocks.set().size());
 }
…
     m_normalDestructorSpace.largeAllocator.reset();
     m_immortalStructureDestructorSpace.largeAllocator.reset();
+
+    m_blocksWithNewObjects.clear();
 }
…
 {
     VisitWeakSet visitWeakSet(heapRootVisitor);
-    forEachBlock(visitWeakSet);
+    if (m_heap->operationInProgress() == EdenCollection) {
+        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+            visitWeakSet(m_blocksWithNewObjects[i]);
+    } else
+        forEachBlock(visitWeakSet);
 }

 void MarkedSpace::reapWeakSets()
 {
-    forEachBlock<ReapWeakSet>();
+    if (m_heap->operationInProgress() == EdenCollection) {
+        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+            m_blocksWithNewObjects[i]->reapWeakSet();
+    } else
+        forEachBlock<ReapWeakSet>();
 }
…
 }

+#ifndef NDEBUG
+struct VerifyMarked : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block) { ASSERT(block->needsSweeping()); }
+};
+#endif
+
+void MarkedSpace::clearMarks()
+{
+    if (m_heap->operationInProgress() == EdenCollection) {
+        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+            m_blocksWithNewObjects[i]->clearMarks();
+    } else
+        forEachBlock<ClearMarks>();
+#ifndef NDEBUG
+    forEachBlock<VerifyMarked>();
+#endif
+}
+
 void MarkedSpace::willStartIterating()
 {
trunk/Source/JavaScriptCore/heap/MarkedSpace.h
r161557 → r161615

 struct ClearMarks : MarkedBlock::VoidFunctor {
-    void operator()(MarkedBlock* block) { block->clearMarks(); }
+    void operator()(MarkedBlock* block)
+    {
+        block->clearMarks();
+    }
+};
+
+struct ClearRememberedSet : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block)
+    {
+        block->clearRememberedSet();
+    }
 };
…
     void didAddBlock(MarkedBlock*);
     void didConsumeFreeList(MarkedBlock*);
+    void didAllocateInBlock(MarkedBlock*);

     void clearMarks();
+    void clearRememberedSet();
     void clearNewlyAllocated();
     void sweep();
…
     bool m_isIterating;
     MarkedBlockSet m_blocks;
+    Vector<MarkedBlock*> m_blocksWithNewObjects;

     DelayedReleaseScope* m_currentDelayedReleaseScope;
…
 }

-inline void MarkedSpace::clearMarks()
-{
-    forEachBlock<ClearMarks>();
+inline void MarkedSpace::didAllocateInBlock(MarkedBlock* block)
+{
+    m_blocksWithNewObjects.append(block);
+}
+
+inline void MarkedSpace::clearRememberedSet()
+{
+    forEachBlock<ClearRememberedSet>();
 }
trunk/Source/JavaScriptCore/heap/SlotVisitor.cpp
r161557 → r161615

 SlotVisitor::~SlotVisitor()
 {
-    ASSERT(m_stack.isEmpty());
+    clearMarkStack();
 }
…
         m_shouldHashCons = false;
     }
+}
+
+void SlotVisitor::clearMarkStack()
+{
+    m_stack.clear();
 }
trunk/Source/JavaScriptCore/heap/SlotVisitor.h
r161557 → r161615

     ~SlotVisitor();

+    MarkStackArray& markStack() { return m_stack; }
+
+    Heap* heap() const;
+
     void append(ConservativeRoots&);
…
     template<typename T>
     void appendUnbarrieredWeak(Weak<T>*);
+    void unconditionallyAppend(JSCell*);

     void addOpaqueRoot(void*);
…
     int opaqueRootCount();

-    GCThreadSharedData& sharedData() { return m_shared; }
+    GCThreadSharedData& sharedData() const { return m_shared; }
     bool isEmpty() { return m_stack.isEmpty(); }

     void setup();
     void reset();
+    void clearMarkStack();

     size_t bytesVisited() const { return m_bytesVisited; }
…
     void copyLater(JSCell*, CopyToken, void*, size_t);

-    void reportExtraMemoryUsage(size_t size);
+    void reportExtraMemoryUsage(JSCell* owner, size_t);

     void addWeakReferenceHarvester(WeakReferenceHarvester*);
trunk/Source/JavaScriptCore/heap/SlotVisitorInlines.h
r161557 → r161615

     MARK_LOG_CHILD(*this, cell);

+    unconditionallyAppend(cell);
+}
+
+ALWAYS_INLINE void SlotVisitor::unconditionallyAppend(JSCell* cell)
+{
+    ASSERT(Heap::isMarked(cell));
+    m_visitCount++;
+
     // Should never attempt to mark something that is zapped.
     ASSERT(!cell->isZapped());
…
 {
     ASSERT(bytes);
+    // We don't do any copying during EdenCollections.
+    ASSERT(heap()->operationInProgress() != EdenCollection);
+
     m_bytesCopied += bytes;
…
     }

-    if (block->isPinned())
-        return;
-
     block->reportLiveBytes(owner, token, bytes);
 }

-inline void SlotVisitor::reportExtraMemoryUsage(size_t size)
-{
+inline void SlotVisitor::reportExtraMemoryUsage(JSCell* owner, size_t size)
+{
+    // We don't want to double-count the extra memory that was reported in previous collections.
+    if (heap()->operationInProgress() == EdenCollection && MarkedBlock::blockFor(owner)->isRemembered(owner))
+        return;
+
     size_t* counter = &m_shared.m_vm->heap.m_extraMemoryUsage;
…
 }

+inline Heap* SlotVisitor::heap() const
+{
+    return &sharedData().m_vm->heap;
+}
+
 } // namespace JSC
trunk/Source/JavaScriptCore/jit/Repatch.cpp
r161557 → r161615

 #include "RepatchBuffer.h"
 #include "ScratchRegisterAllocator.h"
+#include "StackAlignment.h"
 #include "StructureRareDataInlines.h"
 #include "StructureStubClearingWatchpoint.h"
trunk/Source/JavaScriptCore/runtime/JSGenericTypedArrayViewInlines.h
r161557 → r161615

     case OversizeTypedArray: {
-        visitor.reportExtraMemoryUsage(thisObject->byteSize());
+        visitor.reportExtraMemoryUsage(thisObject, thisObject->byteSize());
         break;
     }
trunk/Source/JavaScriptCore/runtime/JSPropertyNameIterator.h
r161557 → r161615

 }

-inline void StructureRareData::setEnumerationCache(VM& vm, const Structure* owner, JSPropertyNameIterator* value)
+inline void StructureRareData::setEnumerationCache(VM& vm, const Structure*, JSPropertyNameIterator* value)
 {
-    m_enumerationCache.set(vm, owner, value);
+    m_enumerationCache.set(vm, this, value);
 }
trunk/Source/JavaScriptCore/runtime/JSString.cpp
r161557 → r161615

     StringImpl* impl = thisObject->m_value.impl();
     ASSERT(impl);
-    visitor.reportExtraMemoryUsage(impl->costDuringGC());
+    visitor.reportExtraMemoryUsage(thisObject, impl->costDuringGC());
 }
trunk/Source/JavaScriptCore/runtime/StructureRareDataInlines.h
r161557 → r161615

 }

-inline void StructureRareData::setPreviousID(VM& vm, Structure* transition, Structure* structure)
+inline void StructureRareData::setPreviousID(VM& vm, Structure*, Structure* structure)
 {
-    m_previous.set(vm, transition, structure);
+    m_previous.set(vm, this, structure);
 }
…
 }

-inline void StructureRareData::setObjectToStringValue(VM& vm, const JSCell* owner, JSString* value)
+inline void StructureRareData::setObjectToStringValue(VM& vm, const JSCell*, JSString* value)
 {
-    m_objectToStringValue.set(vm, owner, value);
+    m_objectToStringValue.set(vm, this, value);
 }
trunk/Source/JavaScriptCore/runtime/WeakMapData.cpp
r161557 → r161615

     // This isn't exact, but it is close enough, and proportional to the actual
     // external mermory usage.
-    visitor.reportExtraMemoryUsage(thisObj->m_map.capacity() * (sizeof(JSObject*) + sizeof(WriteBarrier<Unknown>)));
+    visitor.reportExtraMemoryUsage(thisObj, thisObj->m_map.capacity() * (sizeof(JSObject*) + sizeof(WriteBarrier<Unknown>)));
 }