Timestamp:
Sep 10, 2012, 11:41:05 AM
Author:
[email protected]
Message:

Combine MarkStack and SlotVisitor into single class
https://bugs.webkit.org/show_bug.cgi?id=96043

Reviewed by Geoff Garen.

Move all of MarkStack into SlotVisitor. The remaining stuff in MarkStack.cpp actually has to do
with MarkStack management/allocation. Cleaned up a few of the header files while I was at it.
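In rough outline, the split after this patch looks like the sketch below (reconstructed from the per-file entries that follow, not the verbatim headers; member lists are abbreviated):

// heap/MarkStack.h -- keeps only the stack machinery:
class MarkStackSegment;          // one chunk of mark-stack entries (see capacityFromSize/sizeFromCapacity)
class MarkStackSegmentAllocator; // hands out and recycles segments
class MarkStackArray;            // segmented LIFO: append/removeLast, plus donate/steal for parallel GC

// heap/SlotVisitor.h -- absorbs the old MarkStack state and traversal:
class SlotVisitor {
public:
    void append(ConservativeRoots&);
    void drain();
    void drainFromShared(SharedDrainMode);
    void mergeOpaqueRoots();
    void harvestWeakReferences();
    void finalizeUnconditionalFinalizers();
    // ...
private:
    MarkStackArray m_stack;
    GCThreadSharedData& m_shared;
    bool m_shouldHashConst;
    // ...
};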

  • CMakeLists.txt:
  • GNUmakefile.list.am:
  • JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.vcproj:
  • JavaScriptCore.xcodeproj/project.pbxproj:
  • Target.pri:
  • bytecode/CodeBlock.cpp:
  • dfg/DFGCommon.h:
  • heap/GCThreadSharedData.cpp:
  • heap/GCThreadSharedData.h:

(GCThreadSharedData):

  • heap/HeapRootVisitor.h:
  • heap/MarkStack.cpp:

(JSC):

  • heap/MarkStack.h:

(JSC):
(MarkStackSegment):
(JSC::MarkStackSegment::data):
(JSC::MarkStackSegment::capacityFromSize):
(JSC::MarkStackSegment::sizeFromCapacity):
(MarkStackSegmentAllocator):
(MarkStackArray):

  • heap/MarkStackInlineMethods.h:

(JSC::MarkStackArray::postIncTop):
(JSC):
(JSC::MarkStackArray::preDecTop):
(JSC::MarkStackArray::setTopForFullSegment):
(JSC::MarkStackArray::setTopForEmptySegment):
(JSC::MarkStackArray::top):
(JSC::MarkStackArray::validatePrevious):
(JSC::MarkStackArray::append):
(JSC::MarkStackArray::canRemoveLast):
(JSC::MarkStackArray::removeLast):
(JSC::MarkStackArray::isEmpty):
(JSC::MarkStackArray::size):

  • heap/SlotVisitor.cpp: Added.

(JSC):
(JSC::SlotVisitor::SlotVisitor):
(JSC::SlotVisitor::~SlotVisitor):
(JSC::SlotVisitor::setup):
(JSC::SlotVisitor::reset):
(JSC::SlotVisitor::append):
(JSC::visitChildren):
(JSC::SlotVisitor::donateKnownParallel):
(JSC::SlotVisitor::drain):
(JSC::SlotVisitor::drainFromShared):
(JSC::SlotVisitor::mergeOpaqueRoots):
(JSC::SlotVisitor::startCopying):
(JSC::SlotVisitor::allocateNewSpaceSlow):
(JSC::SlotVisitor::allocateNewSpaceOrPin):
(JSC::JSString::tryHashConstLock):
(JSC::JSString::releaseHashConstLock):
(JSC::JSString::shouldTryHashConst):
(JSC::SlotVisitor::internalAppend):
(JSC::SlotVisitor::copyAndAppend):
(JSC::SlotVisitor::doneCopying):
(JSC::SlotVisitor::harvestWeakReferences):
(JSC::SlotVisitor::finalizeUnconditionalFinalizers):
(JSC::SlotVisitor::validate):

  • heap/SlotVisitor.h:

(JSC):
(SlotVisitor):
(JSC::SlotVisitor::sharedData):
(JSC::SlotVisitor::isEmpty):
(JSC::SlotVisitor::visitCount):
(JSC::SlotVisitor::resetChildCount):
(JSC::SlotVisitor::childCount):
(JSC::SlotVisitor::incrementChildCount):
(ParallelModeEnabler):
(JSC::ParallelModeEnabler::ParallelModeEnabler):
(JSC::ParallelModeEnabler::~ParallelModeEnabler):

  • heap/SlotVisitorInlineMethods.h:

(JSC::SlotVisitor::append):
(JSC):
(JSC::SlotVisitor::appendUnbarrieredPointer):
(JSC::SlotVisitor::appendUnbarrieredValue):
(JSC::SlotVisitor::internalAppend):
(JSC::SlotVisitor::addWeakReferenceHarvester):
(JSC::SlotVisitor::addUnconditionalFinalizer):
(JSC::SlotVisitor::addOpaqueRoot):
(JSC::SlotVisitor::containsOpaqueRoot):
(JSC::SlotVisitor::opaqueRootCount):
(JSC::SlotVisitor::mergeOpaqueRootsIfNecessary):
(JSC::SlotVisitor::mergeOpaqueRootsIfProfitable):
(JSC::SlotVisitor::donate):
(JSC::SlotVisitor::donateAndDrain):

  • jit/JITWriteBarrier.h:

(JSC::SlotVisitor::append):

  • jit/JumpReplacementWatchpoint.cpp:
  • runtime/JSCell.h:
  • runtime/Structure.h:

(JSC::SlotVisitor::internalAppend):

  • runtime/WriteBarrier.h:

(JSC):
(JSC::SlotVisitor::append):
(JSC::SlotVisitor::appendValues):

  • yarr/YarrJIT.cpp:
File:
1 edited

  • trunk/Source/JavaScriptCore/heap/MarkStack.cpp

r127202 → r128084

Removed from MarkStack.cpp (moved into the new heap/SlotVisitor.cpp):

MarkStack::MarkStack(GCThreadSharedData& shared)
    : m_stack(shared.m_segmentAllocator)
#if !ASSERT_DISABLED
    , m_isCheckingForDefaultMarkViolation(false)
    , m_isDraining(false)
#endif
    , m_visitCount(0)
    , m_isInParallelMode(false)
    , m_shared(shared)
    , m_shouldHashConst(false)
{
}

MarkStack::~MarkStack()
{
    ASSERT(m_stack.isEmpty());
}

void MarkStack::setup()
{
    m_shared.m_shouldHashConst = m_shared.m_globalData->haveEnoughNewStringsToHashConst();
    m_shouldHashConst = m_shared.m_shouldHashConst;
#if ENABLE(PARALLEL_GC)
    for (unsigned i = 0; i < m_shared.m_markingThreadsMarkStack.size(); ++i)
        m_shared.m_markingThreadsMarkStack[i]->m_shouldHashConst = m_shared.m_shouldHashConst;
#endif
}

void MarkStack::reset()
{
    m_visitCount = 0;
    ASSERT(m_stack.isEmpty());
#if ENABLE(PARALLEL_GC)
    ASSERT(m_opaqueRoots.isEmpty()); // Should have merged by now.
#else
    m_opaqueRoots.clear();
#endif
    if (m_shouldHashConst) {
        m_uniqueStrings.clear();
        m_shouldHashConst = false;
    }
}

void MarkStack::append(ConservativeRoots& conservativeRoots)
{
    JSCell** roots = conservativeRoots.roots();
    size_t size = conservativeRoots.size();
    for (size_t i = 0; i < size; ++i)
        internalAppend(roots[i]);
}

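// Hand-inlined dispatch for the hottest cell types: strings, final objects,
// and arrays are visited directly so the marking loop can skip the virtual
// methodTable() lookup in the common case.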
ALWAYS_INLINE static void visitChildren(SlotVisitor& visitor, const JSCell* cell)
{
#if ENABLE(SIMPLE_HEAP_PROFILING)
    m_visitedTypeCounts.count(cell);
#endif

    ASSERT(Heap::isMarked(cell));

    if (isJSString(cell)) {
        JSString::visitChildren(const_cast<JSCell*>(cell), visitor);
        return;
    }

    if (isJSFinalObject(cell)) {
        JSFinalObject::visitChildren(const_cast<JSCell*>(cell), visitor);
        return;
    }

    if (isJSArray(cell)) {
        JSArray::visitChildren(const_cast<JSCell*>(cell), visitor);
        return;
    }

    cell->methodTable()->visitChildren(const_cast<JSCell*>(cell), visitor);
}

void SlotVisitor::donateKnownParallel()
{
    // NOTE: Because we re-try often, we can afford to be conservative, and
    // assume that donating is not profitable.

    // Avoid locking when a thread reaches a dead end in the object graph.
    if (m_stack.size() < 2)
        return;

    // If there's already some shared work queued up, be conservative and assume
    // that donating more is not profitable.
    if (m_shared.m_sharedMarkStack.size())
        return;

    // If we're contending on the lock, be conservative and assume that another
    // thread is already donating.
    MutexTryLocker locker(m_shared.m_markingLock);
    if (!locker.locked())
        return;

    // Otherwise, assume that a thread will go idle soon, and donate.
    m_stack.donateSomeCellsTo(m_shared.m_sharedMarkStack);

    if (m_shared.m_numberOfActiveParallelMarkers < Options::numberOfGCMarkers())
        m_shared.m_markingCondition.broadcast();
}

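// Bounded-batch drain: in parallel mode, visit at most
// minimumNumberOfScansBetweenRebalance() cells at a time, then offer
// (donate) part of the local stack to idle marking threads before refilling.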
void SlotVisitor::drain()
{
    ASSERT(m_isInParallelMode);

#if ENABLE(PARALLEL_GC)
    if (Options::numberOfGCMarkers() > 1) {
        while (!m_stack.isEmpty()) {
            m_stack.refill();
            for (unsigned countdown = Options::minimumNumberOfScansBetweenRebalance(); m_stack.canRemoveLast() && countdown--;)
                visitChildren(*this, m_stack.removeLast());
            donateKnownParallel();
        }

        mergeOpaqueRootsIfNecessary();
        return;
    }
#endif

    while (!m_stack.isEmpty()) {
        m_stack.refill();
        while (m_stack.canRemoveLast())
            visitChildren(*this, m_stack.removeLast());
    }
}

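// Coordinates one thread's participation in parallel marking. A MasterDrain
// caller blocks until there is shared work or global termination; a
// SlaveDrain caller additionally hands back its copied-space block
// (doneCopying) once marking quiesces, and exits when
// m_parallelMarkersShouldExit is set.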
void SlotVisitor::drainFromShared(SharedDrainMode sharedDrainMode)
{
    ASSERT(m_isInParallelMode);

    ASSERT(Options::numberOfGCMarkers());

    bool shouldBeParallel;

#if ENABLE(PARALLEL_GC)
    shouldBeParallel = Options::numberOfGCMarkers() > 1;
#else
    ASSERT(Options::numberOfGCMarkers() == 1);
    shouldBeParallel = false;
#endif

    if (!shouldBeParallel) {
        // This call should be a no-op.
        ASSERT_UNUSED(sharedDrainMode, sharedDrainMode == MasterDrain);
        ASSERT(m_stack.isEmpty());
        ASSERT(m_shared.m_sharedMarkStack.isEmpty());
        return;
    }

#if ENABLE(PARALLEL_GC)
    {
        MutexLocker locker(m_shared.m_markingLock);
        m_shared.m_numberOfActiveParallelMarkers++;
    }
    while (true) {
        {
            MutexLocker locker(m_shared.m_markingLock);
            m_shared.m_numberOfActiveParallelMarkers--;

            // How we wait differs depending on drain mode.
            if (sharedDrainMode == MasterDrain) {
                // Wait until either termination is reached, or until there is some work
                // for us to do.
                while (true) {
                    // Did we reach termination?
                    if (!m_shared.m_numberOfActiveParallelMarkers && m_shared.m_sharedMarkStack.isEmpty()) {
                        // Let any sleeping slaves know it's time for them to give their private CopiedBlocks back
                        m_shared.m_markingCondition.broadcast();
                        return;
                    }

                    // Is there work to be done?
                    if (!m_shared.m_sharedMarkStack.isEmpty())
                        break;

                    // Otherwise wait.
                    m_shared.m_markingCondition.wait(m_shared.m_markingLock);
                }
            } else {
                ASSERT(sharedDrainMode == SlaveDrain);

                // Did we detect termination? If so, let the master know.
                if (!m_shared.m_numberOfActiveParallelMarkers && m_shared.m_sharedMarkStack.isEmpty())
                    m_shared.m_markingCondition.broadcast();

                while (m_shared.m_sharedMarkStack.isEmpty() && !m_shared.m_parallelMarkersShouldExit) {
                    if (!m_shared.m_numberOfActiveParallelMarkers && m_shared.m_sharedMarkStack.isEmpty())
                        doneCopying();
                    m_shared.m_markingCondition.wait(m_shared.m_markingLock);
                }

                // Is the VM exiting? If so, exit this thread.
                if (m_shared.m_parallelMarkersShouldExit) {
                    doneCopying();
                    return;
                }
            }

            size_t idleThreadCount = Options::numberOfGCMarkers() - m_shared.m_numberOfActiveParallelMarkers;
            m_stack.stealSomeCellsFrom(m_shared.m_sharedMarkStack, idleThreadCount);
            m_shared.m_numberOfActiveParallelMarkers++;
        }

        drain();
    }
#endif
}

void MarkStack::mergeOpaqueRoots()
{
    ASSERT(!m_opaqueRoots.isEmpty()); // Should only be called when opaque roots are non-empty.
    {
        MutexLocker locker(m_shared.m_opaqueRootsLock);
        HashSet<void*>::iterator begin = m_opaqueRoots.begin();
        HashSet<void*>::iterator end = m_opaqueRoots.end();
        for (HashSet<void*>::iterator iter = begin; iter != end; ++iter)
            m_shared.m_opaqueRoots.add(*iter);
    }
    m_opaqueRoots.clear();
}

void SlotVisitor::startCopying()
{
    ASSERT(!m_copiedAllocator.isValid());
}

void* SlotVisitor::allocateNewSpaceSlow(size_t bytes)
{
    m_shared.m_copiedSpace->doneFillingBlock(m_copiedAllocator.resetCurrentBlock());
    m_copiedAllocator.setCurrentBlock(m_shared.m_copiedSpace->allocateBlockForCopyingPhase());

    void* result = 0;
    CheckedBoolean didSucceed = m_copiedAllocator.tryAllocate(bytes, &result);
    ASSERT(didSucceed);
    return result;
}

void* SlotVisitor::allocateNewSpaceOrPin(void* ptr, size_t bytes)
{
    if (!checkIfShouldCopyAndPinOtherwise(ptr, bytes))
        return 0;

    return allocateNewSpace(bytes);
}

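// Lock-free try-lock over the string's flag bits: a single weak
// compare-and-swap attempts to set HashConstLock; on contention (bit already
// set, or the CAS loses a race) the caller simply skips hash-consting this
// string rather than spinning.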
ALWAYS_INLINE bool JSString::tryHashConstLock()
{
#if ENABLE(PARALLEL_GC)
    unsigned currentFlags = m_flags;

    if (currentFlags & HashConstLock)
        return false;

    unsigned newFlags = currentFlags | HashConstLock;

    if (!WTF::weakCompareAndSwap(&m_flags, currentFlags, newFlags))
        return false;

    WTF::memoryBarrierAfterLock();
    return true;
#else
    if (isHashConstSingleton())
        return false;

    m_flags |= HashConstLock;

    return true;
#endif
}

ALWAYS_INLINE void JSString::releaseHashConstLock()
{
#if ENABLE(PARALLEL_GC)
    WTF::memoryBarrierBeforeUnlock();
#endif
    m_flags &= ~HashConstLock;
}

ALWAYS_INLINE bool JSString::shouldTryHashConst()
{
    return ((length() > 1) && !isRope() && !isHashConstSingleton());
}

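// Hash-consting: the first string with given contents becomes the canonical
// "singleton"; a later visit to an equal-but-distinct string rewrites the
// visited slot to point at the canonical JSString instead of marking the
// duplicate.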
ALWAYS_INLINE void MarkStack::internalAppend(JSValue* slot)
{
    // This internalAppend is only intended for visits to object and array backing stores,
    // as it can change the JSValue pointed to by the argument when the original JSValue
    // is a string that contains the same contents as another string.

    ASSERT(slot);
    JSValue value = *slot;
    ASSERT(value);
    if (!value.isCell())
        return;

    JSCell* cell = value.asCell();
    if (!cell)
        return;

    if (m_shouldHashConst && cell->isString()) {
        JSString* string = jsCast<JSString*>(cell);
        if (string->shouldTryHashConst() && string->tryHashConstLock()) {
            UniqueStringMap::AddResult addResult = m_uniqueStrings.add(string->string().impl(), value);
            if (addResult.isNewEntry)
                string->setHashConstSingleton();
            else {
                JSValue existingJSValue = addResult.iterator->second;
                if (value != existingJSValue)
                    jsCast<JSString*>(existingJSValue.asCell())->clearHashConstSingleton();
                *slot = existingJSValue;
                string->releaseHashConstLock();
                return;
            }
            string->releaseHashConstLock();
        }
    }

    internalAppend(cell);
}

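// Copies an object/array backing store into the new copied-space block: the
// JSValue portion is re-appended element by element (so string slots can be
// hash-consted via internalAppend above), while the header bytes before the
// values are memcpy'd verbatim.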
void SlotVisitor::copyAndAppend(void** ptr, size_t bytes, JSValue* values, unsigned length)
{
    void* oldPtr = *ptr;
    void* newPtr = allocateNewSpaceOrPin(oldPtr, bytes);
    if (newPtr) {
        size_t jsValuesOffset = static_cast<size_t>(reinterpret_cast<char*>(values) - static_cast<char*>(oldPtr));

        JSValue* newValues = reinterpret_cast_ptr<JSValue*>(static_cast<char*>(newPtr) + jsValuesOffset);
        for (unsigned i = 0; i < length; i++) {
            JSValue& value = values[i];
            newValues[i] = value;
            if (!value)
                continue;
            internalAppend(&newValues[i]);
        }

        memcpy(newPtr, oldPtr, jsValuesOffset);
        *ptr = newPtr;
    } else
        append(values, length);
}

void SlotVisitor::doneCopying()
{
    if (!m_copiedAllocator.isValid())
        return;

    m_shared.m_copiedSpace->doneFillingBlock(m_copiedAllocator.resetCurrentBlock());
}

void SlotVisitor::harvestWeakReferences()
{
    for (WeakReferenceHarvester* current = m_shared.m_weakReferenceHarvesters.head(); current; current = current->next())
        current->visitWeakReferences(*this);
}

void SlotVisitor::finalizeUnconditionalFinalizers()
{
    while (m_shared.m_unconditionalFinalizers.hasNext())
        m_shared.m_unconditionalFinalizers.removeNext()->finalizeUnconditionally();
}

#if ENABLE(GC_VALIDATION)
void MarkStack::validate(JSCell* cell)
{
    if (!cell) {
        dataLog("cell is NULL\n");
        CRASH();
    }

    if (!cell->structure()) {
        dataLog("cell at %p has a null structure\n", cell);
        CRASH();
    }

    // Both the cell's structure, and the cell's structure's structure should be the Structure Structure.
    // I hate this sentence.
    if (cell->structure()->structure()->JSCell::classInfo() != cell->structure()->JSCell::classInfo()) {
        const char* parentClassName = 0;
        const char* ourClassName = 0;
        if (cell->structure()->structure() && cell->structure()->structure()->JSCell::classInfo())
            parentClassName = cell->structure()->structure()->JSCell::classInfo()->className;
        if (cell->structure()->JSCell::classInfo())
            ourClassName = cell->structure()->JSCell::classInfo()->className;
        dataLog("parent structure (%p <%s>) of cell at %p doesn't match cell's structure (%p <%s>)\n",
                cell->structure()->structure(), parentClassName, cell, cell->structure(), ourClassName);
        CRASH();
    }
}
#else
void MarkStack::validate(JSCell*)
{
}
#endif

} // namespace JSC
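For context, a marking thread drives the combined visitor in roughly this shape (a sketch only; the thread glue lives in GCThreadSharedData and is not part of this hunk, so names other than ParallelModeEnabler, SlotVisitor, and SlaveDrain are illustrative):

void markingThread(SlotVisitor& visitor) // hypothetical driver, not from this diff
{
    // ParallelModeEnabler sets m_isInParallelMode for its scope, which
    // drainFromShared() asserts on entry.
    ParallelModeEnabler enabler(visitor);
    visitor.drainFromShared(SlotVisitor::SlaveDrain); // steal, drain(), repeat until told to exit
}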