Timestamp:
Aug 22, 2012, 2:49:16 PM
Author:
[email protected]
Message:

Separate MarkStackThreadSharedData from MarkStack
https://bugs.webkit.org/show_bug.cgi?id=94294

Reviewed by Filip Pizlo.

MarkStackThreadSharedData will soon also hold the data needed for a parallel copying
mode, so to separate concerns we should split it out into its own set of files
and rename it to GCThreadSharedData. For now this is a purely cosmetic refactoring.
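
A minimal sketch of what the new heap/GCThreadSharedData.h could look like, assembled from the MarkStackThreadSharedData declaration that the diff below removes from MarkStack.h; the include list and member order are assumptions, and the landed file may differ in detail:

    #ifndef GCThreadSharedData_h
    #define GCThreadSharedData_h

    // Assumed includes: MarkStack.h supplies MarkStackArray and
    // MarkStackSegmentAllocator; the WTF headers supply the containers
    // and threading primitives used below.
    #include "ListableHandler.h"
    #include "MarkStack.h"
    #include "UnconditionalFinalizer.h"
    #include "WeakReferenceHarvester.h"
    #include <wtf/HashSet.h>
    #include <wtf/Threading.h>
    #include <wtf/Vector.h>

    namespace JSC {

    class CopiedSpace;
    class JSGlobalData;
    class MarkStack;
    class SlotVisitor;

    // The same state MarkStackThreadSharedData held inside MarkStack.h, now in
    // its own header so a parallel copying mode can share it later.
    class GCThreadSharedData {
    public:
        GCThreadSharedData(JSGlobalData*);
        ~GCThreadSharedData();

        void reset();

    #if ENABLE(PARALLEL_GC)
        void resetChildren();
        size_t childVisitCount();
        size_t childDupStrings();
    #endif

    private:
        friend class MarkStack;
        friend class SlotVisitor;

    #if ENABLE(PARALLEL_GC)
        void markingThreadMain(SlotVisitor*);
        static void markingThreadStartFunc(void* heap);
    #endif

        JSGlobalData* m_globalData;
        CopiedSpace* m_copiedSpace;

        MarkStackSegmentAllocator m_segmentAllocator;

        bool m_shouldHashConst;

        Vector<ThreadIdentifier> m_markingThreads;
        Vector<MarkStack*> m_markingThreadsMarkStack;

        Mutex m_markingLock;
        ThreadCondition m_markingCondition;
        MarkStackArray m_sharedMarkStack;
        unsigned m_numberOfActiveParallelMarkers;
        bool m_parallelMarkersShouldExit;

        Mutex m_opaqueRootsLock;
        HashSet<void*> m_opaqueRoots;

        ListableHandler<WeakReferenceHarvester>::List m_weakReferenceHarvesters;
        ListableHandler<UnconditionalFinalizer>::List m_unconditionalFinalizers;
    };

    } // namespace JSC

    #endif // GCThreadSharedData_h

With the definition out of MarkStack.h, the forward declaration the diff adds (class GCThreadSharedData;) is all that header needs, and the inline methods that touch the shared members move into heap/MarkStackInlineMethods.h.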

  • CMakeLists.txt:
  • GNUmakefile.list.am:
  • JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.vcproj:
  • JavaScriptCore.xcodeproj/project.pbxproj:
  • Target.pri:
  • heap/GCThreadSharedData.cpp: Added.

(JSC):
(JSC::GCThreadSharedData::resetChildren):
(JSC::GCThreadSharedData::childVisitCount):
(JSC::GCThreadSharedData::markingThreadMain):
(JSC::GCThreadSharedData::markingThreadStartFunc):
(JSC::GCThreadSharedData::GCThreadSharedData):
(JSC::GCThreadSharedData::~GCThreadSharedData):
(JSC::GCThreadSharedData::reset):

  • heap/GCThreadSharedData.h: Added.

(JSC):
(GCThreadSharedData):

  • heap/Heap.h:

(Heap):

  • heap/ListableHandler.h:

(ListableHandler):

  • heap/MarkStack.cpp:

(JSC::MarkStack::MarkStack):
(JSC::MarkStack::~MarkStack):

  • heap/MarkStack.h:

(JSC):
(MarkStack):
(JSC::MarkStack::sharedData):

  • heap/MarkStackInlineMethods.h: Added (see the sketch after this list).

(JSC):
(JSC::MarkStack::append):
(JSC::MarkStack::appendUnbarrieredPointer):
(JSC::MarkStack::appendUnbarrieredValue):
(JSC::MarkStack::internalAppend):
(JSC::MarkStack::addWeakReferenceHarvester):
(JSC::MarkStack::addUnconditionalFinalizer):
(JSC::MarkStack::addOpaqueRoot):
(JSC::MarkStack::containsOpaqueRoot):
(JSC::MarkStack::opaqueRootCount):

  • heap/SlotVisitor.h:

(JSC):
(SlotVisitor):
(JSC::SlotVisitor::SlotVisitor):
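
A hedged sketch of the new heap/MarkStackInlineMethods.h, inferred from the inline definitions that the diff below removes from MarkStack.h; only two of the moved functions are shown, and the include names and guard are assumptions:

    #ifndef MarkStackInlineMethods_h
    #define MarkStackInlineMethods_h

    #include "GCThreadSharedData.h"
    #include "MarkStack.h"

    namespace JSC {

    // The bodies are unchanged; only their location moves, so MarkStack.h can
    // get by with a forward declaration of GCThreadSharedData instead of its
    // full definition.
    inline void MarkStack::addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester)
    {
        m_shared.m_weakReferenceHarvesters.addThreadSafe(weakReferenceHarvester);
    }

    inline void MarkStack::addUnconditionalFinalizer(UnconditionalFinalizer* unconditionalFinalizer)
    {
        m_shared.m_unconditionalFinalizers.addThreadSafe(unconditionalFinalizer);
    }

    // append(), appendUnbarrieredPointer(), appendUnbarrieredValue(),
    // internalAppend(), addOpaqueRoot(), containsOpaqueRoot() and
    // opaqueRootCount() follow in the same way, exactly as they appear in the
    // hunks removed from MarkStack.h below.

    } // namespace JSC

    #endif // MarkStackInlineMethods_h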

File:
1 edited

  • trunk/Source/JavaScriptCore/heap/MarkStack.h

--- trunk/Source/JavaScriptCore/heap/MarkStack.h (r123690)
+++ trunk/Source/JavaScriptCore/heap/MarkStack.h (r126354)
@@ -75,4 +75,5 @@
     class JSGlobalData;
     class MarkStack;
+    class GCThreadSharedData;
     class ParallelModeEnabler;
     class Register;
@@ -193,49 +194,4 @@
     };
 
-    class MarkStackThreadSharedData {
-    public:
-        MarkStackThreadSharedData(JSGlobalData*);
-        ~MarkStackThreadSharedData();
-
-        void reset();
-
-#if ENABLE(PARALLEL_GC)
-        void resetChildren();
-        size_t childVisitCount();
-        size_t childDupStrings();
-#endif
-
-    private:
-        friend class MarkStack;
-        friend class SlotVisitor;
-
-#if ENABLE(PARALLEL_GC)
-        void markingThreadMain(SlotVisitor*);
-        static void markingThreadStartFunc(void* heap);
-#endif
-
-        JSGlobalData* m_globalData;
-        CopiedSpace* m_copiedSpace;
-
-        MarkStackSegmentAllocator m_segmentAllocator;
-
-        bool m_shouldHashConst;
-
-        Vector<ThreadIdentifier> m_markingThreads;
-        Vector<MarkStack*> m_markingThreadsMarkStack;
-
-        Mutex m_markingLock;
-        ThreadCondition m_markingCondition;
-        MarkStackArray m_sharedMarkStack;
-        unsigned m_numberOfActiveParallelMarkers;
-        bool m_parallelMarkersShouldExit;
-
-        Mutex m_opaqueRootsLock;
-        HashSet<void*> m_opaqueRoots;
-
-        ListableHandler<WeakReferenceHarvester>::List m_weakReferenceHarvesters;
-        ListableHandler<UnconditionalFinalizer>::List m_unconditionalFinalizers;
-    };
-
     class MarkStack {
         WTF_MAKE_NONCOPYABLE(MarkStack);
@@ -243,5 +199,5 @@
 
     public:
-        MarkStack(MarkStackThreadSharedData&);
+        MarkStack(GCThreadSharedData&);
         ~MarkStack();
 
@@ -260,5 +216,5 @@
         int opaqueRootCount();
 
-        MarkStackThreadSharedData& sharedData() { return m_shared; }
+        GCThreadSharedData& sharedData() { return m_shared; }
         bool isEmpty() { return m_stack.isEmpty(); }
 
@@ -272,13 +228,6 @@
 #endif
 
-        void addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester)
-        {
-            m_shared.m_weakReferenceHarvesters.addThreadSafe(weakReferenceHarvester);
-        }
-
-        void addUnconditionalFinalizer(UnconditionalFinalizer* unconditionalFinalizer)
-        {
-            m_shared.m_unconditionalFinalizers.addThreadSafe(unconditionalFinalizer);
-        }
+        void addWeakReferenceHarvester(WeakReferenceHarvester*);
+        void addUnconditionalFinalizer(UnconditionalFinalizer*);
 
 #if ENABLE(OBJECT_MARK_LOGGING)
@@ -329,5 +278,5 @@
         bool m_isInParallelMode;
 
-        MarkStackThreadSharedData& m_shared;
+        GCThreadSharedData& m_shared;
 
        bool m_shouldHashConst; // Local per-thread copy of shared flag for performance reasons
@@ -339,61 +288,4 @@
 #endif
     };
-
-    inline MarkStack::MarkStack(MarkStackThreadSharedData& shared)
-        : m_stack(shared.m_segmentAllocator)
-#if !ASSERT_DISABLED
-        , m_isCheckingForDefaultMarkViolation(false)
-        , m_isDraining(false)
-#endif
-        , m_visitCount(0)
-        , m_isInParallelMode(false)
-        , m_shared(shared)
-        , m_shouldHashConst(false)
-    {
-    }
-
-    inline MarkStack::~MarkStack()
-    {
-        ASSERT(m_stack.isEmpty());
-    }
-
-    inline void MarkStack::addOpaqueRoot(void* root)
-    {
-#if ENABLE(PARALLEL_GC)
-        if (Options::numberOfGCMarkers() == 1) {
-            // Put directly into the shared HashSet.
-            m_shared.m_opaqueRoots.add(root);
-            return;
-        }
-        // Put into the local set, but merge with the shared one every once in
-        // a while to make sure that the local sets don't grow too large.
-        mergeOpaqueRootsIfProfitable();
-        m_opaqueRoots.add(root);
-#else
-        m_opaqueRoots.add(root);
-#endif
-    }
-
-    inline bool MarkStack::containsOpaqueRoot(void* root)
-    {
-        ASSERT(!m_isInParallelMode);
-#if ENABLE(PARALLEL_GC)
-        ASSERT(m_opaqueRoots.isEmpty());
-        return m_shared.m_opaqueRoots.contains(root);
-#else
-        return m_opaqueRoots.contains(root);
-#endif
-    }
-
-    inline int MarkStack::opaqueRootCount()
-    {
-        ASSERT(!m_isInParallelMode);
-#if ENABLE(PARALLEL_GC)
-        ASSERT(m_opaqueRoots.isEmpty());
-        return m_shared.m_opaqueRoots.size();
-#else
-        return m_opaqueRoots.size();
-#endif
-    }
 
     inline void MarkStackArray::append(const JSCell* cell)
@@ -430,49 +322,4 @@
     }
 
-    ALWAYS_INLINE void MarkStack::append(JSValue* slot, size_t count)
-    {
-        for (size_t i = 0; i < count; ++i) {
-            JSValue& value = slot[i];
-            if (!value)
-                continue;
-            internalAppend(value);
-        }
-    }
-
-    template<typename T>
-    inline void MarkStack::appendUnbarrieredPointer(T** slot)
-    {
-        ASSERT(slot);
-        JSCell* cell = *slot;
-        if (cell)
-            internalAppend(cell);
-    }
-
-    ALWAYS_INLINE void MarkStack::append(JSValue* slot)
-    {
-        ASSERT(slot);
-        internalAppend(*slot);
-    }
-
-    ALWAYS_INLINE void MarkStack::appendUnbarrieredValue(JSValue* slot)
-    {
-        ASSERT(slot);
-        internalAppend(*slot);
-    }
-
-    ALWAYS_INLINE void MarkStack::append(JSCell** slot)
-    {
-        ASSERT(slot);
-        internalAppend(*slot);
-    }
-
-    ALWAYS_INLINE void MarkStack::internalAppend(JSValue value)
-    {
-        ASSERT(value);
-        if (!value.isCell())
-            return;
-        internalAppend(value.asCell());
-    }
-
     class ParallelModeEnabler {
     public: