Changeset 128084 in webkit for trunk/Source/JavaScriptCore
- Timestamp:
- Sep 10, 2012, 11:41:05 AM (13 years ago)
- Location:
- trunk/Source/JavaScriptCore
- Files:
-
- 1 added
- 23 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/CMakeLists.txt
r128065 r128084 120 120 heap/MarkedSpace.cpp 121 121 heap/MarkStack.cpp 122 heap/SlotVisitor.cpp 122 123 heap/WeakSet.cpp 123 124 heap/WeakHandleOwner.cpp -
trunk/Source/JavaScriptCore/ChangeLog
r128071 r128084 1 2012-09-06 Mark Hahnenberg <[email protected]> 2 3 Combine MarkStack and SlotVisitor into single class 4 https://bugs.webkit.org/show_bug.cgi?id=96043 5 6 Reviewed by Geoff Garen. 7 8 Move all of MarkStack into SlotVisitor. The remaining stuff in MarkStack.cpp actually has to do 9 with MarkStack management/allocation. Cleaned up a few of the header files while I was at it. 10 11 * CMakeLists.txt: 12 * GNUmakefile.list.am: 13 * JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.vcproj: 14 * JavaScriptCore.xcodeproj/project.pbxproj: 15 * Target.pri: 16 * bytecode/CodeBlock.cpp: 17 * dfg/DFGCommon.h: 18 * heap/GCThreadSharedData.cpp: 19 * heap/GCThreadSharedData.h: 20 (GCThreadSharedData): 21 * heap/HeapRootVisitor.h: 22 * heap/MarkStack.cpp: 23 (JSC): 24 * heap/MarkStack.h: 25 (JSC): 26 (MarkStackSegment): 27 (JSC::MarkStackSegment::data): 28 (JSC::MarkStackSegment::capacityFromSize): 29 (JSC::MarkStackSegment::sizeFromCapacity): 30 (MarkStackSegmentAllocator): 31 (MarkStackArray): 32 * heap/MarkStackInlineMethods.h: 33 (JSC::MarkStackArray::postIncTop): 34 (JSC): 35 (JSC::MarkStackArray::preDecTop): 36 (JSC::MarkStackArray::setTopForFullSegment): 37 (JSC::MarkStackArray::setTopForEmptySegment): 38 (JSC::MarkStackArray::top): 39 (JSC::MarkStackArray::validatePrevious): 40 (JSC::MarkStackArray::append): 41 (JSC::MarkStackArray::canRemoveLast): 42 (JSC::MarkStackArray::removeLast): 43 (JSC::MarkStackArray::isEmpty): 44 (JSC::MarkStackArray::size): 45 * heap/SlotVisitor.cpp: Added. 
46 (JSC): 47 (JSC::SlotVisitor::SlotVisitor): 48 (JSC::SlotVisitor::~SlotVisitor): 49 (JSC::SlotVisitor::setup): 50 (JSC::SlotVisitor::reset): 51 (JSC::SlotVisitor::append): 52 (JSC::visitChildren): 53 (JSC::SlotVisitor::donateKnownParallel): 54 (JSC::SlotVisitor::drain): 55 (JSC::SlotVisitor::drainFromShared): 56 (JSC::SlotVisitor::mergeOpaqueRoots): 57 (JSC::SlotVisitor::startCopying): 58 (JSC::SlotVisitor::allocateNewSpaceSlow): 59 (JSC::SlotVisitor::allocateNewSpaceOrPin): 60 (JSC::JSString::tryHashConstLock): 61 (JSC::JSString::releaseHashConstLock): 62 (JSC::JSString::shouldTryHashConst): 63 (JSC::SlotVisitor::internalAppend): 64 (JSC::SlotVisitor::copyAndAppend): 65 (JSC::SlotVisitor::doneCopying): 66 (JSC::SlotVisitor::harvestWeakReferences): 67 (JSC::SlotVisitor::finalizeUnconditionalFinalizers): 68 (JSC::SlotVisitor::validate): 69 * heap/SlotVisitor.h: 70 (JSC): 71 (SlotVisitor): 72 (JSC::SlotVisitor::sharedData): 73 (JSC::SlotVisitor::isEmpty): 74 (JSC::SlotVisitor::visitCount): 75 (JSC::SlotVisitor::resetChildCount): 76 (JSC::SlotVisitor::childCount): 77 (JSC::SlotVisitor::incrementChildCount): 78 (ParallelModeEnabler): 79 (JSC::ParallelModeEnabler::ParallelModeEnabler): 80 (JSC::ParallelModeEnabler::~ParallelModeEnabler): 81 * heap/SlotVisitorInlineMethods.h: 82 (JSC::SlotVisitor::append): 83 (JSC): 84 (JSC::SlotVisitor::appendUnbarrieredPointer): 85 (JSC::SlotVisitor::appendUnbarrieredValue): 86 (JSC::SlotVisitor::internalAppend): 87 (JSC::SlotVisitor::addWeakReferenceHarvester): 88 (JSC::SlotVisitor::addUnconditionalFinalizer): 89 (JSC::SlotVisitor::addOpaqueRoot): 90 (JSC::SlotVisitor::containsOpaqueRoot): 91 (JSC::SlotVisitor::opaqueRootCount): 92 (JSC::SlotVisitor::mergeOpaqueRootsIfNecessary): 93 (JSC::SlotVisitor::mergeOpaqueRootsIfProfitable): 94 (JSC::SlotVisitor::donate): 95 (JSC::SlotVisitor::donateAndDrain): 96 * jit/JITWriteBarrier.h: 97 (JSC::SlotVisitor::append): 98 * jit/JumpReplacementWatchpoint.cpp: 99 * runtime/JSCell.h: 100 * 
runtime/Structure.h: 101 (JSC::SlotVisitor::internalAppend): 102 * runtime/WriteBarrier.h: 103 (JSC): 104 (JSC::SlotVisitor::append): 105 (JSC::SlotVisitor::appendValues): 106 * yarr/YarrJIT.cpp: 107 1 108 2012-09-10 Hojong Han <[email protected]> 2 109 -
trunk/Source/JavaScriptCore/GNUmakefile.list.am
r128037 r128084 267 267 Source/JavaScriptCore/heap/IncrementalSweeper.h \ 268 268 Source/JavaScriptCore/heap/IncrementalSweeper.cpp \ 269 Source/JavaScriptCore/heap/SlotVisitor.cpp \ 269 270 Source/JavaScriptCore/heap/SlotVisitor.h \ 270 271 Source/JavaScriptCore/heap/SlotVisitorInlineMethods.h \ -
trunk/Source/JavaScriptCore/JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.def
r128014 r128084 366 366 ?unlockAtomicallyInitializedStaticMutex@WTF@@YAXXZ 367 367 ?unprotect@Heap@JSC@@QAE_NVJSValue@2@@Z 368 ?validate@ MarkStack@JSC@@KAXPAVJSCell@2@@Z368 ?validate@SlotVisitor@JSC@@CAXPAVJSCell@2@@Z 369 369 ?visitChildren@JSGlobalObject@JSC@@SAXPAVJSCell@2@AAVSlotVisitor@2@@Z 370 370 ?visitChildren@JSGlobalThis@JSC@@KAXPAVJSCell@2@AAVSlotVisitor@2@@Z -
trunk/Source/JavaScriptCore/JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.vcproj
r127202 r128084 2387 2387 </File> 2388 2388 <File 2389 RelativePath="..\..\heap\SlotVisitor.cpp" 2390 > 2391 </File> 2392 <File 2389 2393 RelativePath="..\..\heap\Strong.h" 2390 2394 > -
trunk/Source/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj
r127374 r128084 694 694 C21122E215DD9AB300790E3A /* GCThreadSharedData.h in Headers */ = {isa = PBXBuildFile; fileRef = C21122DF15DD9AB300790E3A /* GCThreadSharedData.h */; settings = {ATTRIBUTES = (Private, ); }; }; 695 695 C21122E315DD9AB300790E3A /* MarkStackInlineMethods.h in Headers */ = {isa = PBXBuildFile; fileRef = C21122E015DD9AB300790E3A /* MarkStackInlineMethods.h */; settings = {ATTRIBUTES = (Private, ); }; }; 696 C2160FE715F7E95E00942DFC /* SlotVisitorInlineMethods.h in Headers */ = {isa = PBXBuildFile; fileRef = 0FCB408515C0A3C30048932B /* SlotVisitorInlineMethods.h */; settings = {ATTRIBUTES = (Private, ); }; }; 697 C225494315F7DBAA0065E898 /* SlotVisitor.cpp in Sources */ = {isa = PBXBuildFile; fileRef = C225494215F7DBAA0065E898 /* SlotVisitor.cpp */; }; 696 698 C22B31B9140577D700DB475A /* SamplingCounter.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F77008E1402FDD60078EB39 /* SamplingCounter.h */; settings = {ATTRIBUTES = (Private, ); }; }; 697 699 C240305514B404E60079EB64 /* CopiedSpace.cpp in Sources */ = {isa = PBXBuildFile; fileRef = C240305314B404C90079EB64 /* CopiedSpace.cpp */; }; … … 1456 1458 C21122DF15DD9AB300790E3A /* GCThreadSharedData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GCThreadSharedData.h; sourceTree = "<group>"; }; 1457 1459 C21122E015DD9AB300790E3A /* MarkStackInlineMethods.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MarkStackInlineMethods.h; sourceTree = "<group>"; }; 1460 C225494215F7DBAA0065E898 /* SlotVisitor.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = SlotVisitor.cpp; sourceTree = "<group>"; }; 1458 1461 C240305314B404C90079EB64 /* CopiedSpace.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CopiedSpace.cpp; sourceTree = "<group>"; }; 1459 1462 C25F8BCB157544A900245B71 /* IncrementalSweeper.cpp */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = IncrementalSweeper.cpp; sourceTree = "<group>"; }; … … 1785 1788 isa = PBXGroup; 1786 1789 children = ( 1790 C225494215F7DBAA0065E898 /* SlotVisitor.cpp */, 1787 1791 C21122DE15DD9AB300790E3A /* GCThreadSharedData.cpp */, 1788 1792 C21122DF15DD9AB300790E3A /* GCThreadSharedData.h */, … … 2528 2532 FE20CE9E15F04A9500DF3430 /* LLIntCLoop.h in Headers */, 2529 2533 C21122E215DD9AB300790E3A /* GCThreadSharedData.h in Headers */, 2534 C2160FE715F7E95E00942DFC /* SlotVisitorInlineMethods.h in Headers */, 2530 2535 C2E526BE1590EF000054E48D /* HeapTimer.h in Headers */, 2531 2536 C21122E315DD9AB300790E3A /* MarkStackInlineMethods.h in Headers */, … … 3504 3509 1442566115EDE98D0066A49B /* JSWithScope.cpp in Sources */, 3505 3510 FE20CE9D15F04A9500DF3430 /* LLIntCLoop.cpp in Sources */, 3511 C225494315F7DBAA0065E898 /* SlotVisitor.cpp in Sources */, 3506 3512 ); 3507 3513 runOnlyForDeploymentPostprocessing = 0; -
trunk/Source/JavaScriptCore/Target.pri
r127202 r128084 90 90 heap/MarkedBlock.cpp \ 91 91 heap/MarkedSpace.cpp \ 92 heap/SlotVisitor.cpp \ 92 93 heap/VTableSpectrum.cpp \ 93 94 heap/WriteBarrierSupport.cpp \ -
trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
r127958 r128084 46 46 #include "MethodCallLinkStatus.h" 47 47 #include "RepatchBuffer.h" 48 #include "SlotVisitorInlineMethods.h" 48 49 #include <stdio.h> 49 50 #include <wtf/StringExtras.h> -
trunk/Source/JavaScriptCore/dfg/DFGCommon.h
r126715 r128084 32 32 33 33 #include "CodeOrigin.h" 34 #include "Options.h" 34 35 #include "VirtualRegister.h" 35 36 -
trunk/Source/JavaScriptCore/heap/GCThreadSharedData.cpp
r126354 r128084 30 30 #include "MarkStack.h" 31 31 #include "SlotVisitor.h" 32 #include "SlotVisitorInlineMethods.h" 32 33 #include <wtf/MainThread.h> 33 34 -
trunk/Source/JavaScriptCore/heap/GCThreadSharedData.h
r126354 r128084 27 27 #define GCThreadSharedData_h 28 28 29 #include "ListableHandler.h" 29 30 #include "MarkStack.h" 31 #include "UnconditionalFinalizer.h" 32 #include "WeakReferenceHarvester.h" 33 #include <wtf/HashSet.h> 34 #include <wtf/Threading.h> 30 35 #include <wtf/Vector.h> 31 36 … … 49 54 50 55 private: 51 friend class MarkStack;52 56 friend class SlotVisitor; 53 57 … … 65 69 66 70 Vector<ThreadIdentifier> m_markingThreads; 67 Vector< MarkStack*> m_markingThreadsMarkStack;71 Vector<SlotVisitor*> m_markingThreadsMarkStack; 68 72 69 73 Mutex m_markingLock; -
trunk/Source/JavaScriptCore/heap/HeapRootVisitor.h
r97642 r128084 28 28 29 29 #include "SlotVisitor.h" 30 #include "SlotVisitorInlineMethods.h" 30 31 31 32 namespace JSC { -
trunk/Source/JavaScriptCore/heap/MarkStack.cpp
r127202 r128084 224 224 } 225 225 226 MarkStack::MarkStack(GCThreadSharedData& shared)227 : m_stack(shared.m_segmentAllocator)228 #if !ASSERT_DISABLED229 , m_isCheckingForDefaultMarkViolation(false)230 , m_isDraining(false)231 #endif232 , m_visitCount(0)233 , m_isInParallelMode(false)234 , m_shared(shared)235 , m_shouldHashConst(false)236 {237 }238 239 MarkStack::~MarkStack()240 {241 ASSERT(m_stack.isEmpty());242 }243 244 void MarkStack::setup()245 {246 m_shared.m_shouldHashConst = m_shared.m_globalData->haveEnoughNewStringsToHashConst();247 m_shouldHashConst = m_shared.m_shouldHashConst;248 #if ENABLE(PARALLEL_GC)249 for (unsigned i = 0; i < m_shared.m_markingThreadsMarkStack.size(); ++i)250 m_shared.m_markingThreadsMarkStack[i]->m_shouldHashConst = m_shared.m_shouldHashConst;251 #endif252 }253 254 void MarkStack::reset()255 {256 m_visitCount = 0;257 ASSERT(m_stack.isEmpty());258 #if ENABLE(PARALLEL_GC)259 ASSERT(m_opaqueRoots.isEmpty()); // Should have merged by now.260 #else261 m_opaqueRoots.clear();262 #endif263 if (m_shouldHashConst) {264 m_uniqueStrings.clear();265 m_shouldHashConst = false;266 }267 }268 269 void MarkStack::append(ConservativeRoots& conservativeRoots)270 {271 JSCell** roots = conservativeRoots.roots();272 size_t size = conservativeRoots.size();273 for (size_t i = 0; i < size; ++i)274 internalAppend(roots[i]);275 }276 277 ALWAYS_INLINE static void visitChildren(SlotVisitor& visitor, const JSCell* cell)278 {279 #if ENABLE(SIMPLE_HEAP_PROFILING)280 m_visitedTypeCounts.count(cell);281 #endif282 283 ASSERT(Heap::isMarked(cell));284 285 if (isJSString(cell)) {286 JSString::visitChildren(const_cast<JSCell*>(cell), visitor);287 return;288 }289 290 if (isJSFinalObject(cell)) {291 JSFinalObject::visitChildren(const_cast<JSCell*>(cell), visitor);292 return;293 }294 295 if (isJSArray(cell)) {296 JSArray::visitChildren(const_cast<JSCell*>(cell), visitor);297 return;298 }299 300 cell->methodTable()->visitChildren(const_cast<JSCell*>(cell), visitor);301 
}302 303 void SlotVisitor::donateKnownParallel()304 {305 // NOTE: Because we re-try often, we can afford to be conservative, and306 // assume that donating is not profitable.307 308 // Avoid locking when a thread reaches a dead end in the object graph.309 if (m_stack.size() < 2)310 return;311 312 // If there's already some shared work queued up, be conservative and assume313 // that donating more is not profitable.314 if (m_shared.m_sharedMarkStack.size())315 return;316 317 // If we're contending on the lock, be conservative and assume that another318 // thread is already donating.319 MutexTryLocker locker(m_shared.m_markingLock);320 if (!locker.locked())321 return;322 323 // Otherwise, assume that a thread will go idle soon, and donate.324 m_stack.donateSomeCellsTo(m_shared.m_sharedMarkStack);325 326 if (m_shared.m_numberOfActiveParallelMarkers < Options::numberOfGCMarkers())327 m_shared.m_markingCondition.broadcast();328 }329 330 void SlotVisitor::drain()331 {332 ASSERT(m_isInParallelMode);333 334 #if ENABLE(PARALLEL_GC)335 if (Options::numberOfGCMarkers() > 1) {336 while (!m_stack.isEmpty()) {337 m_stack.refill();338 for (unsigned countdown = Options::minimumNumberOfScansBetweenRebalance(); m_stack.canRemoveLast() && countdown--;)339 visitChildren(*this, m_stack.removeLast());340 donateKnownParallel();341 }342 343 mergeOpaqueRootsIfNecessary();344 return;345 }346 #endif347 348 while (!m_stack.isEmpty()) {349 m_stack.refill();350 while (m_stack.canRemoveLast())351 visitChildren(*this, m_stack.removeLast());352 }353 }354 355 void SlotVisitor::drainFromShared(SharedDrainMode sharedDrainMode)356 {357 ASSERT(m_isInParallelMode);358 359 ASSERT(Options::numberOfGCMarkers());360 361 bool shouldBeParallel;362 363 #if ENABLE(PARALLEL_GC)364 shouldBeParallel = Options::numberOfGCMarkers() > 1;365 #else366 ASSERT(Options::numberOfGCMarkers() == 1);367 shouldBeParallel = false;368 #endif369 370 if (!shouldBeParallel) {371 // This call should be a no-op.372 
ASSERT_UNUSED(sharedDrainMode, sharedDrainMode == MasterDrain);373 ASSERT(m_stack.isEmpty());374 ASSERT(m_shared.m_sharedMarkStack.isEmpty());375 return;376 }377 378 #if ENABLE(PARALLEL_GC)379 {380 MutexLocker locker(m_shared.m_markingLock);381 m_shared.m_numberOfActiveParallelMarkers++;382 }383 while (true) {384 {385 MutexLocker locker(m_shared.m_markingLock);386 m_shared.m_numberOfActiveParallelMarkers--;387 388 // How we wait differs depending on drain mode.389 if (sharedDrainMode == MasterDrain) {390 // Wait until either termination is reached, or until there is some work391 // for us to do.392 while (true) {393 // Did we reach termination?394 if (!m_shared.m_numberOfActiveParallelMarkers && m_shared.m_sharedMarkStack.isEmpty()) {395 // Let any sleeping slaves know it's time for them to give their private CopiedBlocks back396 m_shared.m_markingCondition.broadcast();397 return;398 }399 400 // Is there work to be done?401 if (!m_shared.m_sharedMarkStack.isEmpty())402 break;403 404 // Otherwise wait.405 m_shared.m_markingCondition.wait(m_shared.m_markingLock);406 }407 } else {408 ASSERT(sharedDrainMode == SlaveDrain);409 410 // Did we detect termination? If so, let the master know.411 if (!m_shared.m_numberOfActiveParallelMarkers && m_shared.m_sharedMarkStack.isEmpty())412 m_shared.m_markingCondition.broadcast();413 414 while (m_shared.m_sharedMarkStack.isEmpty() && !m_shared.m_parallelMarkersShouldExit) {415 if (!m_shared.m_numberOfActiveParallelMarkers && m_shared.m_sharedMarkStack.isEmpty())416 doneCopying();417 m_shared.m_markingCondition.wait(m_shared.m_markingLock);418 }419 420 // Is the VM exiting? 
If so, exit this thread.421 if (m_shared.m_parallelMarkersShouldExit) {422 doneCopying();423 return;424 }425 }426 427 size_t idleThreadCount = Options::numberOfGCMarkers() - m_shared.m_numberOfActiveParallelMarkers;428 m_stack.stealSomeCellsFrom(m_shared.m_sharedMarkStack, idleThreadCount);429 m_shared.m_numberOfActiveParallelMarkers++;430 }431 432 drain();433 }434 #endif435 }436 437 void MarkStack::mergeOpaqueRoots()438 {439 ASSERT(!m_opaqueRoots.isEmpty()); // Should only be called when opaque roots are non-empty.440 {441 MutexLocker locker(m_shared.m_opaqueRootsLock);442 HashSet<void*>::iterator begin = m_opaqueRoots.begin();443 HashSet<void*>::iterator end = m_opaqueRoots.end();444 for (HashSet<void*>::iterator iter = begin; iter != end; ++iter)445 m_shared.m_opaqueRoots.add(*iter);446 }447 m_opaqueRoots.clear();448 }449 450 void SlotVisitor::startCopying()451 {452 ASSERT(!m_copiedAllocator.isValid());453 }454 455 void* SlotVisitor::allocateNewSpaceSlow(size_t bytes)456 {457 m_shared.m_copiedSpace->doneFillingBlock(m_copiedAllocator.resetCurrentBlock());458 m_copiedAllocator.setCurrentBlock(m_shared.m_copiedSpace->allocateBlockForCopyingPhase());459 460 void* result = 0;461 CheckedBoolean didSucceed = m_copiedAllocator.tryAllocate(bytes, &result);462 ASSERT(didSucceed);463 return result;464 }465 466 void* SlotVisitor::allocateNewSpaceOrPin(void* ptr, size_t bytes)467 {468 if (!checkIfShouldCopyAndPinOtherwise(ptr, bytes))469 return 0;470 471 return allocateNewSpace(bytes);472 }473 474 ALWAYS_INLINE bool JSString::tryHashConstLock()475 {476 #if ENABLE(PARALLEL_GC)477 unsigned currentFlags = m_flags;478 479 if (currentFlags & HashConstLock)480 return false;481 482 unsigned newFlags = currentFlags | HashConstLock;483 484 if (!WTF::weakCompareAndSwap(&m_flags, currentFlags, newFlags))485 return false;486 487 WTF::memoryBarrierAfterLock();488 return true;489 #else490 if (isHashConstSingleton())491 return false;492 493 m_flags |= HashConstLock;494 495 return true;496 
#endif497 }498 499 ALWAYS_INLINE void JSString::releaseHashConstLock()500 {501 #if ENABLE(PARALLEL_GC)502 WTF::memoryBarrierBeforeUnlock();503 #endif504 m_flags &= ~HashConstLock;505 }506 507 ALWAYS_INLINE bool JSString::shouldTryHashConst()508 {509 return ((length() > 1) && !isRope() && !isHashConstSingleton());510 }511 512 ALWAYS_INLINE void MarkStack::internalAppend(JSValue* slot)513 {514 // This internalAppend is only intended for visits to object and array backing stores.515 // as it can change the JSValue pointed to by the argument when the original JSValue516 // is a string that contains the same contents as another string.517 518 ASSERT(slot);519 JSValue value = *slot;520 ASSERT(value);521 if (!value.isCell())522 return;523 524 JSCell* cell = value.asCell();525 if (!cell)526 return;527 528 if (m_shouldHashConst && cell->isString()) {529 JSString* string = jsCast<JSString*>(cell);530 if (string->shouldTryHashConst() && string->tryHashConstLock()) {531 UniqueStringMap::AddResult addResult = m_uniqueStrings.add(string->string().impl(), value);532 if (addResult.isNewEntry)533 string->setHashConstSingleton();534 else {535 JSValue existingJSValue = addResult.iterator->second;536 if (value != existingJSValue)537 jsCast<JSString*>(existingJSValue.asCell())->clearHashConstSingleton();538 *slot = existingJSValue;539 string->releaseHashConstLock();540 return;541 }542 string->releaseHashConstLock();543 }544 }545 546 internalAppend(cell);547 }548 549 void SlotVisitor::copyAndAppend(void** ptr, size_t bytes, JSValue* values, unsigned length)550 {551 void* oldPtr = *ptr;552 void* newPtr = allocateNewSpaceOrPin(oldPtr, bytes);553 if (newPtr) {554 size_t jsValuesOffset = static_cast<size_t>(reinterpret_cast<char*>(values) - static_cast<char*>(oldPtr));555 556 JSValue* newValues = reinterpret_cast_ptr<JSValue*>(static_cast<char*>(newPtr) + jsValuesOffset);557 for (unsigned i = 0; i < length; i++) {558 JSValue& value = values[i];559 newValues[i] = value;560 if (!value)561 
continue;562 internalAppend(&newValues[i]);563 }564 565 memcpy(newPtr, oldPtr, jsValuesOffset);566 *ptr = newPtr;567 } else568 append(values, length);569 }570 571 void SlotVisitor::doneCopying()572 {573 if (!m_copiedAllocator.isValid())574 return;575 576 m_shared.m_copiedSpace->doneFillingBlock(m_copiedAllocator.resetCurrentBlock());577 }578 579 void SlotVisitor::harvestWeakReferences()580 {581 for (WeakReferenceHarvester* current = m_shared.m_weakReferenceHarvesters.head(); current; current = current->next())582 current->visitWeakReferences(*this);583 }584 585 void SlotVisitor::finalizeUnconditionalFinalizers()586 {587 while (m_shared.m_unconditionalFinalizers.hasNext())588 m_shared.m_unconditionalFinalizers.removeNext()->finalizeUnconditionally();589 }590 591 #if ENABLE(GC_VALIDATION)592 void MarkStack::validate(JSCell* cell)593 {594 if (!cell) {595 dataLog("cell is NULL\n");596 CRASH();597 }598 599 if (!cell->structure()) {600 dataLog("cell at %p has a null structure\n" , cell);601 CRASH();602 }603 604 // Both the cell's structure, and the cell's structure's structure should be the Structure Structure.605 // I hate this sentence.606 if (cell->structure()->structure()->JSCell::classInfo() != cell->structure()->JSCell::classInfo()) {607 const char* parentClassName = 0;608 const char* ourClassName = 0;609 if (cell->structure()->structure() && cell->structure()->structure()->JSCell::classInfo())610 parentClassName = cell->structure()->structure()->JSCell::classInfo()->className;611 if (cell->structure()->JSCell::classInfo())612 ourClassName = cell->structure()->JSCell::classInfo()->className;613 dataLog("parent structure (%p <%s>) of cell at %p doesn't match cell's structure (%p <%s>)\n",614 cell->structure()->structure(), parentClassName, cell, cell->structure(), ourClassName);615 CRASH();616 }617 }618 #else619 void MarkStack::validate(JSCell*)620 {621 }622 #endif623 624 226 } // namespace JSC -
trunk/Source/JavaScriptCore/heap/MarkStack.h
r126354 r128084 27 27 #define MarkStack_h 28 28 29 #include "CopiedSpace.h"30 #include "HandleTypes.h"31 #include "JSValue.h"32 #include "Options.h"33 #include "Register.h"34 #include "UnconditionalFinalizer.h"35 #include "VTableSpectrum.h"36 #include "WeakReferenceHarvester.h"37 #include <wtf/DataLog.h>38 #include <wtf/Forward.h>39 #include <wtf/HashMap.h>40 #include <wtf/HashSet.h>41 #include <wtf/Noncopyable.h>42 #include <wtf/OSAllocator.h>43 #include <wtf/PageBlock.h>44 #include <wtf/TCSpinLock.h>45 #include <wtf/text/StringHash.h>46 #include <wtf/Vector.h>47 48 29 #if ENABLE(OBJECT_MARK_LOGGING) 49 30 #define MARK_LOG_MESSAGE0(message) dataLog(message) … … 70 51 #endif 71 52 53 #include <wtf/StdLibExtras.h> 54 #include <wtf/TCSpinLock.h> 55 72 56 namespace JSC { 73 57 74 class ConservativeRoots; 75 class JSGlobalData; 76 class MarkStack; 77 class GCThreadSharedData; 78 class ParallelModeEnabler; 79 class Register; 80 class SlotVisitor; 81 template<typename T> class WriteBarrierBase; 82 template<typename T> class JITWriteBarrier; 83 84 struct MarkStackSegment { 85 MarkStackSegment* m_previous; 58 class JSCell; 59 60 struct MarkStackSegment { 61 MarkStackSegment* m_previous; 86 62 #if !ASSERT_DISABLED 87 63 size_t m_top; 88 64 #endif 89 65 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 66 const JSCell** data() 67 { 68 return bitwise_cast<const JSCell**>(this + 1); 69 } 70 71 static size_t capacityFromSize(size_t size) 72 { 73 return (size - sizeof(MarkStackSegment)) / sizeof(const JSCell*); 74 } 75 76 static size_t sizeFromCapacity(size_t capacity) 77 { 78 return sizeof(MarkStackSegment) + capacity * sizeof(const JSCell*); 79 } 80 }; 105 81 106 107 108 109 110 111 112 113 114 115 116 117 118 119 82 class MarkStackSegmentAllocator { 83 public: 84 MarkStackSegmentAllocator(); 85 ~MarkStackSegmentAllocator(); 86 87 MarkStackSegment* allocate(); 88 void release(MarkStackSegment*); 89 90 void shrinkReserve(); 91 92 private: 93 SpinLock m_lock; 94 
MarkStackSegment* m_nextFreeSegment; 95 }; 120 96 121 122 123 124 97 class MarkStackArray { 98 public: 99 MarkStackArray(MarkStackSegmentAllocator&); 100 ~MarkStackArray(); 125 101 126 102 void append(const JSCell*); 127 103 128 bool canRemoveLast(); 129 const JSCell* removeLast(); 130 bool refill(); 131 132 bool isEmpty(); 133 134 void donateSomeCellsTo(MarkStackArray& other); 135 136 void stealSomeCellsFrom(MarkStackArray& other, size_t idleThreadCount); 104 bool canRemoveLast(); 105 const JSCell* removeLast(); 106 bool refill(); 107 108 void donateSomeCellsTo(MarkStackArray& other); 109 void stealSomeCellsFrom(MarkStackArray& other, size_t idleThreadCount); 137 110 138 size_t size(); 111 size_t size(); 112 bool isEmpty(); 139 113 140 private: 141 MarkStackSegment* m_topSegment; 142 143 JS_EXPORT_PRIVATE void expand(); 144 145 MarkStackSegmentAllocator& m_allocator; 114 private: 115 JS_EXPORT_PRIVATE void expand(); 116 117 size_t postIncTop(); 118 size_t preDecTop(); 119 void setTopForFullSegment(); 120 void setTopForEmptySegment(); 121 size_t top(); 122 123 void validatePrevious(); 146 124 147 size_t m_segmentCapacity; 148 size_t m_top; 149 size_t m_numberOfPreviousSegments; 150 151 size_t postIncTop() 152 { 153 size_t result = m_top++; 154 ASSERT(result == m_topSegment->m_top++); 155 return result; 156 } 157 158 size_t preDecTop() 159 { 160 size_t result = --m_top; 161 ASSERT(result == --m_topSegment->m_top); 162 return result; 163 } 164 165 void setTopForFullSegment() 166 { 167 ASSERT(m_topSegment->m_top == m_segmentCapacity); 168 m_top = m_segmentCapacity; 169 } 170 171 void setTopForEmptySegment() 172 { 173 ASSERT(!m_topSegment->m_top); 174 m_top = 0; 175 } 176 177 size_t top() 178 { 179 ASSERT(m_top == m_topSegment->m_top); 180 return m_top; 181 } 182 183 #if ASSERT_DISABLED 184 void validatePrevious() { } 185 #else 186 void validatePrevious() 187 { 188 unsigned count = 0; 189 for (MarkStackSegment* current = m_topSegment->m_previous; current; current = 
current->m_previous) 190 count++; 191 ASSERT(count == m_numberOfPreviousSegments); 192 } 193 #endif 194 }; 125 MarkStackSegment* m_topSegment; 126 MarkStackSegmentAllocator& m_allocator; 195 127 196 class MarkStack { 197 WTF_MAKE_NONCOPYABLE(MarkStack); 198 friend class HeapRootVisitor; // Allowed to mark a JSValue* or JSCell** directly. 199 200 public: 201 MarkStack(GCThreadSharedData&); 202 ~MarkStack(); 203 204 void append(ConservativeRoots&); 205 206 template<typename T> void append(JITWriteBarrier<T>*); 207 template<typename T> void append(WriteBarrierBase<T>*); 208 void appendValues(WriteBarrierBase<Unknown>*, size_t count); 209 210 template<typename T> 211 void appendUnbarrieredPointer(T**); 212 void appendUnbarrieredValue(JSValue*); 213 214 void addOpaqueRoot(void*); 215 bool containsOpaqueRoot(void*); 216 int opaqueRootCount(); 217 218 GCThreadSharedData& sharedData() { return m_shared; } 219 bool isEmpty() { return m_stack.isEmpty(); } 220 221 void setup(); 222 void reset(); 223 224 size_t visitCount() const { return m_visitCount; } 225 226 #if ENABLE(SIMPLE_HEAP_PROFILING) 227 VTableSpectrum m_visitedTypeCounts; 228 #endif 229 230 void addWeakReferenceHarvester(WeakReferenceHarvester*); 231 void addUnconditionalFinalizer(UnconditionalFinalizer*); 232 233 #if ENABLE(OBJECT_MARK_LOGGING) 234 inline void resetChildCount() { m_logChildCount = 0; } 235 inline unsigned childCount() { return m_logChildCount; } 236 inline void incrementChildCount() { m_logChildCount++; } 237 #endif 238 239 protected: 240 JS_EXPORT_PRIVATE static void validate(JSCell*); 241 242 void append(JSValue*); 243 void append(JSValue*, size_t count); 244 void append(JSCell**); 245 246 void internalAppend(JSCell*); 247 void internalAppend(JSValue); 248 void internalAppend(JSValue*); 249 250 JS_EXPORT_PRIVATE void mergeOpaqueRoots(); 251 252 void mergeOpaqueRootsIfNecessary() 253 { 254 if (m_opaqueRoots.isEmpty()) 255 return; 256 mergeOpaqueRoots(); 257 } 258 259 void 
mergeOpaqueRootsIfProfitable() 260 { 261 if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold()) 262 return; 263 mergeOpaqueRoots(); 264 } 265 266 MarkStackArray m_stack; 267 HashSet<void*> m_opaqueRoots; // Handle-owning data structures not visible to the garbage collector. 268 269 #if !ASSERT_DISABLED 270 public: 271 bool m_isCheckingForDefaultMarkViolation; 272 bool m_isDraining; 273 #endif 274 protected: 275 friend class ParallelModeEnabler; 276 277 size_t m_visitCount; 278 bool m_isInParallelMode; 279 280 GCThreadSharedData& m_shared; 281 282 bool m_shouldHashConst; // Local per-thread copy of shared flag for performance reasons 283 typedef HashMap<StringImpl*, JSValue> UniqueStringMap; 284 UniqueStringMap m_uniqueStrings; 285 286 #if ENABLE(OBJECT_MARK_LOGGING) 287 unsigned m_logChildCount; 288 #endif 289 }; 290 291 inline void MarkStackArray::append(const JSCell* cell) 292 { 293 if (m_top == m_segmentCapacity) 294 expand(); 295 m_topSegment->data()[postIncTop()] = cell; 296 } 297 298 inline bool MarkStackArray::canRemoveLast() 299 { 300 return !!m_top; 301 } 302 303 inline const JSCell* MarkStackArray::removeLast() 304 { 305 return m_topSegment->data()[preDecTop()]; 306 } 307 308 inline bool MarkStackArray::isEmpty() 309 { 310 if (m_top) 311 return false; 312 if (m_topSegment->m_previous) { 313 ASSERT(m_topSegment->m_previous->m_top == m_segmentCapacity); 314 return false; 315 } 316 return true; 317 } 318 319 inline size_t MarkStackArray::size() 320 { 321 return m_top + m_segmentCapacity * m_numberOfPreviousSegments; 322 } 323 324 class ParallelModeEnabler { 325 public: 326 ParallelModeEnabler(MarkStack& stack) 327 : m_stack(stack) 328 { 329 ASSERT(!m_stack.m_isInParallelMode); 330 m_stack.m_isInParallelMode = true; 331 } 332 333 ~ParallelModeEnabler() 334 { 335 ASSERT(m_stack.m_isInParallelMode); 336 m_stack.m_isInParallelMode = false; 337 } 338 339 private: 340 MarkStack& m_stack; 341 }; 128 size_t m_segmentCapacity; 129 
size_t m_top; 130 size_t m_numberOfPreviousSegments; 131 132 }; 342 133 343 134 } // namespace JSC -
trunk/Source/JavaScriptCore/heap/MarkStackInlineMethods.h
r126624 r128084 32 32 namespace JSC { 33 33 34 ALWAYS_INLINE void MarkStack::append(JSValue* slot, size_t count)34 inline size_t MarkStackArray::postIncTop() 35 35 { 36 for (size_t i = 0; i < count; ++i) { 37 JSValue& value = slot[i]; 38 internalAppend(value); 39 } 36 size_t result = m_top++; 37 ASSERT(result == m_topSegment->m_top++); 38 return result; 39 } 40 41 inline size_t MarkStackArray::preDecTop() 42 { 43 size_t result = --m_top; 44 ASSERT(result == --m_topSegment->m_top); 45 return result; 46 } 47 48 inline void MarkStackArray::setTopForFullSegment() 49 { 50 ASSERT(m_topSegment->m_top == m_segmentCapacity); 51 m_top = m_segmentCapacity; 40 52 } 41 53 42 template<typename T> 43 inline void MarkStack::appendUnbarrieredPointer(T** slot) 54 inline void MarkStackArray::setTopForEmptySegment() 44 55 { 45 ASSERT(slot); 46 JSCell* cell = *slot; 47 internalAppend(cell); 56 ASSERT(!m_topSegment->m_top); 57 m_top = 0; 48 58 } 49 59 50 ALWAYS_INLINE void MarkStack::append(JSValue* slot)60 inline size_t MarkStackArray::top() 51 61 { 52 ASSERT( slot);53 internalAppend(*slot);62 ASSERT(m_top == m_topSegment->m_top); 63 return m_top; 54 64 } 55 65 56 ALWAYS_INLINE void MarkStack::appendUnbarrieredValue(JSValue* slot) 66 #if ASSERT_DISABLED 67 inline void MarkStackArray::validatePrevious() { } 68 #else 69 inline void MarkStackArray::validatePrevious() 57 70 { 58 ASSERT(slot); 59 internalAppend(*slot); 71 unsigned count = 0; 72 for (MarkStackSegment* current = m_topSegment->m_previous; current; current = current->m_previous) 73 count++; 74 ASSERT(count == m_numberOfPreviousSegments); 75 } 76 #endif 77 78 inline void MarkStackArray::append(const JSCell* cell) 79 { 80 if (m_top == m_segmentCapacity) 81 expand(); 82 m_topSegment->data()[postIncTop()] = cell; 60 83 } 61 84 62 ALWAYS_INLINE void MarkStack::append(JSCell** slot)85 inline bool MarkStackArray::canRemoveLast() 63 86 { 64 ASSERT(slot); 65 internalAppend(*slot); 87 return !!m_top; 66 88 } 67 89 68 ALWAYS_INLINE void 
MarkStack::internalAppend(JSValue value)90 inline const JSCell* MarkStackArray::removeLast() 69 91 { 70 if (!value || !value.isCell()) 71 return; 72 internalAppend(value.asCell()); 92 return m_topSegment->data()[preDecTop()]; 73 93 } 74 94 75 inline void MarkStack::addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester)95 inline bool MarkStackArray::isEmpty() 76 96 { 77 m_shared.m_weakReferenceHarvesters.addThreadSafe(weakReferenceHarvester); 97 if (m_top) 98 return false; 99 if (m_topSegment->m_previous) { 100 ASSERT(m_topSegment->m_previous->m_top == m_segmentCapacity); 101 return false; 102 } 103 return true; 78 104 } 79 105 80 inline void MarkStack::addUnconditionalFinalizer(UnconditionalFinalizer* unconditionalFinalizer)106 inline size_t MarkStackArray::size() 81 107 { 82 m_shared.m_unconditionalFinalizers.addThreadSafe(unconditionalFinalizer); 83 } 84 85 inline void MarkStack::addOpaqueRoot(void* root) 86 { 87 #if ENABLE(PARALLEL_GC) 88 if (Options::numberOfGCMarkers() == 1) { 89 // Put directly into the shared HashSet. 90 m_shared.m_opaqueRoots.add(root); 91 return; 92 } 93 // Put into the local set, but merge with the shared one every once in 94 // a while to make sure that the local sets don't grow too large. 95 mergeOpaqueRootsIfProfitable(); 96 m_opaqueRoots.add(root); 97 #else 98 m_opaqueRoots.add(root); 99 #endif 100 } 101 102 inline bool MarkStack::containsOpaqueRoot(void* root) 103 { 104 ASSERT(!m_isInParallelMode); 105 #if ENABLE(PARALLEL_GC) 106 ASSERT(m_opaqueRoots.isEmpty()); 107 return m_shared.m_opaqueRoots.contains(root); 108 #else 109 return m_opaqueRoots.contains(root); 110 #endif 111 } 112 113 inline int MarkStack::opaqueRootCount() 114 { 115 ASSERT(!m_isInParallelMode); 116 #if ENABLE(PARALLEL_GC) 117 ASSERT(m_opaqueRoots.isEmpty()); 118 return m_shared.m_opaqueRoots.size(); 119 #else 120 return m_opaqueRoots.size(); 121 #endif 108 return m_top + m_segmentCapacity * m_numberOfPreviousSegments; 122 109 } 123 110 -
trunk/Source/JavaScriptCore/heap/SlotVisitor.h
r126354 r128084 28 28 29 29 #include "CopiedSpace.h" 30 #include " MarkStack.h"30 #include "HandleTypes.h" 31 31 #include "MarkStackInlineMethods.h" 32 33 #include <wtf/text/StringHash.h> 32 34 33 35 namespace JSC { 34 36 37 class ConservativeRoots; 38 class GCThreadSharedData; 35 39 class Heap; 36 class GCThreadSharedData; 40 template<typename T> class WriteBarrierBase; 41 template<typename T> class JITWriteBarrier; 37 42 38 class SlotVisitor : public MarkStack { 39 friend class HeapRootVisitor; 43 class SlotVisitor { 44 WTF_MAKE_NONCOPYABLE(SlotVisitor); 45 friend class HeapRootVisitor; // Allowed to mark a JSValue* or JSCell** directly. 46 40 47 public: 41 48 SlotVisitor(GCThreadSharedData&); 49 ~SlotVisitor(); 42 50 43 void donate() 44 { 45 ASSERT(m_isInParallelMode); 46 if (Options::numberOfGCMarkers() == 1) 47 return; 48 49 donateKnownParallel(); 50 } 51 void append(ConservativeRoots&); 51 52 53 template<typename T> void append(JITWriteBarrier<T>*); 54 template<typename T> void append(WriteBarrierBase<T>*); 55 void appendValues(WriteBarrierBase<Unknown>*, size_t count); 56 57 template<typename T> 58 void appendUnbarrieredPointer(T**); 59 void appendUnbarrieredValue(JSValue*); 60 61 void addOpaqueRoot(void*); 62 bool containsOpaqueRoot(void*); 63 int opaqueRootCount(); 64 65 GCThreadSharedData& sharedData() { return m_shared; } 66 bool isEmpty() { return m_stack.isEmpty(); } 67 68 void setup(); 69 void reset(); 70 71 size_t visitCount() const { return m_visitCount; } 72 73 void donate(); 52 74 void drain(); 53 54 void donateAndDrain() 55 { 56 donate(); 57 drain(); 58 } 75 void donateAndDrain(); 59 76 60 77 enum SharedDrainMode { SlaveDrain, MasterDrain }; … … 79 96 void doneCopying(); 80 97 98 #if ENABLE(SIMPLE_HEAP_PROFILING) 99 VTableSpectrum m_visitedTypeCounts; 100 #endif 101 102 void addWeakReferenceHarvester(WeakReferenceHarvester*); 103 void addUnconditionalFinalizer(UnconditionalFinalizer*); 104 105 #if ENABLE(OBJECT_MARK_LOGGING) 106 inline void 
resetChildCount() { m_logChildCount = 0; } 107 inline unsigned childCount() { return m_logChildCount; } 108 inline void incrementChildCount() { m_logChildCount++; } 109 #endif 110 81 111 private: 112 friend class ParallelModeEnabler; 113 114 JS_EXPORT_PRIVATE static void validate(JSCell*); 115 116 void append(JSValue*); 117 void append(JSValue*, size_t count); 118 void append(JSCell**); 119 120 void internalAppend(JSCell*); 121 void internalAppend(JSValue); 122 void internalAppend(JSValue*); 123 124 JS_EXPORT_PRIVATE void mergeOpaqueRoots(); 125 void mergeOpaqueRootsIfNecessary(); 126 void mergeOpaqueRootsIfProfitable(); 127 82 128 void* allocateNewSpaceOrPin(void*, size_t); 83 129 void* allocateNewSpaceSlow(size_t); … … 85 131 void donateKnownParallel(); 86 132 133 MarkStackArray m_stack; 134 HashSet<void*> m_opaqueRoots; // Handle-owning data structures not visible to the garbage collector. 135 136 size_t m_visitCount; 137 bool m_isInParallelMode; 138 139 GCThreadSharedData& m_shared; 140 141 bool m_shouldHashConst; // Local per-thread copy of shared flag for performance reasons 142 typedef HashMap<StringImpl*, JSValue> UniqueStringMap; 143 UniqueStringMap m_uniqueStrings; 144 145 #if ENABLE(OBJECT_MARK_LOGGING) 146 unsigned m_logChildCount; 147 #endif 148 87 149 CopiedAllocator m_copiedAllocator; 150 151 public: 152 #if !ASSERT_DISABLED 153 bool m_isCheckingForDefaultMarkViolation; 154 bool m_isDraining; 155 #endif 88 156 }; 89 157 90 inline SlotVisitor::SlotVisitor(GCThreadSharedData& shared) 91 : MarkStack(shared) 92 { 93 } 158 class ParallelModeEnabler { 159 public: 160 ParallelModeEnabler(SlotVisitor& stack) 161 : m_stack(stack) 162 { 163 ASSERT(!m_stack.m_isInParallelMode); 164 m_stack.m_isInParallelMode = true; 165 } 166 167 ~ParallelModeEnabler() 168 { 169 ASSERT(m_stack.m_isInParallelMode); 170 m_stack.m_isInParallelMode = false; 171 } 172 173 private: 174 SlotVisitor& m_stack; 175 }; 94 176 95 177 } // namespace JSC -
trunk/Source/JavaScriptCore/heap/SlotVisitorInlineMethods.h
r123690 r128084 28 28 29 29 #include "CopiedSpaceInlineMethods.h" 30 #include "Options.h" 30 31 #include "SlotVisitor.h" 31 32 32 33 namespace JSC { 33 34 35 ALWAYS_INLINE void SlotVisitor::append(JSValue* slot, size_t count) 36 { 37 for (size_t i = 0; i < count; ++i) { 38 JSValue& value = slot[i]; 39 internalAppend(value); 40 } 41 } 42 43 template<typename T> 44 inline void SlotVisitor::appendUnbarrieredPointer(T** slot) 45 { 46 ASSERT(slot); 47 JSCell* cell = *slot; 48 internalAppend(cell); 49 } 50 51 ALWAYS_INLINE void SlotVisitor::append(JSValue* slot) 52 { 53 ASSERT(slot); 54 internalAppend(*slot); 55 } 56 57 ALWAYS_INLINE void SlotVisitor::appendUnbarrieredValue(JSValue* slot) 58 { 59 ASSERT(slot); 60 internalAppend(*slot); 61 } 62 63 ALWAYS_INLINE void SlotVisitor::append(JSCell** slot) 64 { 65 ASSERT(slot); 66 internalAppend(*slot); 67 } 68 69 ALWAYS_INLINE void SlotVisitor::internalAppend(JSValue value) 70 { 71 if (!value || !value.isCell()) 72 return; 73 internalAppend(value.asCell()); 74 } 75 76 inline void SlotVisitor::addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester) 77 { 78 m_shared.m_weakReferenceHarvesters.addThreadSafe(weakReferenceHarvester); 79 } 80 81 inline void SlotVisitor::addUnconditionalFinalizer(UnconditionalFinalizer* unconditionalFinalizer) 82 { 83 m_shared.m_unconditionalFinalizers.addThreadSafe(unconditionalFinalizer); 84 } 85 86 inline void SlotVisitor::addOpaqueRoot(void* root) 87 { 88 #if ENABLE(PARALLEL_GC) 89 if (Options::numberOfGCMarkers() == 1) { 90 // Put directly into the shared HashSet. 91 m_shared.m_opaqueRoots.add(root); 92 return; 93 } 94 // Put into the local set, but merge with the shared one every once in 95 // a while to make sure that the local sets don't grow too large. 
96 mergeOpaqueRootsIfProfitable(); 97 m_opaqueRoots.add(root); 98 #else 99 m_opaqueRoots.add(root); 100 #endif 101 } 102 103 inline bool SlotVisitor::containsOpaqueRoot(void* root) 104 { 105 ASSERT(!m_isInParallelMode); 106 #if ENABLE(PARALLEL_GC) 107 ASSERT(m_opaqueRoots.isEmpty()); 108 return m_shared.m_opaqueRoots.contains(root); 109 #else 110 return m_opaqueRoots.contains(root); 111 #endif 112 } 113 114 inline int SlotVisitor::opaqueRootCount() 115 { 116 ASSERT(!m_isInParallelMode); 117 #if ENABLE(PARALLEL_GC) 118 ASSERT(m_opaqueRoots.isEmpty()); 119 return m_shared.m_opaqueRoots.size(); 120 #else 121 return m_opaqueRoots.size(); 122 #endif 123 } 124 125 inline void SlotVisitor::mergeOpaqueRootsIfNecessary() 126 { 127 if (m_opaqueRoots.isEmpty()) 128 return; 129 mergeOpaqueRoots(); 130 } 131 132 inline void SlotVisitor::mergeOpaqueRootsIfProfitable() 133 { 134 if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold()) 135 return; 136 mergeOpaqueRoots(); 137 } 138 34 139 ALWAYS_INLINE bool SlotVisitor::checkIfShouldCopyAndPinOtherwise(void* oldPtr, size_t bytes) 35 140 { … … 56 161 } 57 162 163 inline void SlotVisitor::donate() 164 { 165 ASSERT(m_isInParallelMode); 166 if (Options::numberOfGCMarkers() == 1) 167 return; 168 169 donateKnownParallel(); 170 } 171 172 inline void SlotVisitor::donateAndDrain() 173 { 174 donate(); 175 drain(); 176 } 177 58 178 } // namespace JSC 59 179 -
trunk/Source/JavaScriptCore/jit/JITWriteBarrier.h
r100880 r128084 30 30 31 31 #include "MacroAssembler.h" 32 #include " MarkStack.h"32 #include "SlotVisitor.h" 33 33 #include "WriteBarrier.h" 34 34 … … 136 136 }; 137 137 138 template<typename T> inline void MarkStack::append(JITWriteBarrier<T>* slot)138 template<typename T> inline void SlotVisitor::append(JITWriteBarrier<T>* slot) 139 139 { 140 140 internalAppend(slot->get()); -
trunk/Source/JavaScriptCore/jit/JumpReplacementWatchpoint.cpp
r126214 r128084 30 30 31 31 #include "LinkBuffer.h" 32 #include "Options.h" 32 33 33 34 namespace JSC { -
trunk/Source/JavaScriptCore/runtime/JSCell.h
r127191 r128084 31 31 #include "JSValueInlineMethods.h" 32 32 #include "SlotVisitor.h" 33 #include "SlotVisitorInlineMethods.h" 33 34 #include "WriteBarrier.h" 34 35 #include <wtf/Noncopyable.h> -
trunk/Source/JavaScriptCore/runtime/Structure.h
r127191 r128084 565 565 } 566 566 567 ALWAYS_INLINE void MarkStack::internalAppend(JSCell* cell)567 ALWAYS_INLINE void SlotVisitor::internalAppend(JSCell* cell) 568 568 { 569 569 ASSERT(!m_isCheckingForDefaultMarkViolation); -
trunk/Source/JavaScriptCore/runtime/WriteBarrier.h
r119655 r128084 226 226 } 227 227 228 // MarkStackfunctions229 230 template<typename T> inline void MarkStack::append(WriteBarrierBase<T>* slot)228 // SlotVisitor functions 229 230 template<typename T> inline void SlotVisitor::append(WriteBarrierBase<T>* slot) 231 231 { 232 232 internalAppend(*slot->slot()); 233 233 } 234 234 235 ALWAYS_INLINE void MarkStack::appendValues(WriteBarrierBase<Unknown>* barriers, size_t count)235 ALWAYS_INLINE void SlotVisitor::appendValues(WriteBarrierBase<Unknown>* barriers, size_t count) 236 236 { 237 237 append(barriers->slot(), count); -
trunk/Source/JavaScriptCore/yarr/YarrJIT.cpp
r127191 r128084 29 29 #include <wtf/ASCIICType.h> 30 30 #include "LinkBuffer.h" 31 #include "Options.h" 31 32 #include "Yarr.h" 32 33 #include "YarrCanonicalizeUCS2.h"
Note:
See TracChangeset
for help on using the changeset viewer.