Changeset 52047 in webkit for trunk/JavaScriptCore/runtime/Collector.cpp
- Timestamp: Dec 11, 2009, 11:20:27 PM
- File: 1 edited
trunk/JavaScriptCore/runtime/Collector.cpp
--- trunk/JavaScriptCore/runtime/Collector.cpp (r52040)
+++ trunk/JavaScriptCore/runtime/Collector.cpp (r52047)

@@ -105 +105 @@
 const size_t GROWTH_FACTOR = 2;
 const size_t LOW_WATER_FACTOR = 4;
-const size_t ALLOCATIONS_PER_COLLECTION = 3600;
+const size_t ALLOCATIONS_PER_COLLECTION = 4000;
 // This value has to be a macro to be used in max() without introducing
 // a PIC branch in Mach-O binaries, see <rdar://problem/5971391>.

@@ -149 +149 @@ Heap::Heap(JSGlobalData* globalData)
 {
     ASSERT(globalData);
 
 #if PLATFORM(SYMBIAN)
     // Symbian OpenC supports mmap but currently not the MAP_ANON flag.

@@ -171 +171 @@ Heap::Heap(JSGlobalData* globalData)
 
     memset(&primaryHeap, 0, sizeof(CollectorHeap));
-    allocateBlock<PrimaryHeap>();
-
     memset(&numberHeap, 0, sizeof(CollectorHeap));
-#if USE(JSVALUE32)
-    allocateBlock<NumberHeap>();
-#endif
 }
 

@@ -199 +194 @@ Heap::destroy()
     m_markListSet = 0;
 
-    freeBlocks<PrimaryHeap>();
-    freeBlocks<NumberHeap>();
+    sweep<PrimaryHeap>();
+    // No need to sweep number heap, because the JSNumber destructor doesn't do anything.
+#if ENABLE(JSC_ZOMBIES)
+    ASSERT(primaryHeap.numLiveObjects == primaryHeap.numZombies);
+#else
+    ASSERT(!primaryHeap.numLiveObjects);
+#endif
+    freeBlocks(&primaryHeap);
+    freeBlocks(&numberHeap);
 
 #if ENABLE(JSC_MULTIPLE_THREADS)

@@ -224 +226 @@ Heap::allocateBlock()
 #if PLATFORM(DARWIN)
     vm_address_t address = 0;
+    // FIXME: tag the region as a JavaScriptCore heap when we get a registered VM tag: <rdar://problem/6054788>.
     vm_map(current_task(), &address, BLOCK_SIZE, BLOCK_OFFSET_MASK, VM_FLAGS_ANYWHERE | VM_TAG_FOR_COLLECTOR_MEMORY, MEMORY_OBJECT_NULL, 0, FALSE, VM_PROT_DEFAULT, VM_PROT_DEFAULT, VM_INHERIT_DEFAULT);
 #elif PLATFORM(SYMBIAN)

@@ -231 +234 @@ Heap::allocateBlock()
         CRASH();
     uintptr_t address = reinterpret_cast<uintptr_t>(mask);
+
+    memset(reinterpret_cast<void*>(address), 0, BLOCK_SIZE);
 #elif PLATFORM(WINCE)
     void* address = VirtualAlloc(NULL, BLOCK_SIZE, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);

@@ -243 +248 @@ Heap::allocateBlock()
     void* address;
     posix_memalign(&address, BLOCK_SIZE, BLOCK_SIZE);
+    memset(address, 0, BLOCK_SIZE);
 #else

@@ -268 +274 @@ Heap::allocateBlock()
 
     address += adjust;
-#endif
-
-    // Initialize block.
+    memset(reinterpret_cast<void*>(address), 0, BLOCK_SIZE);
+#endif
 
     CollectorBlock* block = reinterpret_cast<CollectorBlock*>(address);
+    block->freeList = block->cells;
     block->heap = this;
     block->type = heapType;
-    clearMarkBits<heapType>(block);
-
-    // heapAllocate assumes that it's safe to call a destructor on any cell in the primary heap.
-    if (heapType != NumberHeap) {
-        for (size_t i = 0; i < HeapConstants<heapType>::cellsPerBlock; ++i)
-            new (block->cells + i) JSCell(JSCell::DummyDestructableCell);
-    }
-
-    // Add block to blocks vector.
 
     CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;

@@ -305 +302 @@ Heap::freeBlock(size_t block)
     CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
 
-    if (heapType != NumberHeap) {
-        ObjectIterator<heapType> it(heap, block);
-        ObjectIterator<heapType> end(heap, block + 1);
-        for ( ; it != end; ++it)
-            (*it)->~JSCell();
-    }
     freeBlock(heap.blocks[block]);
 

@@ -344 +335 @@
 }
 
-template <HeapType heapType>
-void Heap::freeBlocks()
-{
-    CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-
-    while (heap.usedBlocks)
-        freeBlock<heapType>(0);
-    fastFree(heap.blocks);
-    memset(&heap, 0, sizeof(CollectorHeap));
+void Heap::freeBlocks(CollectorHeap* heap)
+{
+    for (size_t i = 0; i < heap->usedBlocks; ++i)
+        if (heap->blocks[i])
+            freeBlock(heap->blocks[i]);
+    fastFree(heap->blocks);
+    memset(heap, 0, sizeof(CollectorHeap));
 }
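All of the allocation paths above go out of their way to hand back BLOCK_SIZE-aligned memory (vm_map with BLOCK_OFFSET_MASK, posix_memalign, or the manual over-allocate-and-adjust path). A minimal sketch of why, assuming the power-of-two 64 KB BLOCK_SIZE this file uses (the constants below mirror the file but are assumptions here): alignment lets later code recover a cell's owning block with a single mask, which is essentially what markConservatively does further down via xAsBits - offset.

    // Sketch only: names and values are assumed stand-ins, not quoted code.
    #include <stdint.h>
    #include <stddef.h>

    static const size_t BLOCK_SIZE = 16 * 4096; // 64 KB, a power of two
    static const uintptr_t BLOCK_OFFSET_MASK = BLOCK_SIZE - 1;

    struct CollectorBlock; // layout irrelevant to the trick

    // Because each block starts on a BLOCK_SIZE boundary, masking off the low
    // bits of any interior pointer yields the block's base address.
    inline CollectorBlock* blockContaining(void* p)
    {
        uintptr_t bits = reinterpret_cast<uintptr_t>(p);
        return reinterpret_cast<CollectorBlock*>(bits & ~BLOCK_OFFSET_MASK);
    }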
@@ -369 +358 @@ Heap::recordExtraCost(size_t cost)
     // NOTE: we target the primaryHeap unconditionally as JSNumber doesn't modify cost
 
-    if (primaryHeap.extraCost > maxExtraCost && primaryHeap.extraCost > primaryHeap.usedBlocks * BLOCK_SIZE / 2) {
-        // If the last iteration through the heap deallocated blocks, we need
-        // to clean up remaining garbage before marking. Otherwise, the conservative
-        // marking mechanism might follow a pointer to unmapped memory.
-        if (primaryHeap.didShrink)
-            sweep<PrimaryHeap>();
-        reset();
-    }
     primaryHeap.extraCost += cost;
 }

@@ -384 +365 @@ Heap::heapAllocate(size_t s)
     typedef typename HeapConstants<heapType>::Block Block;
     typedef typename HeapConstants<heapType>::Cell Cell;
 
     CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-
     ASSERT(JSLock::lockCount() > 0);
     ASSERT(JSLock::currentThreadIsHoldingLock());

@@ -393 +373 @@ Heap::heapAllocate(size_t s)
     ASSERT(heap.operationInProgress == NoOperation);
     ASSERT(heapType == PrimaryHeap || heap.extraCost == 0);
+    // FIXME: If another global variable access here doesn't hurt performance
+    // too much, we could CRASH() in NDEBUG builds, which could help ensure we
+    // don't spend any time debugging cases where we allocate inside an object's
+    // deallocation code.
 
 #if COLLECT_ON_EVERY_ALLOCATION
-    collectAllGarbage();
+    collect();
+#endif
+
+    size_t numLiveObjects = heap.numLiveObjects;
+    size_t usedBlocks = heap.usedBlocks;
+    size_t i = heap.firstBlockWithPossibleSpace;
+
+    // if we have a huge amount of extra cost, we'll try to collect even if we still have
+    // free cells left.
+    if (heapType == PrimaryHeap && heap.extraCost > ALLOCATIONS_PER_COLLECTION) {
+        size_t numLiveObjectsAtLastCollect = heap.numLiveObjectsAtLastCollect;
+        size_t numNewObjects = numLiveObjects - numLiveObjectsAtLastCollect;
+        const size_t newCost = numNewObjects + heap.extraCost;
+        if (newCost >= ALLOCATIONS_PER_COLLECTION && newCost >= numLiveObjectsAtLastCollect)
+            goto collect;
+    }
+
     ASSERT(heap.operationInProgress == NoOperation);
-#endif
-
-allocate:
-
-    // Fast case: find the next garbage cell and recycle it.
-
-    do {
-        ASSERT(heap.nextBlock < heap.usedBlocks);
-        Block* block = reinterpret_cast<Block*>(heap.blocks[heap.nextBlock]);
-        do {
-            ASSERT(heap.nextCell < HeapConstants<heapType>::cellsPerBlock);
-            if (!block->marked.get(heap.nextCell >> HeapConstants<heapType>::bitmapShift)) { // Always false for the last cell in the block
-                Cell* cell = block->cells + heap.nextCell;
-                if (heapType != NumberHeap) {
-                    heap.operationInProgress = Allocation;
-                    JSCell* imp = reinterpret_cast<JSCell*>(cell);
-                    imp->~JSCell();
-                    heap.operationInProgress = NoOperation;
-                }
-                ++heap.nextCell;
-                return cell;
-            }
-        } while (++heap.nextCell != HeapConstants<heapType>::cellsPerBlock);
-        heap.nextCell = 0;
-    } while (++heap.nextBlock != heap.usedBlocks);
-
-    // Slow case: reached the end of the heap. Mark live objects and start over.
-
-    reset();
-    goto allocate;
-}
-
-template <HeapType heapType>
-void Heap::resizeBlocks()
-{
-    CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-
-    heap.didShrink = false;
-
-    size_t usedCellCount = markedCells<heapType>();
-    size_t minCellCount = usedCellCount + max(ALLOCATIONS_PER_COLLECTION, usedCellCount);
-    size_t minBlockCount = (minCellCount + HeapConstants<heapType>::cellsPerBlock - 1) / HeapConstants<heapType>::cellsPerBlock;
-
-    size_t maxCellCount = 1.25f * minCellCount;
-    size_t maxBlockCount = (maxCellCount + HeapConstants<heapType>::cellsPerBlock - 1) / HeapConstants<heapType>::cellsPerBlock;
-
-    if (heap.usedBlocks < minBlockCount)
-        growBlocks<heapType>(minBlockCount);
-    else if (heap.usedBlocks > maxBlockCount)
-        shrinkBlocks<heapType>(maxBlockCount);
-}
-
-template <HeapType heapType>
-void Heap::growBlocks(size_t neededBlocks)
-{
-    CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-    ASSERT(heap.usedBlocks < neededBlocks);
-    while (heap.usedBlocks < neededBlocks)
-        allocateBlock<heapType>();
-}
-
-template <HeapType heapType>
-void Heap::shrinkBlocks(size_t neededBlocks)
-{
-    CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-    ASSERT(heap.usedBlocks > neededBlocks);
-
-    // Clear the always-on last bit, so isEmpty() isn't fooled by it.
-    for (size_t i = 0; i < heap.usedBlocks; ++i)
-        heap.blocks[i]->marked.clear((HeapConstants<heapType>::cellsPerBlock - 1) >> HeapConstants<heapType>::bitmapShift);
-
-    for (size_t i = 0; i != heap.usedBlocks && heap.usedBlocks != neededBlocks; ) {
-        if (heap.blocks[i]->marked.isEmpty()) {
-            freeBlock<heapType>(i);
-            heap.didShrink = true;
-        } else
-            ++i;
-    }
-
-    // Reset the always-on last bit.
-    for (size_t i = 0; i < heap.usedBlocks; ++i)
-        heap.blocks[i]->marked.set((HeapConstants<heapType>::cellsPerBlock - 1) >> HeapConstants<heapType>::bitmapShift);
+#ifndef NDEBUG
+    // FIXME: Consider doing this in NDEBUG builds too (see comment above).
+    heap.operationInProgress = Allocation;
+#endif
+
+scan:
+    Block* targetBlock;
+    size_t targetBlockUsedCells;
+    if (i != usedBlocks) {
+        targetBlock = reinterpret_cast<Block*>(heap.blocks[i]);
+        targetBlockUsedCells = targetBlock->usedCells;
+        ASSERT(targetBlockUsedCells <= HeapConstants<heapType>::cellsPerBlock);
+        while (targetBlockUsedCells == HeapConstants<heapType>::cellsPerBlock) {
+            if (++i == usedBlocks)
+                goto collect;
+            targetBlock = reinterpret_cast<Block*>(heap.blocks[i]);
+            targetBlockUsedCells = targetBlock->usedCells;
+            ASSERT(targetBlockUsedCells <= HeapConstants<heapType>::cellsPerBlock);
+        }
+        heap.firstBlockWithPossibleSpace = i;
+    } else {
+
+collect:
+        size_t numLiveObjectsAtLastCollect = heap.numLiveObjectsAtLastCollect;
+        size_t numNewObjects = numLiveObjects - numLiveObjectsAtLastCollect;
+        const size_t newCost = numNewObjects + heap.extraCost;
+
+        if (newCost >= ALLOCATIONS_PER_COLLECTION && newCost >= numLiveObjectsAtLastCollect) {
+#ifndef NDEBUG
+            heap.operationInProgress = NoOperation;
+#endif
+            bool foundGarbage = collect();
+            numLiveObjects = heap.numLiveObjects;
+            usedBlocks = heap.usedBlocks;
+            i = heap.firstBlockWithPossibleSpace;
+#ifndef NDEBUG
+            heap.operationInProgress = Allocation;
+#endif
+            if (foundGarbage)
+                goto scan;
+        }
+
+        // didn't find a block, and GC didn't reclaim anything, need to allocate a new block
+        targetBlock = reinterpret_cast<Block*>(allocateBlock<heapType>());
+        heap.firstBlockWithPossibleSpace = heap.usedBlocks - 1;
+        targetBlockUsedCells = 0;
+    }
+
+    // find a free spot in the block and detach it from the free list
+    Cell* newCell = targetBlock->freeList;
+
+    // "next" field is a cell offset -- 0 means next cell, so a zeroed block is already initialized
+    targetBlock->freeList = (newCell + 1) + newCell->u.freeCell.next;
+
+    targetBlock->usedCells = static_cast<uint32_t>(targetBlockUsedCells + 1);
+    heap.numLiveObjects = numLiveObjects + 1;
+
+#ifndef NDEBUG
+    // FIXME: Consider doing this in NDEBUG builds too (see comment above).
+    heap.operationInProgress = NoOperation;
+#endif
+
+    return newCell;
 }
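The restored allocator leans on a compact free-list encoding, spelled out in the comment above: a free cell's "next" field stores the distance from the cell after it to the next free cell, so all-zero memory already encodes a fully linked block. That is why allocateBlock only needs a memset plus block->freeList = block->cells. A minimal self-contained sketch of the same idea, using an assumed stand-in for the real CollectorCell layout:

    #include <assert.h>
    #include <string.h>
    #include <stddef.h>

    struct FreeCell {
        long zeroIfFree;  // stays 0 while the cell is free
        ptrdiff_t next;   // offset from (this + 1) to the next free cell
    };

    int main()
    {
        FreeCell cells[4];
        memset(cells, 0, sizeof(cells)); // a freshly zeroed "block"

        FreeCell* freeList = cells;

        // Pop, exactly as heapAllocate does: with next == 0 this advances to
        // the adjacent cell, so no initialization loop was ever needed.
        FreeCell* newCell = freeList;
        freeList = (newCell + 1) + newCell->next;
        assert(freeList == &cells[1]);

        // Push during sweep: store the distance back to the old head.
        newCell->zeroIfFree = 0;
        newCell->next = freeList - (newCell + 1);
        freeList = newCell;
        assert(freeList == &cells[0]);
        return 0;
    }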
@@ -734 +715 @@
 #endif
 
-inline bool isPointerAligned(void* p)
-{
-    return (((intptr_t)(p) & (sizeof(char*) - 1)) == 0);
-}
-
-// Cell size needs to be a power of two for isPossibleCell to be valid.
-COMPILE_ASSERT(sizeof(CollectorCell) % 2 == 0, Collector_cell_size_is_power_of_two);
-
-#if USE(JSVALUE32)
-static bool isHalfCellAligned(void *p)
-{
-    return (((intptr_t)(p) & (CELL_MASK >> 1)) == 0);
-}
-
-static inline bool isPossibleCell(void* p)
-{
-    return isHalfCellAligned(p) && p;
-}
-
-#else
-
-static inline bool isCellAligned(void *p)
-{
-    return (((intptr_t)(p) & CELL_MASK) == 0);
-}
-
-static inline bool isPossibleCell(void* p)
-{
-    return isCellAligned(p) && p;
-}
-#endif
+#define IS_POINTER_ALIGNED(p) (((intptr_t)(p) & (sizeof(char*) - 1)) == 0)
+
+// cell size needs to be a power of two for this to be valid
+#define IS_HALF_CELL_ALIGNED(p) (((intptr_t)(p) & (CELL_MASK >> 1)) == 0)
 
 void Heap::markConservatively(MarkStack& markStack, void* start, void* end)

@@ -775 +729 @@ Heap::markConservatively(MarkStack& markStack, void* start, void* end)
 
     ASSERT((static_cast<char*>(end) - static_cast<char*>(start)) < 0x1000000);
-    ASSERT(isPointerAligned(start));
-    ASSERT(isPointerAligned(end));
+    ASSERT(IS_POINTER_ALIGNED(start));
+    ASSERT(IS_POINTER_ALIGNED(end));
 
     char** p = static_cast<char**>(start);
     char** e = static_cast<char**>(end);
 
+    size_t usedPrimaryBlocks = primaryHeap.usedBlocks;
+    size_t usedNumberBlocks = numberHeap.usedBlocks;
+    CollectorBlock** primaryBlocks = primaryHeap.blocks;
+    CollectorBlock** numberBlocks = numberHeap.blocks;
+
+    const size_t lastCellOffset = sizeof(CollectorCell) * (CELLS_PER_BLOCK - 1);
+
     while (p != e) {
         char* x = *p++;
-        if (isPossibleCell(x)) {
+        if (IS_HALF_CELL_ALIGNED(x) && x) {
             uintptr_t xAsBits = reinterpret_cast<uintptr_t>(x);
             xAsBits &= CELL_ALIGN_MASK;
             uintptr_t offset = xAsBits & BLOCK_OFFSET_MASK;
-
-            const size_t lastCellOffset = sizeof(CollectorCell) * (CELLS_PER_BLOCK - 1);
-            if (offset > lastCellOffset)
-                continue;
-
             CollectorBlock* blockAddr = reinterpret_cast<CollectorBlock*>(xAsBits - offset);
-#if USE(JSVALUE32)
             // Mark the the number heap, we can mark these Cells directly to avoid the virtual call cost
-            size_t usedNumberBlocks = numberHeap.usedBlocks;
-            CollectorBlock** numberBlocks = numberHeap.blocks;
             for (size_t block = 0; block < usedNumberBlocks; block++) {
-                if (numberBlocks[block] == blockAddr) {
+                if ((numberBlocks[block] == blockAddr) & (offset <= lastCellOffset)) {
                     Heap::markCell(reinterpret_cast<JSCell*>(xAsBits));
-                    goto loopEnd;
+                    goto endMarkLoop;
                 }
             }
-#endif
+
             // Mark the primary heap
-            size_t usedPrimaryBlocks = primaryHeap.usedBlocks;
-            CollectorBlock** primaryBlocks = primaryHeap.blocks;
             for (size_t block = 0; block < usedPrimaryBlocks; block++) {
-                if (primaryBlocks[block] != blockAddr)
-                    continue;
-                markStack.append(reinterpret_cast<JSCell*>(xAsBits));
-                markStack.drain();
-                break;
+                if ((primaryBlocks[block] == blockAddr) & (offset <= lastCellOffset)) {
+                    if (reinterpret_cast<CollectorCell*>(xAsBits)->u.freeCell.zeroIfFree) {
+                        markStack.append(reinterpret_cast<JSCell*>(xAsBits));
+                        markStack.drain();
+                    }
+                    break;
+                }
             }
+        endMarkLoop:
+            ;
         }
-#if USE(JSVALUE32)
-    loopEnd:
-#endif
     }
 }
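In the rewritten loops the block match and the in-bounds check are combined with a bitwise & rather than &&. Reading intent into that (the patch itself doesn't say): both operands are cheap, side-effect-free comparisons, so evaluating them unconditionally lets the compiler emit branch-free code instead of a hard-to-predict short-circuit branch per candidate pointer scanned off the stack. A tiny sketch of the shape, with hypothetical names:

    #include <stdint.h>

    // One candidate test from a conservative scan: both comparisons are
    // evaluated unconditionally, then combined without a branch.
    inline bool mayBeCellIn(void* blockAddr, void* candidateBlock,
                            uintptr_t offset, uintptr_t lastCellOffset)
    {
        return (candidateBlock == blockAddr) & (offset <= lastCellOffset);
    }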
@@ -1058 +1010 @@
 }
 
-template <HeapType heapType>
-void Heap::clearMarkBits()
-{
+template <HeapType heapType> size_t Heap::sweep()
+{
+    typedef typename HeapConstants<heapType>::Block Block;
+    typedef typename HeapConstants<heapType>::Cell Cell;
+
+    // SWEEP: delete everything with a zero refcount (garbage) and unmark everything else
     CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-    for (size_t i = 0; i < heap.usedBlocks; ++i)
-        clearMarkBits<heapType>(heap.blocks[i]);
-}
-
-template <HeapType heapType>
-void Heap::clearMarkBits(CollectorBlock* block)
-{
-    // heapAllocate assumes that the last cell in every block is marked.
-    block->marked.clearAll();
-    block->marked.set((HeapConstants<heapType>::cellsPerBlock - 1) >> HeapConstants<heapType>::bitmapShift);
-}
-
-template <HeapType heapType>
-size_t Heap::markedCells(size_t startBlock, size_t startCell) const
-{
-    const CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-    ASSERT(startBlock <= heap.usedBlocks);
-    ASSERT(startCell < HeapConstants<heapType>::cellsPerBlock);
-
-    if (startBlock >= heap.usedBlocks)
-        return 0;
-
-    size_t result = 0;
-    result += heap.blocks[startBlock]->marked.count(startCell);
-    for (size_t i = startBlock + 1; i < heap.usedBlocks; ++i)
-        result += heap.blocks[i]->marked.count();
-
-    return result;
-}
-
-template <HeapType heapType>
-void Heap::sweep()
-{
-    ASSERT(heapType != NumberHeap); // The number heap does not contain meaningful destructors.
-
-    CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-
-    ASSERT(heap.operationInProgress == NoOperation);
-    if (heap.operationInProgress != NoOperation)
-        CRASH();
-    heap.operationInProgress = Collection;
-
+
+    size_t emptyBlocks = 0;
+    size_t numLiveObjects = heap.numLiveObjects;
+
+    for (size_t block = 0; block < heap.usedBlocks; block++) {
+        Block* curBlock = reinterpret_cast<Block*>(heap.blocks[block]);
+
+        size_t usedCells = curBlock->usedCells;
+        Cell* freeList = curBlock->freeList;
+
+        if (usedCells == HeapConstants<heapType>::cellsPerBlock) {
+            // special case with a block where all cells are used -- testing indicates this happens often
+            for (size_t i = 0; i < HeapConstants<heapType>::cellsPerBlock; i++) {
+                if (!curBlock->marked.get(i >> HeapConstants<heapType>::bitmapShift)) {
+                    Cell* cell = curBlock->cells + i;
+
+                    if (heapType != NumberHeap) {
+                        JSCell* imp = reinterpret_cast<JSCell*>(cell);
+                        // special case for allocated but uninitialized object
+                        // (We don't need this check earlier because nothing prior this point
+                        // assumes the object has a valid vptr.)
+                        if (cell->u.freeCell.zeroIfFree == 0)
+                            continue;
+#if ENABLE(JSC_ZOMBIES)
+                        if (!imp->isZombie()) {
+                            const ClassInfo* info = imp->classInfo();
+                            imp->~JSCell();
+                            new (imp) JSZombie(info, JSZombie::leakedZombieStructure());
+                            heap.numZombies++;
+                        }
+#else
+                        imp->~JSCell();
+#endif
+                    }
+                    --numLiveObjects;
 #if !ENABLE(JSC_ZOMBIES)
-    Structure* dummyMarkableCellStructure = m_globalData->dummyMarkableCellStructure.get();
-#endif
-
-    DeadObjectIterator<heapType> it(heap, heap.nextBlock, heap.nextCell);
-    DeadObjectIterator<heapType> end(heap, heap.usedBlocks);
-    for ( ; it != end; ++it) {
-        JSCell* cell = *it;
+                    --usedCells;
+
+                    // put cell on the free list
+                    cell->u.freeCell.zeroIfFree = 0;
+                    cell->u.freeCell.next = freeList - (cell + 1);
+                    freeList = cell;
+#endif
+                }
+            }
+        } else {
+            size_t minimumCellsToProcess = usedCells;
+            for (size_t i = 0; (i < minimumCellsToProcess) & (i < HeapConstants<heapType>::cellsPerBlock); i++) {
+                Cell* cell = curBlock->cells + i;
+                if (cell->u.freeCell.zeroIfFree == 0) {
+                    ++minimumCellsToProcess;
+                } else {
+                    if (!curBlock->marked.get(i >> HeapConstants<heapType>::bitmapShift)) {
+                        if (heapType != NumberHeap) {
+                            JSCell* imp = reinterpret_cast<JSCell*>(cell);
 #if ENABLE(JSC_ZOMBIES)
-        if (!cell->isZombie()) {
-            const ClassInfo* info = cell->classInfo();
-            cell->~JSCell();
-            new (cell) JSZombie(info, JSZombie::leakedZombieStructure());
-            Heap::markCell(cell);
+                            if (!imp->isZombie()) {
+                                const ClassInfo* info = imp->classInfo();
+                                imp->~JSCell();
+                                new (imp) JSZombie(info, JSZombie::leakedZombieStructure());
+                                heap.numZombies++;
+                            }
+#else
+                            imp->~JSCell();
+#endif
+                        }
+#if !ENABLE(JSC_ZOMBIES)
+                        --usedCells;
+                        --numLiveObjects;
+
+                        // put cell on the free list
+                        cell->u.freeCell.zeroIfFree = 0;
+                        cell->u.freeCell.next = freeList - (cell + 1);
+                        freeList = cell;
+#endif
+                    }
+                }
+            }
         }
-#else
-        cell->~JSCell();
-        // Callers of sweep assume it's safe to mark any cell in the heap.
-        new (cell) JSCell(dummyMarkableCellStructure);
-#endif
-    }
-
-    heap.operationInProgress = NoOperation;
-}
-
-void Heap::markRoots()
+
+        curBlock->usedCells = static_cast<uint32_t>(usedCells);
+        curBlock->freeList = freeList;
+        curBlock->marked.clearAll();
+
+        if (!usedCells)
+            ++emptyBlocks;
+    }
+
+    if (heap.numLiveObjects != numLiveObjects)
+        heap.firstBlockWithPossibleSpace = 0;
+
+    heap.numLiveObjects = numLiveObjects;
+    heap.numLiveObjectsAtLastCollect = numLiveObjects;
+    heap.extraCost = 0;
+
+    if (!emptyBlocks)
+        return numLiveObjects;
+
+    size_t neededCells = 1.25f * (numLiveObjects + max(ALLOCATIONS_PER_COLLECTION, numLiveObjects));
+    size_t neededBlocks = (neededCells + HeapConstants<heapType>::cellsPerBlock - 1) / HeapConstants<heapType>::cellsPerBlock;
+    for (size_t block = 0; block < heap.usedBlocks; block++) {
+        if (heap.usedBlocks <= neededBlocks)
+            break;
+
+        Block* curBlock = reinterpret_cast<Block*>(heap.blocks[block]);
+        if (curBlock->usedCells)
+            continue;
+
+        freeBlock<heapType>(block);
+        block--; // Don't move forward a step in this case
+    }
+
+    return numLiveObjects;
+}
+
+bool Heap::collect()
 {
 #ifndef NDEBUG
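The partially-filled-block branch of sweep() walks cells with a moving bound: free cells have zeroIfFree == 0 and are not counted in usedCells, so each free cell encountered extends the bound by one, and the loop stops once usedCells allocated cells have been inspected instead of always touching every cell. A standalone sketch of that bookkeeping, using an assumed simplified cell representation:

    #include <assert.h>
    #include <stddef.h>

    // 0 means "free"; nonzero means "allocated" (live or garbage).
    static size_t countAllocated(const long* zeroIfFree, size_t cellsPerBlock, size_t usedCells)
    {
        size_t seen = 0;
        size_t bound = usedCells;
        for (size_t i = 0; (i < bound) & (i < cellsPerBlock); ++i) {
            if (zeroIfFree[i] == 0)
                ++bound;   // free cell: allow the scan to look one cell further
            else
                ++seen;    // allocated cell inspected
        }
        return seen;
    }

    int main()
    {
        const long cells[6] = { 1, 0, 1, 0, 1, 0 }; // 3 allocated, 3 free
        assert(countAllocated(cells, 6, 3) == 3);   // stops after index 4, not 5
        return 0;
    }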
@@ -1138 +1141 @@
 #endif
 
-    ASSERT((primaryHeap.operationInProgress == NoOperation) & (numberHeap.operationInProgress == NoOperation));
-    if (!((primaryHeap.operationInProgress == NoOperation) & (numberHeap.operationInProgress == NoOperation)))
+    ASSERT((primaryHeap.operationInProgress == NoOperation) | (numberHeap.operationInProgress == NoOperation));
+    if ((primaryHeap.operationInProgress != NoOperation) | (numberHeap.operationInProgress != NoOperation))
         CRASH();
 
+    JAVASCRIPTCORE_GC_BEGIN();
     primaryHeap.operationInProgress = Collection;
     numberHeap.operationInProgress = Collection;
 
+    // MARK: first mark all referenced objects recursively starting out from the set of root objects
     MarkStack& markStack = m_globalData->markStack;
-
-    // Reset mark bits.
-    clearMarkBits<PrimaryHeap>();
-    clearMarkBits<NumberHeap>();
-
-    // Mark stack roots.
     markStackObjectsConservatively(markStack);
-    m_globalData->interpreter->registerFile().markCallFrames(markStack, this);
-
-    // Mark explicitly registered roots.
     markProtectedObjects(markStack);
-
-    // Mark misc. other roots.
     if (m_markListSet && m_markListSet->size())
         MarkedArgumentBuffer::markLists(markStack, *m_markListSet);
     if (m_globalData->exception)
         markStack.append(m_globalData->exception);
+    m_globalData->interpreter->registerFile().markCallFrames(markStack, this);
     m_globalData->smallStrings.markChildren(markStack);
     if (m_globalData->functionCodeBlockBeingReparsed)

@@ -1171 +1166 @@
     markStack.drain();
     markStack.compact();
+    JAVASCRIPTCORE_GC_MARKED();
+
+    size_t originalLiveObjects = primaryHeap.numLiveObjects + numberHeap.numLiveObjects;
+    size_t numLiveObjects = sweep<PrimaryHeap>();
+    numLiveObjects += sweep<NumberHeap>();
 
     primaryHeap.operationInProgress = NoOperation;
     numberHeap.operationInProgress = NoOperation;
-}
-
-size_t Heap::objectCount() const
-{
-    return objectCount<PrimaryHeap>() + objectCount<NumberHeap>();
+    JAVASCRIPTCORE_GC_END(originalLiveObjects, numLiveObjects);
+
+    return numLiveObjects < originalLiveObjects;
+}
+
+size_t Heap::objectCount()
+{
+    return primaryHeap.numLiveObjects + numberHeap.numLiveObjects - m_globalData->smallStrings.count();
 }
 
 template <HeapType heapType>
-size_t Heap::objectCount() const
-{
-    const CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-
-    return heap.nextBlock * HeapConstants<heapType>::cellsPerBlock // allocated full blocks
-        + heap.nextCell // allocated cells in current block
-        + markedCells<heapType>(heap.nextBlock, heap.nextCell) // marked cells in remainder of heap
-        - heap.usedBlocks; // 1 cell per block is a dummy sentinel
-}
-
-template <HeapType heapType>
-void Heap::addToStatistics(Heap::Statistics& statistics) const
-{
-    const CollectorHeap& heap = heapType == PrimaryHeap ? primaryHeap : numberHeap;
-
-    statistics.size += heap.usedBlocks * BLOCK_SIZE;
-    statistics.free += heap.usedBlocks * BLOCK_SIZE - (objectCount<heapType>() * HeapConstants<heapType>::cellSize);
+static void addToStatistics(Heap::Statistics& statistics, const CollectorHeap& heap)
+{
+    typedef HeapConstants<heapType> HC;
+    for (size_t i = 0; i < heap.usedBlocks; ++i) {
+        if (heap.blocks[i]) {
+            statistics.size += BLOCK_SIZE;
+            statistics.free += (HC::cellsPerBlock - heap.blocks[i]->usedCells) * HC::cellSize;
+        }
+    }
 }
 

@@ -1204 +1199 @@ Heap::statistics() const
 {
     Statistics statistics = { 0, 0 };
-    addToStatistics<PrimaryHeap>(statistics);
-    addToStatistics<NumberHeap>(statistics);
+    JSC::addToStatistics<PrimaryHeap>(statistics, primaryHeap);
+    JSC::addToStatistics<NumberHeap>(statistics, numberHeap);
     return statistics;
 }
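collect() now reports whether the sweep actually reclaimed anything (numLiveObjects < originalLiveObjects), and heapAllocate uses that to choose between rescanning for freed cells and appending a brand-new block. The sweep tail's shrink policy is plain arithmetic; a worked sketch, assuming the 4000-allocation threshold above and an illustrative cells-per-block figure:

    #include <assert.h>
    #include <stddef.h>

    static const size_t ALLOCATIONS_PER_COLLECTION = 4000;

    static size_t neededBlocks(size_t numLiveObjects, size_t cellsPerBlock)
    {
        size_t grow = numLiveObjects > ALLOCATIONS_PER_COLLECTION ? numLiveObjects : ALLOCATIONS_PER_COLLECTION;
        size_t neededCells = 1.25f * (numLiveObjects + grow);       // 25% headroom
        return (neededCells + cellsPerBlock - 1) / cellsPerBlock;   // round up
    }

    int main()
    {
        // e.g. 10000 live objects and 1024 cells per block (an assumed figure):
        // keep ceil(1.25 * 20000 / 1024) = 25 blocks; empty blocks beyond that
        // get freed by the loop at the end of sweep().
        assert(neededBlocks(10000, 1024) == 25);
        return 0;
    }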
@@ -1277 +1272 @@
 }
 
-void Heap::reset()
-{
-    JAVASCRIPTCORE_GC_BEGIN();
-
-    markRoots();
-
-    JAVASCRIPTCORE_GC_MARKED();
-
-    primaryHeap.nextCell = 0;
-    primaryHeap.nextBlock = 0;
-    primaryHeap.extraCost = 0;
-#if ENABLE(JSC_ZOMBIES)
-    sweep<PrimaryHeap>();
-#endif
-    resizeBlocks<PrimaryHeap>();
-
-#if USE(JSVALUE32)
-    numberHeap.nextCell = 0;
-    numberHeap.nextBlock = 0;
-    resizeBlocks<NumberHeap>();
-#endif
-
-    JAVASCRIPTCORE_GC_END();
-}
-
-void Heap::collectAllGarbage()
-{
-    JAVASCRIPTCORE_GC_BEGIN();
-
-    // If the last iteration through the heap deallocated blocks, we need
-    // to clean up remaining garbage before marking. Otherwise, the conservative
-    // marking mechanism might follow a pointer to unmapped memory.
-    if (primaryHeap.didShrink)
-        sweep<PrimaryHeap>();
-
-    markRoots();
-
-    JAVASCRIPTCORE_GC_MARKED();
-
-    primaryHeap.nextCell = 0;
-    primaryHeap.nextBlock = 0;
-    primaryHeap.extraCost = 0;
-    sweep<PrimaryHeap>();
-    resizeBlocks<PrimaryHeap>();
-
-#if USE(JSVALUE32)
-    numberHeap.nextCell = 0;
-    numberHeap.nextBlock = 0;
-    resizeBlocks<NumberHeap>();
-#endif
-
-    JAVASCRIPTCORE_GC_END();
-}
-
-LiveObjectIterator<PrimaryHeap> Heap::primaryHeapBegin()
-{
-    return LiveObjectIterator<PrimaryHeap>(primaryHeap, 0);
-}
-
-LiveObjectIterator<PrimaryHeap> Heap::primaryHeapEnd()
-{
-    return LiveObjectIterator<PrimaryHeap>(primaryHeap, primaryHeap.usedBlocks);
+Heap::iterator Heap::primaryHeapBegin()
+{
+    return iterator(primaryHeap.blocks, primaryHeap.blocks + primaryHeap.usedBlocks);
+}
+
+Heap::iterator Heap::primaryHeapEnd()
+{
+    return iterator(primaryHeap.blocks + primaryHeap.usedBlocks, primaryHeap.blocks + primaryHeap.usedBlocks);
 }
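The final hunk swaps the specialized LiveObjectIterator back for the plain Heap::iterator, which is constructed from a range of block pointers and yields cells block by block. A self-contained toy model of that two-level iteration pattern (not the real iterator, whose details live in the collector headers):

    #include <assert.h>
    #include <stddef.h>

    struct Block { int cells[4]; };

    // Toy two-level walk: outer loop over blocks, inner loop over cells,
    // mirroring how a begin/end pair over primaryHeap.blocks would be consumed.
    static int sumCells(Block* const* begin, Block* const* end)
    {
        int sum = 0;
        for (Block* const* b = begin; b != end; ++b)  // block level
            for (size_t i = 0; i < 4; ++i)            // cell level
                sum += (*b)->cells[i];
        return sum;
    }

    int main()
    {
        Block b1 = { { 1, 2, 3, 4 } };
        Block b2 = { { 5, 6, 7, 8 } };
        Block* blocks[2] = { &b1, &b2 };
        assert(sumCells(blocks, blocks + 2) == 36);
        return 0;
    }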