Changeset 107445 in webkit for trunk/Source/JavaScriptCore/heap
- Timestamp: Feb 10, 2012, 2:44:09 PM (13 years ago)
- Location: trunk/Source/JavaScriptCore/heap
- Files: 7 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/heap/Heap.h
r106676 r107445 96 96 inline bool isBusy(); 97 97 98 MarkedAllocator& allocatorForObject (size_t bytes) { return m_objectSpace.allocatorFor(bytes); }99 void* allocate(size_t);98 MarkedAllocator& allocatorForObjectWithoutDestructor(size_t bytes) { return m_objectSpace.allocatorFor(bytes); } 99 MarkedAllocator& allocatorForObjectWithDestructor(size_t bytes) { return m_objectSpace.destructorAllocatorFor(bytes); } 100 100 CheckedBoolean tryAllocateStorage(size_t, void**); 101 101 CheckedBoolean tryReallocateStorage(void**, size_t, size_t); … … 143 143 friend class SlotVisitor; 144 144 friend class CodeBlock; 145 template<typename T> friend void* allocateCell(Heap&); 146 147 void* allocateWithDestructor(size_t); 148 void* allocateWithoutDestructor(size_t); 145 149 146 150 size_t waterMark(); … … 335 339 } 336 340 337 inline void* Heap::allocate (size_t bytes)341 inline void* Heap::allocateWithDestructor(size_t bytes) 338 342 { 339 343 ASSERT(isValidAllocation(bytes)); 340 return m_objectSpace.allocate(bytes); 344 return m_objectSpace.allocateWithDestructor(bytes); 345 } 346 347 inline void* Heap::allocateWithoutDestructor(size_t bytes) 348 { 349 ASSERT(isValidAllocation(bytes)); 350 return m_objectSpace.allocateWithoutDestructor(bytes); 341 351 } 342 352 -
trunk/Source/JavaScriptCore/heap/MarkedAllocator.cpp
r106677 r107445 98 98 } 99 99 if (block) 100 block = MarkedBlock::recycle(block, m_heap, m_cellSize );100 block = MarkedBlock::recycle(block, m_heap, m_cellSize, m_cellsNeedDestruction); 101 101 else if (allocationEffort == AllocationCanFail) 102 102 return 0; 103 103 else 104 block = MarkedBlock::create(m_heap, m_cellSize );104 block = MarkedBlock::create(m_heap, m_cellSize, m_cellsNeedDestruction); 105 105 106 106 m_markedSpace->didAddBlock(block); -
trunk/Source/JavaScriptCore/heap/MarkedAllocator.h
r106677 r107445 23 23 void zapFreeList(); 24 24 size_t cellSize() { return m_cellSize; } 25 bool cellsNeedDestruction() { return m_cellsNeedDestruction; } 25 26 void* allocate(); 26 27 Heap* heap() { return m_heap; } … … 30 31 void addBlock(MarkedBlock*); 31 32 void removeBlock(MarkedBlock*); 32 void setHeap(Heap* heap) { m_heap = heap; } 33 void setCellSize(size_t cellSize) { m_cellSize = cellSize; } 34 void setMarkedSpace(MarkedSpace* space) { m_markedSpace = space; } 33 void init(Heap*, MarkedSpace*, size_t cellSize, bool cellsNeedDestruction); 35 34 36 35 private: … … 44 43 DoublyLinkedList<HeapBlock> m_blockList; 45 44 size_t m_cellSize; 45 bool m_cellsNeedDestruction; 46 46 Heap* m_heap; 47 47 MarkedSpace* m_markedSpace; … … 52 52 , m_currentBlock(0) 53 53 , m_cellSize(0) 54 , m_cellsNeedDestruction(true) 54 55 , m_heap(0) 55 56 , m_markedSpace(0) 56 57 { 57 58 } 58 59 60 inline void MarkedAllocator::init(Heap* heap, MarkedSpace* markedSpace, size_t cellSize, bool cellsNeedDestruction) 61 { 62 m_heap = heap; 63 m_markedSpace = markedSpace; 64 m_cellSize = cellSize; 65 m_cellsNeedDestruction = cellsNeedDestruction; 66 } 67 59 68 inline void* MarkedAllocator::allocate() 60 69 { -
trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp
r106686 r107445 33 33 namespace JSC { 34 34 35 MarkedBlock* MarkedBlock::create(Heap* heap, size_t cellSize )35 MarkedBlock* MarkedBlock::create(Heap* heap, size_t cellSize, bool cellsNeedDestruction) 36 36 { 37 37 PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, blockSize, OSAllocator::JSGCHeapPages); 38 38 if (!static_cast<bool>(allocation)) 39 39 CRASH(); 40 return new (NotNull, allocation.base()) MarkedBlock(allocation, heap, cellSize );41 } 42 43 MarkedBlock* MarkedBlock::recycle(MarkedBlock* block, Heap* heap, size_t cellSize )44 { 45 return new (NotNull, block) MarkedBlock(block->m_allocation, heap, cellSize );40 return new (NotNull, allocation.base()) MarkedBlock(allocation, heap, cellSize, cellsNeedDestruction); 41 } 42 43 MarkedBlock* MarkedBlock::recycle(MarkedBlock* block, Heap* heap, size_t cellSize, bool cellsNeedDestruction) 44 { 45 return new (NotNull, block) MarkedBlock(block->m_allocation, heap, cellSize, cellsNeedDestruction); 46 46 } 47 47 … … 51 51 } 52 52 53 MarkedBlock::MarkedBlock(PageAllocationAligned& allocation, Heap* heap, size_t cellSize )53 MarkedBlock::MarkedBlock(PageAllocationAligned& allocation, Heap* heap, size_t cellSize, bool cellsNeedDestruction) 54 54 : HeapBlock(allocation) 55 55 , m_atomsPerCell((cellSize + atomSize - 1) / atomSize) 56 56 , m_endAtom(atomsPerBlock - m_atomsPerCell + 1) 57 , m_cellsNeedDestruction(cellsNeedDestruction) 57 58 , m_state(New) // All cells start out unmarked. 
58 59 , m_heap(heap) … … 71 72 m_heap->m_destroyedTypeCounts.countVPtr(vptr); 72 73 #endif 73 if (cell->classInfo() != &JSFinalObject::s_info)74 74 ASSERT(cell->classInfo() != &JSFinalObject::s_info); 75 cell->methodTable()->destroy(cell); 75 76 76 77 cell->zap(); 77 78 } 78 79 79 template<MarkedBlock::BlockState blockState, MarkedBlock::SweepMode sweepMode >80 template<MarkedBlock::BlockState blockState, MarkedBlock::SweepMode sweepMode, bool destructorCallNeeded> 80 81 MarkedBlock::FreeCell* MarkedBlock::specializedSweep() 81 82 { 82 83 ASSERT(blockState != Allocated && blockState != FreeListed); 84 ASSERT(destructorCallNeeded || sweepMode != SweepOnly); 83 85 84 86 // This produces a free list that is ordered in reverse through the block. … … 94 96 continue; 95 97 96 if ( blockState != New)98 if (destructorCallNeeded && blockState != New) 97 99 callDestructor(cell); 98 100 … … 112 114 HEAP_LOG_BLOCK_STATE_TRANSITION(this); 113 115 116 if (sweepMode == SweepOnly && !m_cellsNeedDestruction) 117 return 0; 118 119 if (m_cellsNeedDestruction) 120 return sweepHelper<true>(sweepMode); 121 return sweepHelper<false>(sweepMode); 122 } 123 124 template<bool destructorCallNeeded> 125 MarkedBlock::FreeCell* MarkedBlock::sweepHelper(SweepMode sweepMode) 126 { 114 127 switch (m_state) { 115 128 case New: 116 129 ASSERT(sweepMode == SweepToFreeList); 117 return specializedSweep<New, SweepToFreeList >();130 return specializedSweep<New, SweepToFreeList, destructorCallNeeded>(); 118 131 case FreeListed: 119 132 // Happens when a block transitions to fully allocated. … … 125 138 case Marked: 126 139 return sweepMode == SweepToFreeList 127 ? specializedSweep<Marked, SweepToFreeList >()128 : specializedSweep<Marked, SweepOnly >();140 ? specializedSweep<Marked, SweepToFreeList, destructorCallNeeded>() 141 : specializedSweep<Marked, SweepOnly, destructorCallNeeded>(); 129 142 case Zapped: 130 143 return sweepMode == SweepToFreeList 131 ? 
specializedSweep<Zapped, SweepToFreeList >()132 : specializedSweep<Zapped, SweepOnly >();144 ? specializedSweep<Zapped, SweepToFreeList, destructorCallNeeded>() 145 : specializedSweep<Zapped, SweepOnly, destructorCallNeeded>(); 133 146 } 134 147 -
trunk/Source/JavaScriptCore/heap/MarkedBlock.h
r106686 r107445 90 90 }; 91 91 92 static MarkedBlock* create(Heap*, size_t cellSize );93 static MarkedBlock* recycle(MarkedBlock*, Heap*, size_t cellSize );92 static MarkedBlock* create(Heap*, size_t cellSize, bool cellsNeedDestruction); 93 static MarkedBlock* recycle(MarkedBlock*, Heap*, size_t cellSize, bool cellsNeedDestruction); 94 94 static void destroy(MarkedBlock*); 95 95 … … 116 116 117 117 size_t cellSize(); 118 bool cellsNeedDestruction(); 118 119 119 120 size_t size(); … … 160 161 161 162 enum BlockState { New, FreeListed, Allocated, Marked, Zapped }; 163 template<bool destructorCallNeeded> FreeCell* sweepHelper(SweepMode = SweepOnly); 162 164 163 165 typedef char Atom[atomSize]; 164 166 165 MarkedBlock(PageAllocationAligned&, Heap*, size_t cellSize );167 MarkedBlock(PageAllocationAligned&, Heap*, size_t cellSize, bool cellsNeedDestruction); 166 168 Atom* atoms(); 167 169 size_t atomNumber(const void*); 168 170 void callDestructor(JSCell*); 169 template<BlockState, SweepMode > FreeCell* specializedSweep();171 template<BlockState, SweepMode, bool destructorCallNeeded> FreeCell* specializedSweep(); 170 172 171 173 #if ENABLE(GGC) … … 180 182 WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic> m_marks; 181 183 #endif 184 bool m_cellsNeedDestruction; 182 185 BlockState m_state; 183 186 Heap* m_heap; … … 242 245 { 243 246 return m_atomsPerCell * atomSize; 247 } 248 249 inline bool MarkedBlock::cellsNeedDestruction() 250 { 251 return m_cellsNeedDestruction; 244 252 } 245 253 -
trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp
r106676 r107445 37 37 { 38 38 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { 39 allocatorFor(cellSize).setCellSize(cellSize); 40 allocatorFor(cellSize).setHeap(heap); 41 allocatorFor(cellSize).setMarkedSpace(this); 39 allocatorFor(cellSize).init(heap, this, cellSize, false); 40 destructorAllocatorFor(cellSize).init(heap, this, cellSize, true); 42 41 } 43 42 44 43 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { 45 allocatorFor(cellSize).setCellSize(cellSize); 46 allocatorFor(cellSize).setHeap(heap); 47 allocatorFor(cellSize).setMarkedSpace(this); 44 allocatorFor(cellSize).init(heap, this, cellSize, false); 45 destructorAllocatorFor(cellSize).init(heap, this, cellSize, true); 48 46 } 49 47 } … … 54 52 m_nurseryWaterMark = 0; 55 53 56 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) 54 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { 57 55 allocatorFor(cellSize).reset(); 56 destructorAllocatorFor(cellSize).reset(); 57 } 58 58 59 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) 59 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { 60 60 allocatorFor(cellSize).reset(); 61 destructorAllocatorFor(cellSize).reset(); 62 } 61 63 } 62 64 63 65 void MarkedSpace::canonicalizeCellLivenessData() 64 66 { 65 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) 67 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { 66 68 allocatorFor(cellSize).zapFreeList(); 69 destructorAllocatorFor(cellSize).zapFreeList(); 70 } 67 71 68 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) 72 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { 69 73 allocatorFor(cellSize).zapFreeList(); 
74 destructorAllocatorFor(cellSize).zapFreeList(); 75 } 70 76 } 71 77 … … 108 114 return; 109 115 110 m_markedSpace->allocatorFor(block ->cellSize()).removeBlock(block);116 m_markedSpace->allocatorFor(block).removeBlock(block); 111 117 m_empties.append(block); 112 118 } -
trunk/Source/JavaScriptCore/heap/MarkedSpace.h
r106676 r107445 53 53 54 54 MarkedAllocator& allocatorFor(size_t); 55 void* allocate(size_t); 55 MarkedAllocator& allocatorFor(MarkedBlock*); 56 MarkedAllocator& destructorAllocatorFor(size_t); 57 void* allocateWithDestructor(size_t); 58 void* allocateWithoutDestructor(size_t); 56 59 57 60 void resetAllocators(); … … 87 90 static const size_t impreciseCount = impreciseCutoff / impreciseStep; 88 91 89 FixedArray<MarkedAllocator, preciseCount> m_preciseSizeClasses; 90 FixedArray<MarkedAllocator, impreciseCount> m_impreciseSizeClasses; 92 struct Subspace { 93 FixedArray<MarkedAllocator, preciseCount> preciseAllocators; 94 FixedArray<MarkedAllocator, impreciseCount> impreciseAllocators; 95 }; 96 97 Subspace m_destructorSpace; 98 Subspace m_normalSpace; 99 91 100 size_t m_waterMark; 92 101 size_t m_nurseryWaterMark; … … 125 134 ASSERT(bytes && bytes <= maxCellSize); 126 135 if (bytes <= preciseCutoff) 127 return m_ preciseSizeClasses[(bytes - 1) / preciseStep];128 return m_ impreciseSizeClasses[(bytes - 1) / impreciseStep];136 return m_normalSpace.preciseAllocators[(bytes - 1) / preciseStep]; 137 return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 129 138 } 130 139 131 inline void* MarkedSpace::allocate(size_t bytes) 140 inline MarkedAllocator& MarkedSpace::allocatorFor(MarkedBlock* block) 141 { 142 if (block->cellsNeedDestruction()) 143 return destructorAllocatorFor(block->cellSize()); 144 return allocatorFor(block->cellSize()); 145 } 146 147 inline MarkedAllocator& MarkedSpace::destructorAllocatorFor(size_t bytes) 148 { 149 ASSERT(bytes && bytes <= maxCellSize); 150 if (bytes <= preciseCutoff) 151 return m_destructorSpace.preciseAllocators[(bytes - 1) / preciseStep]; 152 return m_destructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 153 } 154 155 inline void* MarkedSpace::allocateWithoutDestructor(size_t bytes) 132 156 { 133 157 return allocatorFor(bytes).allocate(); 158 } 159 160 inline void* MarkedSpace::allocateWithDestructor(size_t 
bytes) 161 { 162 return destructorAllocatorFor(bytes).allocate(); 134 163 } 135 164 … … 137 166 { 138 167 for (size_t i = 0; i < preciseCount; ++i) { 139 m_preciseSizeClasses[i].forEachBlock(functor); 168 m_normalSpace.preciseAllocators[i].forEachBlock(functor); 169 m_destructorSpace.preciseAllocators[i].forEachBlock(functor); 140 170 } 141 171 142 172 for (size_t i = 0; i < impreciseCount; ++i) { 143 m_impreciseSizeClasses[i].forEachBlock(functor); 173 m_normalSpace.impreciseAllocators[i].forEachBlock(functor); 174 m_destructorSpace.impreciseAllocators[i].forEachBlock(functor); 144 175 } 145 176
Note: See TracChangeset for help on using the changeset viewer.