Changeset 130303 in webkit for trunk/Source/JavaScriptCore/heap
- Timestamp:
- Oct 3, 2012, 10:51:28 AM (13 years ago)
- Location:
- trunk/Source/JavaScriptCore/heap
- Files:
-
- 11 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/heap/Heap.cpp
r129586 r130303 943 943 } 944 944 945 bool Heap::isSafeToSweepStructures()946 {947 return !m_sweeper || m_sweeper->structuresCanBeSwept();948 }949 950 945 void Heap::didStartVMShutdown() 951 946 { -
trunk/Source/JavaScriptCore/heap/Heap.h
r129586 r130303 113 113 MarkedAllocator& firstAllocatorWithoutDestructors() { return m_objectSpace.firstAllocator(); } 114 114 MarkedAllocator& allocatorForObjectWithoutDestructor(size_t bytes) { return m_objectSpace.allocatorFor(bytes); } 115 MarkedAllocator& allocatorForObjectWithDestructor(size_t bytes) { return m_objectSpace.destructorAllocatorFor(bytes); } 115 MarkedAllocator& allocatorForObjectWithNormalDestructor(size_t bytes) { return m_objectSpace.normalDestructorAllocatorFor(bytes); } 116 MarkedAllocator& allocatorForObjectWithImmortalStructureDestructor(size_t bytes) { return m_objectSpace.immortalStructureDestructorAllocatorFor(bytes); } 116 117 CopiedAllocator& storageAllocator() { return m_storageSpace.allocator(); } 117 118 CheckedBoolean tryAllocateStorage(size_t, void**); … … 170 171 171 172 bool isPagedOut(double deadline); 172 bool isSafeToSweepStructures();173 173 void didStartVMShutdown(); 174 174 … … 186 186 template<typename T> friend void* allocateCell(Heap&, size_t); 187 187 188 void* allocateWith Destructor(size_t);189 void* allocateWith outDestructor(size_t);190 void* allocate Structure(size_t);188 void* allocateWithImmortalStructureDestructor(size_t); // For use with special objects whose Structures never die. 189 void* allocateWithNormalDestructor(size_t); // For use with objects that inherit directly or indirectly from JSDestructibleObject. 190 void* allocateWithoutDestructor(size_t); // For use with objects without destructors. 
191 191 192 192 static const size_t minExtraCost = 256; … … 364 364 } 365 365 366 inline void* Heap::allocateWith Destructor(size_t bytes)366 inline void* Heap::allocateWithNormalDestructor(size_t bytes) 367 367 { 368 368 ASSERT(isValidAllocation(bytes)); 369 return m_objectSpace.allocateWithDestructor(bytes); 369 return m_objectSpace.allocateWithNormalDestructor(bytes); 370 } 371 372 inline void* Heap::allocateWithImmortalStructureDestructor(size_t bytes) 373 { 374 ASSERT(isValidAllocation(bytes)); 375 return m_objectSpace.allocateWithImmortalStructureDestructor(bytes); 370 376 } 371 377 … … 376 382 } 377 383 378 inline void* Heap::allocateStructure(size_t bytes)379 {380 return m_objectSpace.allocateStructure(bytes);381 }382 383 384 inline CheckedBoolean Heap::tryAllocateStorage(size_t bytes, void** outPtr) 384 385 { -
trunk/Source/JavaScriptCore/heap/IncrementalSweeper.cpp
r128851 r130303 49 49 : HeapTimer(heap->globalData(), runLoop) 50 50 , m_currentBlockToSweepIndex(0) 51 , m_structuresCanBeSwept(false)52 51 { 53 52 } … … 73 72 : HeapTimer(heap->globalData()) 74 73 , m_currentBlockToSweepIndex(0) 75 , m_structuresCanBeSwept(false)76 74 { 77 75 } … … 120 118 while (m_currentBlockToSweepIndex < m_blocksToSweep.size()) { 121 119 MarkedBlock* block = m_blocksToSweep[m_currentBlockToSweepIndex++]; 122 if (block->onlyContainsStructures())123 m_structuresCanBeSwept = true;124 else125 ASSERT(!m_structuresCanBeSwept);126 120 127 121 if (!block->needsSweeping()) … … 140 134 m_globalData->heap.objectSpace().forEachBlock(functor); 141 135 m_currentBlockToSweepIndex = 0; 142 m_structuresCanBeSwept = false;143 136 scheduleTimer(); 144 137 } … … 147 140 { 148 141 m_currentBlockToSweepIndex = 0; 149 m_structuresCanBeSwept = true;150 142 m_blocksToSweep.clear(); 151 143 if (m_globalData) … … 157 149 IncrementalSweeper::IncrementalSweeper(JSGlobalData* globalData) 158 150 : HeapTimer(globalData) 159 , m_structuresCanBeSwept(false)160 151 { 161 152 } … … 172 163 void IncrementalSweeper::startSweeping(const HashSet<MarkedBlock*>&) 173 164 { 174 m_structuresCanBeSwept = false;175 165 } 176 166 177 167 void IncrementalSweeper::willFinishSweeping() 178 168 { 179 m_structuresCanBeSwept = true;180 169 } 181 170 … … 186 175 #endif 187 176 188 bool IncrementalSweeper::structuresCanBeSwept()189 {190 return m_structuresCanBeSwept;191 }192 193 177 } // namespace JSC -
trunk/Source/JavaScriptCore/heap/IncrementalSweeper.h
r128851 r130303 57 57 virtual void doWork(); 58 58 void sweepNextBlock(); 59 bool structuresCanBeSwept();60 59 void willFinishSweeping(); 61 60 … … 79 78 80 79 #endif 81 bool m_structuresCanBeSwept;82 80 }; 83 81 -
trunk/Source/JavaScriptCore/heap/MarkedAllocator.cpp
r128851 r130303 31 31 { 32 32 if (!m_freeList.head) { 33 if (m_onlyContainsStructures && !m_heap->isSafeToSweepStructures()) {34 if (m_currentBlock) {35 m_currentBlock->didConsumeFreeList();36 m_currentBlock = 0;37 }38 // We sweep another random block here so that we can make progress39 // toward being able to sweep Structures.40 m_heap->sweeper()->sweepNextBlock();41 return 0;42 }43 44 33 for (MarkedBlock*& block = m_blocksToSweep; block; block = block->next()) { 45 34 MarkedBlock::FreeList freeList = block->sweep(MarkedBlock::SweepToFreeList); … … 125 114 if (blockSize == MarkedBlock::blockSize) { 126 115 PageAllocationAligned allocation = m_heap->blockAllocator().allocate(); 127 return MarkedBlock::create(allocation, m_heap, cellSize, m_cellsNeedDestruction, m_onlyContainsStructures);116 return MarkedBlock::create(allocation, this, cellSize, m_destructorType); 128 117 } 129 118 … … 131 120 if (!static_cast<bool>(allocation)) 132 121 CRASH(); 133 return MarkedBlock::create(allocation, m_heap, cellSize, m_cellsNeedDestruction, m_onlyContainsStructures);122 return MarkedBlock::create(allocation, this, cellSize, m_destructorType); 134 123 } 135 124 -
trunk/Source/JavaScriptCore/heap/MarkedAllocator.h
r128851 r130303 24 24 void canonicalizeCellLivenessData(); 25 25 size_t cellSize() { return m_cellSize; } 26 bool cellsNeedDestruction() { return m_cellsNeedDestruction; } 27 bool onlyContainsStructures() { return m_onlyContainsStructures; } 26 MarkedBlock::DestructorType destructorType() { return m_destructorType; } 28 27 void* allocate(size_t); 29 28 Heap* heap() { return m_heap; } … … 33 32 void addBlock(MarkedBlock*); 34 33 void removeBlock(MarkedBlock*); 35 void init(Heap*, MarkedSpace*, size_t cellSize, bool cellsNeedDestruction, bool onlyContainsStructures);34 void init(Heap*, MarkedSpace*, size_t cellSize, MarkedBlock::DestructorType); 36 35 37 36 bool isPagedOut(double deadline); … … 50 49 DoublyLinkedList<MarkedBlock> m_blockList; 51 50 size_t m_cellSize; 52 bool m_cellsNeedDestruction; 53 bool m_onlyContainsStructures; 51 MarkedBlock::DestructorType m_destructorType; 54 52 Heap* m_heap; 55 53 MarkedSpace* m_markedSpace; … … 60 58 , m_blocksToSweep(0) 61 59 , m_cellSize(0) 62 , m_cellsNeedDestruction(true) 63 , m_onlyContainsStructures(false) 60 , m_destructorType(MarkedBlock::None) 64 61 , m_heap(0) 65 62 , m_markedSpace(0) … … 67 64 } 68 65 69 inline void MarkedAllocator::init(Heap* heap, MarkedSpace* markedSpace, size_t cellSize, bool cellsNeedDestruction, bool onlyContainsStructures)66 inline void MarkedAllocator::init(Heap* heap, MarkedSpace* markedSpace, size_t cellSize, MarkedBlock::DestructorType destructorType) 70 67 { 71 68 m_heap = heap; 72 69 m_markedSpace = markedSpace; 73 70 m_cellSize = cellSize; 74 m_cellsNeedDestruction = cellsNeedDestruction; 75 m_onlyContainsStructures = onlyContainsStructures; 71 m_destructorType = destructorType; 76 72 } 77 73 -
trunk/Source/JavaScriptCore/heap/MarkedBlock.cpp
r128851 r130303 29 29 #include "IncrementalSweeper.h" 30 30 #include "JSCell.h" 31 #include "JS Object.h"31 #include "JSDestructibleObject.h" 32 32 33 33 34 34 namespace JSC { 35 35 36 MarkedBlock* MarkedBlock::create(const PageAllocationAligned& allocation, Heap* heap, size_t cellSize, bool cellsNeedDestruction, bool onlyContainsStructures)36 MarkedBlock* MarkedBlock::create(const PageAllocationAligned& allocation, MarkedAllocator* allocator, size_t cellSize, DestructorType destructorType) 37 37 { 38 return new (NotNull, allocation.base()) MarkedBlock(allocation, heap, cellSize, cellsNeedDestruction, onlyContainsStructures);38 return new (NotNull, allocation.base()) MarkedBlock(allocation, allocator, cellSize, destructorType); 39 39 } 40 40 41 MarkedBlock::MarkedBlock(const PageAllocationAligned& allocation, Heap* heap, size_t cellSize, bool cellsNeedDestruction, bool onlyContainsStructures)41 MarkedBlock::MarkedBlock(const PageAllocationAligned& allocation, MarkedAllocator* allocator, size_t cellSize, DestructorType destructorType) 42 42 : HeapBlock<MarkedBlock>(allocation) 43 43 , m_atomsPerCell((cellSize + atomSize - 1) / atomSize) 44 44 , m_endAtom(atomsPerBlock - m_atomsPerCell + 1) 45 , m_ cellsNeedDestruction(cellsNeedDestruction)46 , m_ onlyContainsStructures(onlyContainsStructures)45 , m_destructorType(destructorType) 46 , m_allocator(allocator) 47 47 , m_state(New) // All cells start out unmarked. 
48 , m_weakSet( heap->globalData())48 , m_weakSet(allocator->heap()->globalData()) 49 49 { 50 ASSERT( heap);50 ASSERT(allocator); 51 51 HEAP_LOG_BLOCK_STATE_TRANSITION(this); 52 52 } … … 66 66 } 67 67 68 template<MarkedBlock::BlockState blockState, MarkedBlock::SweepMode sweepMode, bool destructorCallNeeded>68 template<MarkedBlock::BlockState blockState, MarkedBlock::SweepMode sweepMode, MarkedBlock::DestructorType dtorType> 69 69 MarkedBlock::FreeList MarkedBlock::specializedSweep() 70 70 { 71 71 ASSERT(blockState != Allocated && blockState != FreeListed); 72 ASSERT( destructorCallNeeded || sweepMode != SweepOnly);72 ASSERT(!(dtorType == MarkedBlock::None && sweepMode == SweepOnly)); 73 73 74 74 // This produces a free list that is ordered in reverse through the block. … … 83 83 JSCell* cell = reinterpret_cast_ptr<JSCell*>(&atoms()[i]); 84 84 85 if (d estructorCallNeeded&& blockState != New)85 if (dtorType != MarkedBlock::None && blockState != New) 86 86 callDestructor(cell); 87 87 … … 104 104 m_weakSet.sweep(); 105 105 106 if (sweepMode == SweepOnly && !m_cellsNeedDestruction)106 if (sweepMode == SweepOnly && m_destructorType == MarkedBlock::None) 107 107 return FreeList(); 108 108 109 if (m_cellsNeedDestruction) 110 return sweepHelper<true>(sweepMode); 111 return sweepHelper<false>(sweepMode); 109 if (m_destructorType == MarkedBlock::ImmortalStructure) 110 return sweepHelper<MarkedBlock::ImmortalStructure>(sweepMode); 111 if (m_destructorType == MarkedBlock::Normal) 112 return sweepHelper<MarkedBlock::Normal>(sweepMode); 113 return sweepHelper<MarkedBlock::None>(sweepMode); 112 114 } 113 115 114 template< bool destructorCallNeeded>116 template<MarkedBlock::DestructorType dtorType> 115 117 MarkedBlock::FreeList MarkedBlock::sweepHelper(SweepMode sweepMode) 116 118 { … … 118 120 case New: 119 121 ASSERT(sweepMode == SweepToFreeList); 120 return specializedSweep<New, SweepToFreeList, d estructorCallNeeded>();122 return specializedSweep<New, SweepToFreeList, 
dtorType>(); 121 123 case FreeListed: 122 124 // Happens when a block transitions to fully allocated. … … 127 129 return FreeList(); 128 130 case Marked: 129 ASSERT(!m_onlyContainsStructures || heap()->isSafeToSweepStructures());130 131 return sweepMode == SweepToFreeList 131 ? specializedSweep<Marked, SweepToFreeList, d estructorCallNeeded>()132 : specializedSweep<Marked, SweepOnly, d estructorCallNeeded>();132 ? specializedSweep<Marked, SweepToFreeList, dtorType>() 133 : specializedSweep<Marked, SweepOnly, dtorType>(); 133 134 } 134 135 -
trunk/Source/JavaScriptCore/heap/MarkedBlock.h
r128851 r130303 53 53 class Heap; 54 54 class JSCell; 55 class MarkedAllocator; 55 56 56 57 typedef uintptr_t Bits; … … 113 114 }; 114 115 115 static MarkedBlock* create(const PageAllocationAligned&, Heap*, size_t cellSize, bool cellsNeedDestruction, bool onlyContainsStructures); 116 enum DestructorType { None, ImmortalStructure, Normal }; 117 static MarkedBlock* create(const PageAllocationAligned&, MarkedAllocator*, size_t cellSize, DestructorType); 116 118 117 119 static bool isAtomAligned(const void*); … … 121 123 void lastChanceToFinalize(); 122 124 125 MarkedAllocator* allocator() const; 123 126 Heap* heap() const; 124 127 JSGlobalData* globalData() const; … … 144 147 145 148 size_t cellSize(); 146 bool cellsNeedDestruction(); 147 bool onlyContainsStructures(); 149 DestructorType destructorType(); 148 150 149 151 size_t size(); … … 195 197 196 198 enum BlockState { New, FreeListed, Allocated, Marked }; 197 template< bool destructorCallNeeded> FreeList sweepHelper(SweepMode = SweepOnly);199 template<DestructorType> FreeList sweepHelper(SweepMode = SweepOnly); 198 200 199 201 typedef char Atom[atomSize]; 200 202 201 MarkedBlock(const PageAllocationAligned&, Heap*, size_t cellSize, bool cellsNeedDestruction, bool onlyContainsStructures);203 MarkedBlock(const PageAllocationAligned&, MarkedAllocator*, size_t cellSize, DestructorType); 202 204 Atom* atoms(); 203 205 size_t atomNumber(const void*); 204 206 void callDestructor(JSCell*); 205 template<BlockState, SweepMode, bool destructorCallNeeded> FreeList specializedSweep();207 template<BlockState, SweepMode, DestructorType> FreeList specializedSweep(); 206 208 207 209 #if ENABLE(GGC) … … 216 218 WTF::Bitmap<atomsPerBlock, WTF::BitmapNotAtomic> m_marks; 217 219 #endif 218 bool m_cellsNeedDestruction;219 bool m_onlyContainsStructures;220 DestructorType m_destructorType; 221 MarkedAllocator* m_allocator; 220 222 BlockState m_state; 221 223 WeakSet m_weakSet; … … 262 264 } 263 265 266 inline MarkedAllocator* 
MarkedBlock::allocator() const 267 { 268 return m_allocator; 269 } 270 264 271 inline Heap* MarkedBlock::heap() const 265 272 { … … 327 334 } 328 335 329 inline bool MarkedBlock::cellsNeedDestruction() 330 { 331 return m_cellsNeedDestruction; 332 } 333 334 inline bool MarkedBlock::onlyContainsStructures() 335 { 336 return m_onlyContainsStructures; 336 inline MarkedBlock::DestructorType MarkedBlock::destructorType() 337 { 338 return m_destructorType; 337 339 } 338 340 -
trunk/Source/JavaScriptCore/heap/MarkedSpace.cpp
r128851 r130303 82 82 { 83 83 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { 84 allocatorFor(cellSize).init(heap, this, cellSize, false, false); 85 destructorAllocatorFor(cellSize).init(heap, this, cellSize, true, false); 86 } 87 88 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { 89 allocatorFor(cellSize).init(heap, this, cellSize, false, false); 90 destructorAllocatorFor(cellSize).init(heap, this, cellSize, true, false); 91 } 92 93 m_largeAllocator.init(heap, this, 0, true, false); 94 m_structureAllocator.init(heap, this, WTF::roundUpToMultipleOf(32, sizeof(Structure)), true, true); 84 allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None); 85 normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal); 86 immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure); 87 } 88 89 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { 90 allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None); 91 normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal); 92 immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure); 93 } 94 95 m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None); 96 m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal); 97 m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure); 95 98 } 96 99 … … 121 124 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { 122 125 allocatorFor(cellSize).reset(); 123 destructorAllocatorFor(cellSize).reset(); 126 normalDestructorAllocatorFor(cellSize).reset(); 127 immortalStructureDestructorAllocatorFor(cellSize).reset(); 124 128 } 125 129 126 130 for (size_t cellSize = impreciseStep; 
cellSize <= impreciseCutoff; cellSize += impreciseStep) { 127 131 allocatorFor(cellSize).reset(); 128 destructorAllocatorFor(cellSize).reset(); 129 } 130 131 m_largeAllocator.reset(); 132 m_structureAllocator.reset(); 132 normalDestructorAllocatorFor(cellSize).reset(); 133 immortalStructureDestructorAllocatorFor(cellSize).reset(); 134 } 135 136 m_normalSpace.largeAllocator.reset(); 137 m_normalDestructorSpace.largeAllocator.reset(); 138 m_immortalStructureDestructorSpace.largeAllocator.reset(); 133 139 } 134 140 … … 148 154 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { 149 155 allocatorFor(cellSize).canonicalizeCellLivenessData(); 150 destructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); 156 normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); 157 immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); 151 158 } 152 159 153 160 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { 154 161 allocatorFor(cellSize).canonicalizeCellLivenessData(); 155 destructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); 156 } 157 158 m_largeAllocator.canonicalizeCellLivenessData(); 159 m_structureAllocator.canonicalizeCellLivenessData(); 162 normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); 163 immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); 164 } 165 166 m_normalSpace.largeAllocator.canonicalizeCellLivenessData(); 167 m_normalDestructorSpace.largeAllocator.canonicalizeCellLivenessData(); 168 m_immortalStructureDestructorSpace.largeAllocator.canonicalizeCellLivenessData(); 160 169 } 161 170 … … 163 172 { 164 173 for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { 165 if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline)) 174 if (allocatorFor(cellSize).isPagedOut(deadline) 175 || 
normalDestructorAllocatorFor(cellSize).isPagedOut(deadline) 176 || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline)) 166 177 return true; 167 178 } 168 179 169 180 for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { 170 if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline)) 181 if (allocatorFor(cellSize).isPagedOut(deadline) 182 || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline) 183 || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline)) 171 184 return true; 172 185 } 173 186 174 if (m_largeAllocator.isPagedOut(deadline)) 187 if (m_normalSpace.largeAllocator.isPagedOut(deadline) 188 || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline) 189 || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline)) 175 190 return true; 176 191 177 if (m_structureAllocator.isPagedOut(deadline))178 return true;179 180 192 return false; 181 193 } … … 183 195 void MarkedSpace::freeBlock(MarkedBlock* block) 184 196 { 185 allocatorFor(block).removeBlock(block);197 block->allocator()->removeBlock(block); 186 198 m_blocks.remove(block); 187 199 if (block->capacity() == MarkedBlock::blockSize) { -
trunk/Source/JavaScriptCore/heap/MarkedSpace.h
r129586 r130303 77 77 MarkedAllocator& firstAllocator(); 78 78 MarkedAllocator& allocatorFor(size_t); 79 MarkedAllocator& allocatorFor(MarkedBlock*); 80 MarkedAllocator& destructorAllocatorFor(size_t); 81 void* allocateWithDestructor(size_t); 79 MarkedAllocator& immortalStructureDestructorAllocatorFor(size_t); 80 MarkedAllocator& normalDestructorAllocatorFor(size_t); 81 void* allocateWithNormalDestructor(size_t); 82 void* allocateWithImmortalStructureDestructor(size_t); 82 83 void* allocateWithoutDestructor(size_t); 83 void* allocateStructure(size_t);84 84 85 85 void resetAllocators(); … … 132 132 FixedArray<MarkedAllocator, preciseCount> preciseAllocators; 133 133 FixedArray<MarkedAllocator, impreciseCount> impreciseAllocators; 134 MarkedAllocator largeAllocator; 134 135 }; 135 136 136 Subspace m_destructorSpace; 137 Subspace m_normalDestructorSpace; 138 Subspace m_immortalStructureDestructorSpace; 137 139 Subspace m_normalSpace; 138 MarkedAllocator m_largeAllocator;139 MarkedAllocator m_structureAllocator;140 140 141 141 Heap* m_heap; … … 187 187 if (bytes <= impreciseCutoff) 188 188 return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 189 return m_largeAllocator; 190 } 191 192 inline MarkedAllocator& MarkedSpace::allocatorFor(MarkedBlock* block) 193 { 194 if (block->onlyContainsStructures()) 195 return m_structureAllocator; 196 197 if (block->cellsNeedDestruction()) 198 return destructorAllocatorFor(block->cellSize()); 199 200 return allocatorFor(block->cellSize()); 201 } 202 203 inline MarkedAllocator& MarkedSpace::destructorAllocatorFor(size_t bytes) 189 return m_normalSpace.largeAllocator; 190 } 191 192 inline MarkedAllocator& MarkedSpace::immortalStructureDestructorAllocatorFor(size_t bytes) 204 193 { 205 194 ASSERT(bytes); 206 195 if (bytes <= preciseCutoff) 207 return m_ destructorSpace.preciseAllocators[(bytes - 1) / preciseStep];196 return m_immortalStructureDestructorSpace.preciseAllocators[(bytes - 1) / preciseStep]; 208 197 if (bytes 
<= impreciseCutoff) 209 return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 210 return m_largeAllocator; 198 return m_immortalStructureDestructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 199 return m_immortalStructureDestructorSpace.largeAllocator; 200 } 201 202 inline MarkedAllocator& MarkedSpace::normalDestructorAllocatorFor(size_t bytes) 203 { 204 ASSERT(bytes); 205 if (bytes <= preciseCutoff) 206 return m_normalDestructorSpace.preciseAllocators[(bytes - 1) / preciseStep]; 207 if (bytes <= impreciseCutoff) 208 return m_normalDestructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; 209 return m_normalDestructorSpace.largeAllocator; 211 210 } 212 211 … … 216 215 } 217 216 218 inline void* MarkedSpace::allocateWith Destructor(size_t bytes)219 { 220 return destructorAllocatorFor(bytes).allocate(bytes);221 } 222 223 inline void* MarkedSpace::allocate Structure(size_t bytes)224 { 225 return m_structureAllocator.allocate(bytes);217 inline void* MarkedSpace::allocateWithImmortalStructureDestructor(size_t bytes) 218 { 219 return immortalStructureDestructorAllocatorFor(bytes).allocate(bytes); 220 } 221 222 inline void* MarkedSpace::allocateWithNormalDestructor(size_t bytes) 223 { 224 return normalDestructorAllocatorFor(bytes).allocate(bytes); 226 225 } 227 226 … … 230 229 for (size_t i = 0; i < preciseCount; ++i) { 231 230 m_normalSpace.preciseAllocators[i].forEachBlock(functor); 232 m_destructorSpace.preciseAllocators[i].forEachBlock(functor); 231 m_normalDestructorSpace.preciseAllocators[i].forEachBlock(functor); 232 m_immortalStructureDestructorSpace.preciseAllocators[i].forEachBlock(functor); 233 233 } 234 234 235 235 for (size_t i = 0; i < impreciseCount; ++i) { 236 236 m_normalSpace.impreciseAllocators[i].forEachBlock(functor); 237 m_destructorSpace.impreciseAllocators[i].forEachBlock(functor); 237 m_normalDestructorSpace.impreciseAllocators[i].forEachBlock(functor); 238 
m_immortalStructureDestructorSpace.impreciseAllocators[i].forEachBlock(functor); 238 239 } 239 240 240 m_largeAllocator.forEachBlock(functor); 241 m_structureAllocator.forEachBlock(functor); 241 m_normalSpace.largeAllocator.forEachBlock(functor); 242 m_normalDestructorSpace.largeAllocator.forEachBlock(functor); 243 m_immortalStructureDestructorSpace.largeAllocator.forEachBlock(functor); 242 244 243 245 return functor.returnValue(); -
trunk/Source/JavaScriptCore/heap/SlotVisitor.cpp
r128851 r130303 6 6 #include "CopiedSpaceInlineMethods.h" 7 7 #include "JSArray.h" 8 #include "JSDestructibleObject.h" 8 9 #include "JSGlobalData.h" 9 10 #include "JSObject.h"
Note: See TracChangeset for help on using the changeset viewer.