Ignore:
Timestamp:
Aug 26, 2009, 2:33:30 PM (16 years ago)
Author:
[email protected]
Message:

End re-roll-in of r47738:47740 with Windows crash fixed

2009-08-26 Geoffrey Garen <[email protected]>

Build fix: start out with a 32-bit value to avoid a shortening warning.

  • runtime/Collector.cpp: (JSC::Heap::sweep):

2009-08-24 Geoffrey Garen <[email protected]>

Reviewed by Oliver Hunt.

Substantially reduced VM thrash in the GC heap.


1.08x faster on v8 (1.60x faster on v8-splay).


1.40x faster on bench-alloc-nonretained.


1.90x faster on bench-alloc-retained.


SunSpider says no change.


  • runtime/Collector.cpp: (JSC::Heap::heapAllocate): Fixed a long-standing bug: update a few local variables unconditionally after calling collect(), since they may be used even if we don't "goto scan". (In the bug I saw, usedBlocks got out of sync with heap.usedBlocks). (JSC::Heap::sweep): Keep enough free heap space to accommodate the number of objects we'll allocate before the next GC, plus 25%, for good measure.
  • runtime/Collector.h: Bumped the block size to 256k. This seems to give the best cache performance, and it prevents us from initiating lots of VM traffic to recover very small chunks of memory.

Begin re-roll-in of r47738:47740 with Windows crash fixed

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/JavaScriptCore/runtime/Collector.cpp

    r47759 r47795  
    6161
    6262#include <windows.h>
     63#include <malloc.h>
    6364
    6465#elif PLATFORM(HAIKU)
     
    102103// tunable parameters
    103104
    104 const size_t SPARE_EMPTY_BLOCKS = 2;
    105105const size_t GROWTH_FACTOR = 2;
    106106const size_t LOW_WATER_FACTOR = 4;
     
    241241    memset(reinterpret_cast<void*>(address), 0, BLOCK_SIZE);
    242242#elif PLATFORM(WIN_OS)
    243      // windows virtual address granularity is naturally 64k
    244     LPVOID address = VirtualAlloc(NULL, BLOCK_SIZE, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
     243    void* address = _aligned_malloc(BLOCK_SIZE, BLOCK_SIZE);
    245244#elif HAVE(POSIX_MEMALIGN)
    246245    void* address;
     
    285284    userChunk->Free(reinterpret_cast<TAny*>(block));
    286285#elif PLATFORM(WIN_OS)
    287     VirtualFree(block, 0, MEM_RELEASE);
     286    _aligned_free(block);
    288287#elif HAVE(POSIX_MEMALIGN)
    289288    free(block);
     
    386385            heap.operationInProgress = NoOperation;
    387386#endif
    388             bool collected = collect();
     387            bool foundGarbage = collect();
     388            numLiveObjects = heap.numLiveObjects;
     389            usedBlocks = heap.usedBlocks;
     390            i = heap.firstBlockWithPossibleSpace;
    389391#ifndef NDEBUG
    390392            heap.operationInProgress = Allocation;
    391393#endif
    392             if (collected) {
    393                 numLiveObjects = heap.numLiveObjects;
    394                 usedBlocks = heap.usedBlocks;
    395                 i = heap.firstBlockWithPossibleSpace;
     394            if (foundGarbage)
    396395                goto scan;
    397             }
    398396        }
    399397 
     
    10851083        curBlock->marked.clearAll();
    10861084       
    1087         if (usedCells == 0) {
    1088             emptyBlocks++;
    1089             if (emptyBlocks > SPARE_EMPTY_BLOCKS) {
    1090 #if !DEBUG_COLLECTOR
    1091                 freeBlock(reinterpret_cast<CollectorBlock*>(curBlock));
    1092 #endif
    1093                 // swap with the last block so we compact as we go
    1094                 heap.blocks[block] = heap.blocks[heap.usedBlocks - 1];
    1095                 heap.usedBlocks--;
    1096                 block--; // Don't move forward a step in this case
    1097                
    1098                 if (heap.numBlocks > MIN_ARRAY_SIZE && heap.usedBlocks < heap.numBlocks / LOW_WATER_FACTOR) {
    1099                     heap.numBlocks = heap.numBlocks / GROWTH_FACTOR;
    1100                     heap.blocks = static_cast<CollectorBlock**>(fastRealloc(heap.blocks, heap.numBlocks * sizeof(CollectorBlock*)));
    1101                 }
    1102             }
    1103         }
     1085        if (!usedCells)
     1086            ++emptyBlocks;
    11041087    }
    11051088   
    11061089    if (heap.numLiveObjects != numLiveObjects)
    11071090        heap.firstBlockWithPossibleSpace = 0;
    1108        
     1091   
    11091092    heap.numLiveObjects = numLiveObjects;
    11101093    heap.numLiveObjectsAtLastCollect = numLiveObjects;
    11111094    heap.extraCost = 0;
     1095   
     1096    if (!emptyBlocks)
     1097        return numLiveObjects;
     1098
     1099    size_t neededCells = 1.25f * (numLiveObjects + max(ALLOCATIONS_PER_COLLECTION, numLiveObjects));
     1100    size_t neededBlocks = (neededCells + HeapConstants<heapType>::cellsPerBlock - 1) / HeapConstants<heapType>::cellsPerBlock;
     1101    for (size_t block = 0; block < heap.usedBlocks; block++) {
     1102        if (heap.usedBlocks <= neededBlocks)
     1103            break;
     1104
     1105        Block* curBlock = reinterpret_cast<Block*>(heap.blocks[block]);
     1106        if (curBlock->usedCells)
     1107            continue;
     1108
     1109#if !DEBUG_COLLECTOR
     1110        freeBlock(reinterpret_cast<CollectorBlock*>(curBlock));
     1111#endif
     1112        // swap with the last block so we compact as we go
     1113        heap.blocks[block] = heap.blocks[heap.usedBlocks - 1];
     1114        heap.usedBlocks--;
     1115        block--; // Don't move forward a step in this case
     1116
     1117        if (heap.numBlocks > MIN_ARRAY_SIZE && heap.usedBlocks < heap.numBlocks / LOW_WATER_FACTOR) {
     1118            heap.numBlocks = heap.numBlocks / GROWTH_FACTOR;
     1119            heap.blocks = static_cast<CollectorBlock**>(fastRealloc(heap.blocks, heap.numBlocks * sizeof(CollectorBlock*)));
     1120        }
     1121    }
     1122
    11121123    return numLiveObjects;
    11131124}
Note: See TracChangeset for help on using the changeset viewer.