Changeset 227592 in webkit for trunk/Source/JavaScriptCore/jit/AssemblyHelpers.cpp
- Timestamp: Jan 24, 2018, 8:19:44 PM (8 years ago)
- Files: 1 edited
- Legend: Unmodified | Added | Removed
trunk/Source/JavaScriptCore/jit/AssemblyHelpers.cpp
r226440 r227592 1 1 /* 2 * Copyright (C) 2011-201 7Apple Inc. All rights reserved.2 * Copyright (C) 2011-2018 Apple Inc. All rights reserved. 3 3 * 4 4 * Redistribution and use in source and binary forms, with or without … … 582 582 } 583 583 #endif 584 585 void AssemblyHelpers::emitAllocateWithNonNullAllocator(GPRReg resultGPR, const JITAllocator& allocator, GPRReg allocatorGPR, GPRReg scratchGPR, JumpList& slowPath) 586 { 587 // NOTE: This is carefully written so that we can call it while we disallow scratch 588 // register usage. 589 590 if (Options::forceGCSlowPaths()) { 591 slowPath.append(jump()); 592 return; 593 } 594 595 Jump popPath; 596 Jump done; 597 598 #if ENABLE(FAST_TLS_JIT) 599 loadFromTLSPtr(fastTLSOffsetForKey(WTF_GC_TLC_KEY), scratchGPR); 600 #else 601 loadPtr(&vm().threadLocalCacheData, scratchGPR); 602 #endif 603 if (allocator.isConstant()) { 604 slowPath.append(branch32(BelowOrEqual, Address(scratchGPR, ThreadLocalCache::offsetOfSizeInData()), TrustedImm32(allocator.allocator().offset()))); 605 addPtr(TrustedImm32(ThreadLocalCache::offsetOfFirstAllocatorInData() + allocator.allocator().offset()), scratchGPR, allocatorGPR); 606 } else { 607 slowPath.append(branch32(BelowOrEqual, Address(scratchGPR, ThreadLocalCache::offsetOfSizeInData()), allocatorGPR)); 608 addPtr(TrustedImm32(ThreadLocalCache::offsetOfFirstAllocatorInData()), allocatorGPR); 609 addPtr(scratchGPR, allocatorGPR); 610 } 611 612 load32(Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfRemaining()), resultGPR); 613 popPath = branchTest32(Zero, resultGPR); 614 if (allocator.isConstant()) 615 add32(TrustedImm32(-allocator.allocator().cellSize(vm().heap)), resultGPR, scratchGPR); 616 else { 617 if (isX86()) { 618 move(resultGPR, scratchGPR); 619 sub32(Address(allocatorGPR, LocalAllocator::offsetOfCellSize()), scratchGPR); 620 } else { 621 load32(Address(allocatorGPR, LocalAllocator::offsetOfCellSize()), scratchGPR); 622 sub32(resultGPR, scratchGPR, 
scratchGPR); 623 } 624 } 625 negPtr(resultGPR); 626 store32(scratchGPR, Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfRemaining())); 627 Address payloadEndAddr = Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfPayloadEnd()); 628 if (isX86()) 629 addPtr(payloadEndAddr, resultGPR); 630 else { 631 loadPtr(payloadEndAddr, scratchGPR); 632 addPtr(scratchGPR, resultGPR); 633 } 634 635 done = jump(); 636 637 popPath.link(this); 638 639 loadPtr(Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfScrambledHead()), resultGPR); 640 if (isX86()) 641 xorPtr(Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfSecret()), resultGPR); 642 else { 643 loadPtr(Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfSecret()), scratchGPR); 644 xorPtr(scratchGPR, resultGPR); 645 } 646 slowPath.append(branchTestPtr(Zero, resultGPR)); 647 648 // The object is half-allocated: we have what we know is a fresh object, but 649 // it's still on the GC's free list. 
650 loadPtr(Address(resultGPR), scratchGPR); 651 storePtr(scratchGPR, Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfScrambledHead())); 652 653 done.link(this); 654 } 655 656 void AssemblyHelpers::emitAllocate(GPRReg resultGPR, const JITAllocator& allocator, GPRReg allocatorGPR, GPRReg scratchGPR, JumpList& slowPath) 657 { 658 if (allocator.isConstant()) { 659 if (!allocator.allocator()) { 660 slowPath.append(jump()); 661 return; 662 } 663 } 664 emitAllocateWithNonNullAllocator(resultGPR, allocator, allocatorGPR, scratchGPR, slowPath); 665 } 666 667 void AssemblyHelpers::emitAllocateVariableSized(GPRReg resultGPR, CompleteSubspace& subspace, GPRReg allocationSize, GPRReg scratchGPR1, GPRReg scratchGPR2, JumpList& slowPath) 668 { 669 static_assert(!(MarkedSpace::sizeStep & (MarkedSpace::sizeStep - 1)), "MarkedSpace::sizeStep must be a power of two."); 670 671 unsigned stepShift = getLSBSet(MarkedSpace::sizeStep); 672 673 add32(TrustedImm32(MarkedSpace::sizeStep - 1), allocationSize, scratchGPR1); 674 urshift32(TrustedImm32(stepShift), scratchGPR1); 675 slowPath.append(branch32(Above, scratchGPR1, TrustedImm32(MarkedSpace::largeCutoff >> stepShift))); 676 move(TrustedImmPtr(subspace.allocatorForSizeStep() - 1), scratchGPR2); 677 load32(BaseIndex(scratchGPR2, scratchGPR1, TimesFour), scratchGPR1); 678 679 emitAllocate(resultGPR, JITAllocator::variable(), scratchGPR1, scratchGPR2, slowPath); 680 } 584 681 585 682 void AssemblyHelpers::restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(EntryFrame*& topEntryFrame)
Note: See TracChangeset for help on using the changeset viewer.