Changeset 179357 in webkit for trunk/Source/JavaScriptCore/jit/Repatch.cpp
- Timestamp: Jan 29, 2015, 12:33:45 PM
- File: 1 edited
trunk/Source/JavaScriptCore/jit/Repatch.cpp
```diff
--- trunk/Source/JavaScriptCore/jit/Repatch.cpp (r178928)
+++ trunk/Source/JavaScriptCore/jit/Repatch.cpp (r179357)
@@ -30,4 +30,5 @@
 
 #include "AccessorCallJITStubRoutine.h"
+#include "BinarySwitch.h"
 #include "CCallHelpers.h"
 #include "DFGOperations.h"
@@ -1576,10 +1577,15 @@
 
 static void linkSlowFor(
+    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
+{
+    repatchBuffer.relink(
+        callLinkInfo.callReturnLocation, vm->getCTIStub(generator).code());
+}
+
+static void linkSlowFor(
     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
     CodeSpecializationKind kind, RegisterPreservationMode registers)
 {
-    repatchBuffer.relink(
-        callLinkInfo.callReturnLocation,
-        vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
+    linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
 }
 
@@ -1593,8 +1599,4 @@
     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
 
-    // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
-    if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
-        calleeCodeBlock->m_shouldAlwaysBeInlined = false;
-
     VM* vm = callerCodeBlock->vm();
 
@@ -1612,5 +1614,6 @@
 
     if (kind == CodeForCall) {
-        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
+        linkSlowFor(
+            repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
         return;
     }
@@ -1632,13 +1635,119 @@
 }
 
-void linkClosureCall(
-    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
-    ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
-    RegisterPreservationMode registers)
-{
-    ASSERT(!callLinkInfo.stub);
+static void revertCall(
+    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
+{
+    repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
+        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
+        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR), 0);
+    linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
+    callLinkInfo.hasSeenShouldRepatch = false;
+    callLinkInfo.callee.clear();
+    callLinkInfo.stub.clear();
+    if (callLinkInfo.isOnList())
+        callLinkInfo.remove();
+}
+
+void unlinkFor(
+    RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
+    CodeSpecializationKind kind, RegisterPreservationMode registers)
+{
+    if (Options::showDisassembly())
+        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation, " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
+
+    revertCall(
+        repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
+        linkThunkGeneratorFor(kind, registers));
+}
+
+void linkVirtualFor(
+    ExecState* exec, CallLinkInfo& callLinkInfo,
+    CodeSpecializationKind kind, RegisterPreservationMode registers)
+{
+    // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
+    // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.
 
     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
     VM* vm = callerCodeBlock->vm();
+
+    if (shouldShowDisassemblyFor(callerCodeBlock))
+        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
+
+    RepatchBuffer repatchBuffer(callerCodeBlock);
+    revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
+}
+
+namespace {
+struct CallToCodePtr {
+    CCallHelpers::Call call;
+    MacroAssemblerCodePtr codePtr;
+};
+} // annonymous namespace
+
+void linkPolymorphicCall(
+    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
+    RegisterPreservationMode registers)
+{
+    // Currently we can't do anything for non-function callees.
+    // https://bugs.webkit.org/show_bug.cgi?id=140685
+    if (!newVariant || !newVariant.executable()) {
+        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
+        return;
+    }
+
+    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+    VM* vm = callerCodeBlock->vm();
+
+    CallVariantList list;
+    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub.get())
+        list = stub->variants();
+    else if (JSFunction* oldCallee = callLinkInfo.callee.get())
+        list = CallVariantList{ CallVariant(oldCallee) };
+
+    list = variantListWithVariant(list, newVariant);
+
+    // If there are any closure calls then it makes sense to treat all of them as closure calls.
+    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
+    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
+    bool isClosureCall = false;
+    for (CallVariant variant : list) {
+        if (variant.isClosureCall()) {
+            list = despecifiedVariantList(list);
+            isClosureCall = true;
+            break;
+        }
+    }
+
+    Vector<PolymorphicCallCase> callCases;
+
+    // Figure out what our cases are.
+    for (CallVariant variant : list) {
+        CodeBlock* codeBlock;
+        if (variant.executable()->isHostFunction())
+            codeBlock = nullptr;
+        else {
+            codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
+
+            // If we cannot handle a callee, assume that it's better for this whole thing to be a
+            // virtual call.
+            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType == CallLinkInfo::CallVarargs || callLinkInfo.callType == CallLinkInfo::ConstructVarargs) {
+                linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
+                return;
+            }
+        }
+
+        callCases.append(PolymorphicCallCase(variant, codeBlock));
+    }
+
+    // If we are over the limit, just use a normal virtual call.
+    unsigned maxPolymorphicCallVariantListSize;
+    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
+        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
+    else
+        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
+    if (list.size() > maxPolymorphicCallVariantListSize) {
+        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
+        return;
+    }
 
     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
@@ -1656,31 +1765,80 @@
         okArgumentCount.link(&stubJit);
     }
+
+    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
+    GPRReg comparisonValueGPR;
+
+    if (isClosureCall) {
+        // Verify that we have a function and stash the executable in scratch.
 
 #if USE(JSVALUE64)
-    // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
-    // being set. So we do this the hard way.
-    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
-    stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
-    slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
+        // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
+        // being set. So we do this the hard way.
+        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
+        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
#else
-    // We would have already checked that the callee is a cell.
-#endif
-
-    slowPath.append(
-        stubJit.branch8(
-            CCallHelpers::NotEqual,
-            CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
-            CCallHelpers::TrustedImm32(JSFunctionType)));
-
-    slowPath.append(
-        stubJit.branchPtr(
-            CCallHelpers::NotEqual,
+        // We would have already checked that the callee is a cell.
+#endif
+
+        slowPath.append(
+            stubJit.branch8(
+                CCallHelpers::NotEqual,
+                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
+                CCallHelpers::TrustedImm32(JSFunctionType)));
+
+        stubJit.loadPtr(
             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
-            CCallHelpers::TrustedImmPtr(executable)));
-
-    AssemblyHelpers::Call call = stubJit.nearCall();
-    AssemblyHelpers::Jump done = stubJit.jump();
+            scratch);
+
+        comparisonValueGPR = scratch;
+    } else
+        comparisonValueGPR = calleeGPR;
+
+    Vector<int64_t> caseValues(callCases.size());
+    Vector<CallToCodePtr> calls(callCases.size());
+    std::unique_ptr<uint32_t[]> fastCounts;
+
+    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
+        fastCounts = std::make_unique<uint32_t[]>(callCases.size());
+
+    for (size_t i = callCases.size(); i--;) {
+        if (fastCounts)
+            fastCounts[i] = 0;
+
+        CallVariant variant = callCases[i].variant();
+        if (isClosureCall)
+            caseValues[i] = bitwise_cast<intptr_t>(variant.executable());
+        else
+            caseValues[i] = bitwise_cast<intptr_t>(variant.function());
+    }
+
+    GPRReg fastCountsBaseGPR =
+        AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
+    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
+
+    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
+    CCallHelpers::JumpList done;
+    while (binarySwitch.advance(stubJit)) {
+        size_t caseIndex = binarySwitch.caseIndex();
+
+        CallVariant variant = callCases[caseIndex].variant();
+
+        ASSERT(variant.executable()->hasJITCodeForCall());
+        MacroAssemblerCodePtr codePtr =
+            variant.executable()->generatedJITCodeForCall()->addressForCall(
+                *vm, variant.executable(), ArityCheckNotRequired, registers);
+
+        if (fastCounts) {
+            stubJit.add32(
+                CCallHelpers::TrustedImm32(1),
+                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
+        }
+        calls[caseIndex].call = stubJit.nearCall();
+        calls[caseIndex].codePtr = codePtr;
+        done.append(stubJit.jump());
+    }
 
     slowPath.link(&stubJit);
+    binarySwitch.fallThrough().link(&stubJit);
     stubJit.move(calleeGPR, GPRInfo::regT0);
 #if USE(JSVALUE32_64)
@@ -1692,21 +1850,26 @@
     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
     AssemblyHelpers::Jump slow = stubJit.jump();
 
     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
 
-    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
+    RELEASE_ASSERT(callCases.size() == calls.size());
+    for (CallToCodePtr callToCodePtr : calls) {
+        patchBuffer.link(
+            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
+    }
     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
     else
         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
-    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
-
-    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
+    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));
+
+    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
         FINALIZE_CODE_FOR(
             callerCodeBlock, patchBuffer,
-            ("Closure call stub for %s, return point %p, target %p (%s)",
+            ("Polymorphic call stub for %s, return point %p, targets %s",
                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
-                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
-        *vm, callerCodeBlock->ownerExecutable(), executable));
+                toCString(listDump(callCases)).data())),
+        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
+        WTF::move(fastCounts)));
 
     RepatchBuffer repatchBuffer(callerCodeBlock);
@@ -1715,9 +1878,15 @@
         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
         CodeLocationLabel(stubRoutine->code().code()));
+    // This is weird. The original slow path should no longer be reachable.
     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
 
+    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
+    // that it's no longer on stack.
     callLinkInfo.stub = stubRoutine.release();
 
-    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
+    // The call link info no longer has a call cache apart from the jump to the polymorphic call
+    // stub.
+    if (callLinkInfo.isOnList())
+        callLinkInfo.remove();
 }
 
```
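For readers unfamiliar with the mechanism, here is a minimal, self-contained C++ sketch of the dispatch strategy the new `linkPolymorphicCall` stub implements: compare the incoming callee (or its executable, for closure calls) against a sorted set of known cases, bump a per-case counter for profiling, call the matched target directly, and fall through to a slow path otherwise. All names below (`PolymorphicCallCache`, `CallCase`, `targetA`, ...) are invented for illustration; the real change emits machine code through `CCallHelpers` and `BinarySwitch` rather than running C++ like this.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

// Illustrative model only. Each case pairs a callee identity (function pointer,
// or the shared executable for closure calls) with a direct target and a hit
// counter that a higher tier could later consult for profiling.
struct CallCase {
    intptr_t caseValue;          // what the stub compares the incoming callee against
    void (*target)(intptr_t);    // specialized fast path for that callee
    uint32_t fastCount;          // incremented on every hit (like the fastCounts array)
};

struct PolymorphicCallCache {
    std::vector<CallCase> cases; // kept sorted by caseValue so we can binary-search
    void (*slowPath)(intptr_t);  // fallback for callees the stub does not know

    void call(intptr_t callee) {
        // Binary search plays the role of BinarySwitch in the generated stub:
        // O(log n) comparisons instead of a linear chain of branches.
        auto it = std::lower_bound(
            cases.begin(), cases.end(), callee,
            [](const CallCase& c, intptr_t v) { return c.caseValue < v; });
        if (it != cases.end() && it->caseValue == callee) {
            ++it->fastCount;     // cheap per-case profiling
            it->target(callee);  // direct call to the known target
            return;
        }
        slowPath(callee);        // unknown callee: relink or go virtual
    }
};

// Tiny usage example with two known callees and one unknown one.
static void targetA(intptr_t) { std::puts("fast path A"); }
static void targetB(intptr_t) { std::puts("fast path B"); }
static void slow(intptr_t)    { std::puts("slow path (relink or go virtual)"); }

int main() {
    PolymorphicCallCache cache{ { { 100, targetA, 0 }, { 200, targetB, 0 } }, slow };
    std::sort(cache.cases.begin(), cache.cases.end(),
        [](const CallCase& a, const CallCase& b) { return a.caseValue < b.caseValue; });
    cache.call(100); // hits targetA
    cache.call(200); // hits targetB
    cache.call(300); // falls through to the slow path
    return 0;
}
```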