Changeset 179392 in webkit for trunk/Source/JavaScriptCore/jit/Repatch.cpp
- Timestamp: Jan 29, 2015, 8:28:36 PM (10 years ago)
- File: 1 edited
Legend:
- Unmodified (no prefix)
- Added (prefixed with +)
- Removed (prefixed with -)
trunk/Source/JavaScriptCore/jit/Repatch.cpp (r179357 → r179392)

@@ old 30 / new 30 @@
 
 #include "AccessorCallJITStubRoutine.h"
-#include "BinarySwitch.h"
 #include "CCallHelpers.h"
 #include "DFGOperations.h"

@@ old 1577 / new 1576 @@
 
 static void linkSlowFor(
-    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
-{
-    repatchBuffer.relink(
-        callLinkInfo.callReturnLocation, vm->getCTIStub(generator).code());
-}
-
-static void linkSlowFor(
     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
     CodeSpecializationKind kind, RegisterPreservationMode registers)
 {
-    linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
+    repatchBuffer.relink(
+        callLinkInfo.callReturnLocation,
+        vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
 }

@@ old 1599 / new 1593 @@
     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
 
+    // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
+    if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
+        calleeCodeBlock->m_shouldAlwaysBeInlined = false;
+
     VM* vm = callerCodeBlock->vm();
 

@@ old 1614 / new 1612 @@
 
     if (kind == CodeForCall) {
-        linkSlowFor(
-            repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
+        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
         return;
     }

@@ old 1635 / new 1632 @@
 }
 
-static void revertCall(
-    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
-{
-    repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
-        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
-        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR), 0);
-    linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
-    callLinkInfo.hasSeenShouldRepatch = false;
-    callLinkInfo.callee.clear();
-    callLinkInfo.stub.clear();
-    if (callLinkInfo.isOnList())
-        callLinkInfo.remove();
-}
-
-void unlinkFor(
-    RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
-    CodeSpecializationKind kind, RegisterPreservationMode registers)
-{
-    if (Options::showDisassembly())
-        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation, " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
-
-    revertCall(
-        repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
-        linkThunkGeneratorFor(kind, registers));
-}
-
-void linkVirtualFor(
-    ExecState* exec, CallLinkInfo& callLinkInfo,
-    CodeSpecializationKind kind, RegisterPreservationMode registers)
-{
-    // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
-    // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.
+void linkClosureCall(
+    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
+    ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
+    RegisterPreservationMode registers)
+{
+    ASSERT(!callLinkInfo.stub);
 
     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
     VM* vm = callerCodeBlock->vm();
-
-    if (shouldShowDisassemblyFor(callerCodeBlock))
-        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
-
-    RepatchBuffer repatchBuffer(callerCodeBlock);
-    revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
-}
-
-namespace {
-struct CallToCodePtr {
-    CCallHelpers::Call call;
-    MacroAssemblerCodePtr codePtr;
-};
-} // annonymous namespace
-
-void linkPolymorphicCall(
-    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
-    RegisterPreservationMode registers)
-{
-    // Currently we can't do anything for non-function callees.
-    // https://bugs.webkit.org/show_bug.cgi?id=140685
-    if (!newVariant || !newVariant.executable()) {
-        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
-        return;
-    }
-
-    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
-    VM* vm = callerCodeBlock->vm();
-
-    CallVariantList list;
-    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub.get())
-        list = stub->variants();
-    else if (JSFunction* oldCallee = callLinkInfo.callee.get())
-        list = CallVariantList{ CallVariant(oldCallee) };
-
-    list = variantListWithVariant(list, newVariant);
-
-    // If there are any closure calls then it makes sense to treat all of them as closure calls.
-    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
-    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
-    bool isClosureCall = false;
-    for (CallVariant variant : list) {
-        if (variant.isClosureCall()) {
-            list = despecifiedVariantList(list);
-            isClosureCall = true;
-            break;
-        }
-    }
-
-    Vector<PolymorphicCallCase> callCases;
-
-    // Figure out what our cases are.
-    for (CallVariant variant : list) {
-        CodeBlock* codeBlock;
-        if (variant.executable()->isHostFunction())
-            codeBlock = nullptr;
-        else {
-            codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
-
-            // If we cannot handle a callee, assume that it's better for this whole thing to be a
-            // virtual call.
-            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType == CallLinkInfo::CallVarargs || callLinkInfo.callType == CallLinkInfo::ConstructVarargs) {
-                linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
-                return;
-            }
-        }
-
-        callCases.append(PolymorphicCallCase(variant, codeBlock));
-    }
-
-    // If we are over the limit, just use a normal virtual call.
-    unsigned maxPolymorphicCallVariantListSize;
-    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
-        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
-    else
-        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
-    if (list.size() > maxPolymorphicCallVariantListSize) {
-        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
-        return;
-    }
 
     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);

@@ old 1765 / new 1656 @@
         okArgumentCount.link(&stubJit);
     }
 
+#if USE(JSVALUE64)
+    // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
+    // being set. So we do this the hard way.
     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
-    GPRReg comparisonValueGPR;
-
-    if (isClosureCall) {
-        // Verify that we have a function and stash the executable in scratch.
-
-#if USE(JSVALUE64)
-        // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
-        // being set. So we do this the hard way.
-        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
-        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
+    stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
+    slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
 #else
-        // We would have already checked that the callee is a cell.
-#endif
-
-        slowPath.append(
-            stubJit.branch8(
-                CCallHelpers::NotEqual,
-                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
-                CCallHelpers::TrustedImm32(JSFunctionType)));
-
-        stubJit.loadPtr(
+    // We would have already checked that the callee is a cell.
+#endif
+
+    slowPath.append(
+        stubJit.branch8(
+            CCallHelpers::NotEqual,
+            CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
+            CCallHelpers::TrustedImm32(JSFunctionType)));
+
+    slowPath.append(
+        stubJit.branchPtr(
+            CCallHelpers::NotEqual,
             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
-            scratch);
-
-        comparisonValueGPR = scratch;
-    } else
-        comparisonValueGPR = calleeGPR;
-
-    Vector<int64_t> caseValues(callCases.size());
-    Vector<CallToCodePtr> calls(callCases.size());
-    std::unique_ptr<uint32_t[]> fastCounts;
-
-    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
-        fastCounts = std::make_unique<uint32_t[]>(callCases.size());
-
-    for (size_t i = callCases.size(); i--;) {
-        if (fastCounts)
-            fastCounts[i] = 0;
-
-        CallVariant variant = callCases[i].variant();
-        if (isClosureCall)
-            caseValues[i] = bitwise_cast<intptr_t>(variant.executable());
-        else
-            caseValues[i] = bitwise_cast<intptr_t>(variant.function());
-    }
-
-    GPRReg fastCountsBaseGPR =
-        AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
-    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
-
-    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
-    CCallHelpers::JumpList done;
-    while (binarySwitch.advance(stubJit)) {
-        size_t caseIndex = binarySwitch.caseIndex();
-
-        CallVariant variant = callCases[caseIndex].variant();
-
-        ASSERT(variant.executable()->hasJITCodeForCall());
-        MacroAssemblerCodePtr codePtr =
-            variant.executable()->generatedJITCodeForCall()->addressForCall(
-                *vm, variant.executable(), ArityCheckNotRequired, registers);
-
-        if (fastCounts) {
-            stubJit.add32(
-                CCallHelpers::TrustedImm32(1),
-                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
-        }
-        calls[caseIndex].call = stubJit.nearCall();
-        calls[caseIndex].codePtr = codePtr;
-        done.append(stubJit.jump());
-    }
+            CCallHelpers::TrustedImmPtr(executable)));
+
+    AssemblyHelpers::Call call = stubJit.nearCall();
+    AssemblyHelpers::Jump done = stubJit.jump();
 
     slowPath.link(&stubJit);
-    binarySwitch.fallThrough().link(&stubJit);
     stubJit.move(calleeGPR, GPRInfo::regT0);
 #if USE(JSVALUE32_64)

@@ old 1850 / new 1692 @@
     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
     AssemblyHelpers::Jump slow = stubJit.jump();
 
     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
 
-    RELEASE_ASSERT(callCases.size() == calls.size());
-    for (CallToCodePtr callToCodePtr : calls) {
-        patchBuffer.link(
-            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
-    }
+    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
     else
         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
-    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));
-
-    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
+    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
+
+    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
         FINALIZE_CODE_FOR(
             callerCodeBlock, patchBuffer,
-            ("Polymorphic call stub for %s, return point %p, targets %s",
+            ("Closure call stub for %s, return point %p, target %p (%s)",
                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
-                toCString(listDump(callCases)).data())),
-        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
-        WTF::move(fastCounts)));
+                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
+        *vm, callerCodeBlock->ownerExecutable(), executable));
 
     RepatchBuffer repatchBuffer(callerCodeBlock);

@@ old 1878 / new 1715 @@
         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
         CodeLocationLabel(stubRoutine->code().code()));
-    // This is weird. The original slow path should no longer be reachable.
     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
 
-    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
-    // that it's no longer on stack.
     callLinkInfo.stub = stubRoutine.release();
 
-    // The call link info no longer has a call cache apart from the jump to the polymorphic call
-    // stub.
-    if (callLinkInfo.isOnList())
-        callLinkInfo.remove();
+    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
 }
 
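For context on what this changeset restores: the linkClosureCall path added back above emits a stub that checks that the incoming callee is a JSFunction whose executable matches the one baked into the stub, near-calls the cached code pointer on a match, and otherwise jumps to the shared virtual-call thunk. The following is a minimal, standalone C++ sketch of that dispatch decision only, under a deliberately simplified model; CachedCallSite, Callee, and Executable are hypothetical stand-ins for CallLinkInfo, JSFunction, and ExecutableBase, and none of this is JSC code.

// Illustrative model only: approximates the decision the generated
// closure-call stub makes, not the JIT code emission itself.
#include <cstdio>

struct Executable {                      // stand-in for ExecutableBase
    const char* name;
};

struct Callee {                          // stand-in for JSFunction
    const Executable* executable;
};

struct CachedCallSite {                  // stand-in for CallLinkInfo + its stub
    const Executable* cachedExecutable;  // what the stub compares against
    void (*fastPath)(const Callee&);     // code pointer baked into the stub
};

static void slowVirtualCall(const Callee& callee)
{
    // Stand-in for the shared virtual-call thunk: resolve the callee's code
    // on every call instead of using the cached target.
    std::printf("virtual call to %s\n", callee.executable->name);
}

static void callThroughStub(const CachedCallSite& site, const Callee& callee)
{
    // The real stub also checks that the callee is a cell of JSFunctionType;
    // here the type is guaranteed statically, so only the executable
    // comparison remains: same executable => take the cached fast path.
    if (callee.executable == site.cachedExecutable)
        site.fastPath(callee);
    else
        slowVirtualCall(callee);
}

static void fastPathForFoo(const Callee&)
{
    std::printf("fast call into foo's compiled code\n");
}

int main()
{
    Executable foo { "foo" };
    Executable bar { "bar" };
    CachedCallSite site { &foo, fastPathForFoo };

    Callee closure1 { &foo };  // a closure over foo: takes the fast path
    Callee closure2 { &bar };  // different executable: falls back to virtual call

    callThroughStub(site, closure1);
    callThroughStub(site, closure2);
    return 0;
}

The sketch also shows why the stub compares executables rather than function objects: any number of distinct closures sharing foo's executable would all take the fast path through the same stub.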