Timestamp: Jan 29, 2015, 8:28:36 PM (10 years ago)
Author: [email protected]
Message:

Unreviewed, rolling out r179357 and r179358.
https://bugs.webkit.org/show_bug.cgi?id=141062

Suspect this caused WebGL tests to start flaking (Requested by
kling on #webkit).

Reverted changesets:

"Polymorphic call inlining should be based on polymorphic call
inline caching rather than logging"
https://bugs.webkit.org/show_bug.cgi?id=140660
http://trac.webkit.org/changeset/179357

"Unreviewed, fix no-JIT build."
http://trac.webkit.org/changeset/179358

Patch by Commit Queue <[email protected]> on 2015-01-29
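In effect, r179357 had replaced JSC's single-target closure-call stubs with polymorphic call stubs that dispatch over every callee seen at a call site; this rollout restores the closure-call scheme, as the diff below shows. As a rough illustration of the two dispatch shapes, here is a minimal sketch in plain C++ with made-up stand-in types, not JSC's actual stub machinery or API:

    #include <vector>

    // Stand-ins for illustration only; these are not JSC types.
    struct Callee { const void* executable; void (*entry)(); };

    // Shape this changeset restores: a closure-call stub guards on one
    // expected executable and otherwise falls through to the slow path.
    void closureCallStub(const Callee& callee, const void* expectedExecutable,
                         void (*target)(), void (*slowPathThunk)())
    {
        if (callee.executable == expectedExecutable)
            target();           // the single cached case
        else
            slowPathThunk();    // e.g. the shared virtual-call thunk
    }

    // Shape this changeset removes: a polymorphic stub dispatches over all
    // callees seen so far (the real stub emitted a BinarySwitch, not a loop).
    void polymorphicCallStub(const Callee& callee, const std::vector<Callee>& cases,
                             void (*slowPathThunk)())
    {
        for (const Callee& knownCase : cases) {
            if (knownCase.executable == callee.executable) {
                knownCase.entry();
                return;
            }
        }
        slowPathThunk();        // unseen callee: relink or go virtual
    }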

File: 1 edited

Legend: in the diff below, lines prefixed "-" were removed, lines prefixed "+" were added, and unprefixed lines are unmodified.
  • trunk/Source/JavaScriptCore/jit/Repatch.cpp

--- trunk/Source/JavaScriptCore/jit/Repatch.cpp (r179357)
+++ trunk/Source/JavaScriptCore/jit/Repatch.cpp (r179392)
@@ -30,5 +30,4 @@
 
 #include "AccessorCallJITStubRoutine.h"
-#include "BinarySwitch.h"
 #include "CCallHelpers.h"
 #include "DFGOperations.h"
@@ -1577,15 +1576,10 @@
 
 static void linkSlowFor(
-    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
-{
-    repatchBuffer.relink(
-        callLinkInfo.callReturnLocation, vm->getCTIStub(generator).code());
-}
-
-static void linkSlowFor(
     RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
     CodeSpecializationKind kind, RegisterPreservationMode registers)
 {
-    linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
+    repatchBuffer.relink(
+        callLinkInfo.callReturnLocation,
+        vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
 }
 
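In the restored version above, the generator-parameterized linkSlowFor overload is gone and the surviving overload relinks the call's slow path directly to the shared virtual-call thunk for the given specialization kind.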
     
@@ -1599,4 +1593,8 @@
     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
 
+    // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
+    if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
+        calleeCodeBlock->m_shouldAlwaysBeInlined = false;
+
     VM* vm = callerCodeBlock->vm();
 
     
@@ -1614,6 +1612,5 @@
 
     if (kind == CodeForCall) {
-        linkSlowFor(
-            repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
+        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
         return;
     }
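With the polymorphic machinery rolled out, an unlinked call's slow path is wired back to the closure-call linking thunk (linkClosureCallThunkGeneratorFor) rather than the polymorphic-call one.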
     
@@ -1635,119 +1632,13 @@
 }
 
-static void revertCall(
-    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
-{
-    repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
-        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
-        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR), 0);
-    linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
-    callLinkInfo.hasSeenShouldRepatch = false;
-    callLinkInfo.callee.clear();
-    callLinkInfo.stub.clear();
-    if (callLinkInfo.isOnList())
-        callLinkInfo.remove();
-}
-
-void unlinkFor(
-    RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
-    CodeSpecializationKind kind, RegisterPreservationMode registers)
-{
-    if (Options::showDisassembly())
-        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation, " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
-
-    revertCall(
-        repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
-        linkThunkGeneratorFor(kind, registers));
-}
-
-void linkVirtualFor(
-    ExecState* exec, CallLinkInfo& callLinkInfo,
-    CodeSpecializationKind kind, RegisterPreservationMode registers)
-{
-    // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
-    // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.
+void linkClosureCall(
+    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
+    ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
+    RegisterPreservationMode registers)
+{
+    ASSERT(!callLinkInfo.stub);
 
     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
     VM* vm = callerCodeBlock->vm();
-
-    if (shouldShowDisassemblyFor(callerCodeBlock))
-        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
-
-    RepatchBuffer repatchBuffer(callerCodeBlock);
-    revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
-}
-
-namespace {
-struct CallToCodePtr {
-    CCallHelpers::Call call;
-    MacroAssemblerCodePtr codePtr;
-};
-} // annonymous namespace
-
-void linkPolymorphicCall(
-    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
-    RegisterPreservationMode registers)
-{
-    // Currently we can't do anything for non-function callees.
-    // https://bugs.webkit.org/show_bug.cgi?id=140685
-    if (!newVariant || !newVariant.executable()) {
-        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
-        return;
-    }
-
-    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
-    VM* vm = callerCodeBlock->vm();
-
-    CallVariantList list;
-    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub.get())
-        list = stub->variants();
-    else if (JSFunction* oldCallee = callLinkInfo.callee.get())
-        list = CallVariantList{ CallVariant(oldCallee) };
-
-    list = variantListWithVariant(list, newVariant);
-
-    // If there are any closure calls then it makes sense to treat all of them as closure calls.
-    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
-    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
-    bool isClosureCall = false;
-    for (CallVariant variant : list)  {
-        if (variant.isClosureCall()) {
-            list = despecifiedVariantList(list);
-            isClosureCall = true;
-            break;
-        }
-    }
-
-    Vector<PolymorphicCallCase> callCases;
-
-    // Figure out what our cases are.
-    for (CallVariant variant : list) {
-        CodeBlock* codeBlock;
-        if (variant.executable()->isHostFunction())
-            codeBlock = nullptr;
-        else {
-            codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
-
-            // If we cannot handle a callee, assume that it's better for this whole thing to be a
-            // virtual call.
-            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType == CallLinkInfo::CallVarargs || callLinkInfo.callType == CallLinkInfo::ConstructVarargs) {
-                linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
-                return;
-            }
-        }
-
-        callCases.append(PolymorphicCallCase(variant, codeBlock));
-    }
-
-    // If we are over the limit, just use a normal virtual call.
-    unsigned maxPolymorphicCallVariantListSize;
-    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
-        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
-    else
-        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
-    if (list.size() > maxPolymorphicCallVariantListSize) {
-        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
-        return;
-    }
 
     GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
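The removed linkPolymorphicCall above keys its dispatch on either the executable (after despecifying the list once any variant is a closure call) or on the JSFunction pointer itself. A minimal sketch of that key-selection logic, using made-up stand-in types rather than JSC's CallVariant:

    #include <cstdint>
    #include <vector>

    // Illustrative stand-in for JSC's CallVariant; not the real class.
    struct Variant { const void* function; const void* executable; bool isClosureCall; };

    std::vector<std::intptr_t> caseValuesFor(const std::vector<Variant>& list)
    {
        // If any variant is a closure call, treat all of them as closure
        // calls, so one key kind (the executable) drives the whole switch.
        bool anyClosure = false;
        for (const Variant& variant : list)
            anyClosure |= variant.isClosureCall;

        std::vector<std::intptr_t> keys;
        keys.reserve(list.size());
        for (const Variant& variant : list) {
            const void* key = anyClosure ? variant.executable : variant.function;
            keys.push_back(reinterpret_cast<std::intptr_t>(key));
        }
        return keys;
    }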
     
@@ -1765,80 +1656,31 @@
         okArgumentCount.link(&stubJit);
     }
-
+
+#if USE(JSVALUE64)
+    // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
+    // being set. So we do this the hard way.
     GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
-    GPRReg comparisonValueGPR;
-
-    if (isClosureCall) {
-        // Verify that we have a function and stash the executable in scratch.
-
-#if USE(JSVALUE64)
-        // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
-        // being set. So we do this the hard way.
-        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
-        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
+    stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
+    slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
 #else
-        // We would have already checked that the callee is a cell.
-#endif
-
-        slowPath.append(
-            stubJit.branch8(
-                CCallHelpers::NotEqual,
-                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
-                CCallHelpers::TrustedImm32(JSFunctionType)));
-
-        stubJit.loadPtr(
+    // We would have already checked that the callee is a cell.
+#endif
+
+    slowPath.append(
+        stubJit.branch8(
+            CCallHelpers::NotEqual,
+            CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
+            CCallHelpers::TrustedImm32(JSFunctionType)));
+
+    slowPath.append(
+        stubJit.branchPtr(
+            CCallHelpers::NotEqual,
             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
-            scratch);
-
-        comparisonValueGPR = scratch;
-    } else
-        comparisonValueGPR = calleeGPR;
-
-    Vector<int64_t> caseValues(callCases.size());
-    Vector<CallToCodePtr> calls(callCases.size());
-    std::unique_ptr<uint32_t[]> fastCounts;
-
-    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
-        fastCounts = std::make_unique<uint32_t[]>(callCases.size());
-
-    for (size_t i = callCases.size(); i--;) {
-        if (fastCounts)
-            fastCounts[i] = 0;
-
-        CallVariant variant = callCases[i].variant();
-        if (isClosureCall)
-            caseValues[i] = bitwise_cast<intptr_t>(variant.executable());
-        else
-            caseValues[i] = bitwise_cast<intptr_t>(variant.function());
-    }
-
-    GPRReg fastCountsBaseGPR =
-        AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
-    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
-
-    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
-    CCallHelpers::JumpList done;
-    while (binarySwitch.advance(stubJit)) {
-        size_t caseIndex = binarySwitch.caseIndex();
-
-        CallVariant variant = callCases[caseIndex].variant();
-
-        ASSERT(variant.executable()->hasJITCodeForCall());
-        MacroAssemblerCodePtr codePtr =
-            variant.executable()->generatedJITCodeForCall()->addressForCall(
-                *vm, variant.executable(), ArityCheckNotRequired, registers);
-
-        if (fastCounts) {
-            stubJit.add32(
-                CCallHelpers::TrustedImm32(1),
-                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
-        }
-        calls[caseIndex].call = stubJit.nearCall();
-        calls[caseIndex].codePtr = codePtr;
-        done.append(stubJit.jump());
-    }
+            CCallHelpers::TrustedImmPtr(executable)));
+
+    AssemblyHelpers::Call call = stubJit.nearCall();
+    AssemblyHelpers::Jump done = stubJit.jump();
 
     slowPath.link(&stubJit);
-    binarySwitch.fallThrough().link(&stubJit);
     stubJit.move(calleeGPR, GPRInfo::regT0);
 #if USE(JSVALUE32_64)
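In place of the BinarySwitch over those case values, the restored stub above emits a single branchPtr guard against the expected ExecutableBase; on JSVALUE64 it also re-checks that the callee is a cell of JSFunctionType because, per the comment, tagMaskRegister cannot be relied on inside the stub.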
     
@@ -1850,26 +1692,21 @@
     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
     AssemblyHelpers::Jump slow = stubJit.jump();
-
+
     LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
 
-    RELEASE_ASSERT(callCases.size() == calls.size());
-    for (CallToCodePtr callToCodePtr : calls) {
-        patchBuffer.link(
-            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
-    }
+    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
     if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
         patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
     else
         patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
-    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));
-
-    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
+    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
+
+    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
         FINALIZE_CODE_FOR(
             callerCodeBlock, patchBuffer,
-            ("Polymorphic call stub for %s, return point %p, targets %s",
+            ("Closure call stub for %s, return point %p, target %p (%s)",
                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
-                toCString(listDump(callCases)).data())),
-        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
-        WTF::move(fastCounts)));
+                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
+        *vm, callerCodeBlock->ownerExecutable(), executable));
 
     RepatchBuffer repatchBuffer(callerCodeBlock);
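Accordingly, the finished code is wrapped in a ClosureCallStubRoutine holding the one target executable, instead of a PolymorphicCallStubRoutine carrying the whole case list and its fast-path counts, and the stub's slow exit goes straight to the shared virtual-call thunk.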
     
@@ -1878,15 +1715,9 @@
         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
         CodeLocationLabel(stubRoutine->code().code()));
-    // This is weird. The original slow path should no longer be reachable.
     linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
 
-    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
-    // that it's no longer on stack.
     callLinkInfo.stub = stubRoutine.release();
 
-    // The call link info no longer has a call cache apart from the jump to the polymorphic call
-    // stub.
-    if (callLinkInfo.isOnList())
-        callLinkInfo.remove();
+    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
 }
 
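Note that the restored tail asserts the callee code block already tracks this CallLinkInfo and relies on linkClosureCall only ever installing a stub into an empty slot (the ASSERT at the top of the function), whereas the removed code tolerated replacing an existing stub and delisted the link info afterwards.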