Ignore:
Timestamp:
Nov 20, 2011, 8:45:17 PM (14 years ago)
Author:
[email protected]
Message:

Inline caches that refer to otherwise dead objects should be cleared
https://bugs.webkit.org/show_bug.cgi?id=72311

Reviewed by Geoff Garen.

DFG code blocks now participate in the weak reference harvester fixpoint
so that they only consider themselves to be live if either they are
currently executing, or their owner is live and all of their weak references
are live. If not, the relevant code blocks are jettisoned.

Inline caches in both the old JIT and the DFG are now cleared if any of
their references are not marked at the end of a GC.

This is performance-neutral on SunSpider, V8, and Kraken. With the clear-
all-code-on-GC policy that we currently have, it shows a slight reduction
in memory usage. If we turn that policy off, it's pretty easy to come up
with an example program that will cause ToT to experience linear heap
growth, while with this patch, the heap stays small and remains at a
constant size.

  • assembler/ARMv7Assembler.h:

(JSC::ARMv7Assembler::readCallTarget):

  • assembler/MacroAssemblerARMv7.h:

(JSC::MacroAssemblerARMv7::readCallTarget):

  • assembler/MacroAssemblerX86.h:

(JSC::MacroAssemblerX86::readCallTarget):

  • assembler/MacroAssemblerX86_64.h:

(JSC::MacroAssemblerX86_64::readCallTarget):

  • bytecode/CodeBlock.cpp:

(JSC::CodeBlock::visitAggregate):
(JSC::CodeBlock::performTracingFixpointIteration):
(JSC::CodeBlock::visitWeakReferences):
(JSC::CodeBlock::finalizeUnconditionally):
(JSC::CodeBlock::stronglyVisitStrongReferences):
(JSC::MethodCallLinkInfo::reset):
(JSC::ProgramCodeBlock::jettison):
(JSC::EvalCodeBlock::jettison):
(JSC::FunctionCodeBlock::jettison):

  • bytecode/CodeBlock.h:

(JSC::CodeBlock::reoptimize):
(JSC::CodeBlock::shouldImmediatelyAssumeLivenessDuringScan):

  • bytecode/Instruction.h:

(JSC::PolymorphicAccessStructureList::visitWeak):

  • bytecode/StructureStubInfo.cpp:

(JSC::StructureStubInfo::visitWeakReferences):

  • bytecode/StructureStubInfo.h:

(JSC::isGetByIdAccess):
(JSC::isPutByIdAccess):
(JSC::StructureStubInfo::reset):

  • dfg/DFGJITCompiler.cpp:

(JSC::DFG::JITCompiler::link):

  • dfg/DFGOperations.cpp:
  • dfg/DFGRepatch.cpp:

(JSC::DFG::dfgRepatchByIdSelfAccess):
(JSC::DFG::dfgResetGetByID):
(JSC::DFG::dfgResetPutByID):

  • dfg/DFGRepatch.h:

(JSC::DFG::dfgResetGetByID):
(JSC::DFG::dfgResetPutByID):

  • jit/JIT.h:
  • jit/JITPropertyAccess.cpp:

(JSC::JIT::resetPatchGetById):
(JSC::JIT::resetPatchPutById):

  • jit/JITPropertyAccess32_64.cpp:

(JSC::JIT::resetPatchGetById):
(JSC::JIT::resetPatchPutById):

  • jit/JITStubs.cpp:

(JSC::DEFINE_STUB_FUNCTION):

  • jit/JITWriteBarrier.h:

(JSC::JITWriteBarrierBase::clearToMaxUnsigned):

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp

    r100556 → r100880 (diff between previous and this revision)
    3434#include "DFGCapabilities.h"
    3535#include "DFGNode.h"
     36#include "DFGRepatch.h"
    3637#include "Debugger.h"
    3738#include "Interpreter.h"
    3839#include "JIT.h"
     40#include "JITStubs.h"
    3941#include "JSActivation.h"
    4042#include "JSFunction.h"
     
    15881590    if (!!m_alternative)
    15891591        m_alternative->visitAggregate(visitor);
     1592
     1593    // There are three things that may use unconditional finalizers: lazy bytecode freeing,
     1594    // inline cache clearing, and jettisoning. The probability of us wanting to do at
     1595    // least one of those things is probably quite close to 1. So we add one no matter what
     1596    // and when it runs, it figures out whether it has any work to do.
     1597    visitor.addUnconditionalFinalizer(this);
     1598   
     1599    if (shouldImmediatelyAssumeLivenessDuringScan()) {
     1600        // This code block is live, so scan all references strongly and return.
     1601        stronglyVisitStrongReferences(visitor);
     1602        stronglyVisitWeakReferences(visitor);
     1603        return;
     1604    }
     1605   
     1606#if ENABLE(DFG_JIT)
     1607    // We get here if we're live in the sense that our owner executable is live,
     1608    // but we're not yet live for sure in another sense: we may yet decide that this
     1609    // code block should be jettisoned based on its outgoing weak references being
     1610    // stale. Set a flag to indicate that we're still assuming that we're dead, and
     1611    // perform one round of determining if we're live. The GC may determine, based on
     1612    // either us marking additional objects, or by other objects being marked for
     1613    // other reasons, that this iteration should run again; it will notify us of this
     1614    // decision by calling harvestWeakReferences().
     1615   
     1616    m_dfgData->livenessHasBeenProved = false;
     1617    m_dfgData->allTransitionsHaveBeenMarked = false;
     1618   
     1619    performTracingFixpointIteration(visitor);
     1620
     1621    // GC doesn't have enough information yet for us to decide whether to keep our DFG
     1622    // data, so we need to register a handler to run again at the end of GC, when more
     1623    // information is available.
     1624    if (!(m_dfgData->livenessHasBeenProved && m_dfgData->allTransitionsHaveBeenMarked))
     1625        visitor.addWeakReferenceHarvester(this);
     1626   
     1627#else // ENABLE(DFG_JIT)
     1628    ASSERT_NOT_REACHED();
     1629#endif // ENABLE(DFG_JIT)
     1630}
     1631
     1632void CodeBlock::performTracingFixpointIteration(SlotVisitor& visitor)
     1633{
     1634    UNUSED_PARAM(visitor);
     1635   
     1636#if ENABLE(DFG_JIT)
     1637    // Evaluate our weak reference transitions, if there are still some to evaluate.
     1638    if (!m_dfgData->allTransitionsHaveBeenMarked) {
     1639        bool allAreMarkedSoFar = true;
     1640        for (unsigned i = 0; i < m_dfgData->transitions.size(); ++i) {
     1641            if ((!m_dfgData->transitions[i].m_codeOrigin
     1642                 || Heap::isMarked(m_dfgData->transitions[i].m_codeOrigin.get()))
     1643                && Heap::isMarked(m_dfgData->transitions[i].m_from.get())) {
     1644                // If the following three things are live, then the target of the
     1645                // transition is also live:
     1646                // - This code block. We know it's live already because otherwise
     1647                //   we wouldn't be scanning ourselves.
     1648                // - The code origin of the transition. Transitions may arise from
     1649                //   code that was inlined. They are not relevant if the user's
     1650                //   object that is required for the inlinee to run is no longer
     1651                //   live.
     1652                // - The source of the transition. The transition checks if some
     1653                //   heap location holds the source, and if so, stores the target.
     1654                //   Hence the source must be live for the transition to be live.
     1655                visitor.append(&m_dfgData->transitions[i].m_to);
     1656            } else
     1657                allAreMarkedSoFar = false;
     1658        }
     1659       
     1660        if (allAreMarkedSoFar)
     1661            m_dfgData->allTransitionsHaveBeenMarked = true;
     1662    }
     1663   
     1664    // Check if we have any remaining work to do.
     1665    if (m_dfgData->livenessHasBeenProved)
     1666        return;
     1667   
     1668    // Now check all of our weak references. If all of them are live, then we
     1669    // have proved liveness and so we scan our strong references. If at end of
     1670    // GC we still have not proved liveness, then this code block is toast.
     1671    bool allAreLiveSoFar = true;
     1672    for (unsigned i = 0; i < m_dfgData->weakReferences.size(); ++i) {
     1673        if (!Heap::isMarked(m_dfgData->weakReferences[i].get())) {
     1674            allAreLiveSoFar = false;
     1675            break;
     1676        }
     1677    }
     1678   
     1679    // If some weak references are dead, then this fixpoint iteration was
     1680    // unsuccessful.
     1681    if (!allAreLiveSoFar)
     1682        return;
     1683   
     1684    // All weak references are live. Record this information so we don't
     1685    // come back here again, and scan the strong references.
     1686    m_dfgData->livenessHasBeenProved = true;
     1687    stronglyVisitStrongReferences(visitor);
     1688#endif // ENABLE(DFG_JIT)
     1689}
     1690
     1691void CodeBlock::visitWeakReferences(SlotVisitor& visitor)
     1692{
     1693    performTracingFixpointIteration(visitor);
     1694}
     1695
     1696void CodeBlock::finalizeUnconditionally()
     1697{
     1698#if ENABLE(JIT_VERBOSE_OSR)
     1699    static const bool verboseUnlinking = true;
     1700#else
     1701    static const bool verboseUnlinking = false;
     1702#endif
     1703   
     1704#if ENABLE(DFG_JIT)
     1705    // Check if we're not live. If we are, then jettison.
     1706    if (!(shouldImmediatelyAssumeLivenessDuringScan() || m_dfgData->livenessHasBeenProved)) {
     1707        if (verboseUnlinking)
     1708            printf("Code block %p has dead weak references, jettisoning during GC.\n", this);
     1709
     1710        // Make sure that the baseline JIT knows that it should re-warm-up before
     1711        // optimizing.
     1712        alternative()->optimizeAfterWarmUp();
     1713       
     1714        jettison();
     1715        return;
     1716    }
     1717#endif // ENABLE(DFG_JIT)
     1718   
     1719#if ENABLE(JIT)
     1720    // Handle inline caches.
     1721    if (!!getJITCode()) {
     1722        RepatchBuffer repatchBuffer(this);
     1723        for (unsigned i = 0; i < numberOfCallLinkInfos(); ++i) {
     1724            if (callLinkInfo(i).isLinked() && !Heap::isMarked(callLinkInfo(i).callee.get())) {
     1725                if (verboseUnlinking)
     1726                    printf("Clearing call from %p.\n", this);
     1727                callLinkInfo(i).unlink(*m_globalData, repatchBuffer);
     1728            }
     1729            if (!!callLinkInfo(i).lastSeenCallee
     1730                && !Heap::isMarked(callLinkInfo(i).lastSeenCallee.get()))
     1731                callLinkInfo(i).lastSeenCallee.clear();
     1732        }
     1733        for (size_t size = m_globalResolveInfos.size(), i = 0; i < size; ++i) {
     1734            if (m_globalResolveInfos[i].structure && !Heap::isMarked(m_globalResolveInfos[i].structure.get())) {
     1735                if (verboseUnlinking)
     1736                    printf("Clearing resolve info in %p.\n", this);
     1737                m_globalResolveInfos[i].structure.clear();
     1738            }
     1739        }
     1740
     1741        for (size_t size = m_structureStubInfos.size(), i = 0; i < size; ++i) {
     1742            StructureStubInfo& stubInfo = m_structureStubInfos[i];
     1743           
     1744            AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
     1745           
     1746            if (stubInfo.visitWeakReferences())
     1747                continue;
     1748           
     1749            if (verboseUnlinking)
     1750                printf("Clearing structure cache (kind %d) in %p.\n", stubInfo.accessType, this);
     1751           
     1752            if (isGetByIdAccess(accessType)) {
     1753                if (getJITCode().jitType() == JITCode::DFGJIT)
     1754                    DFG::dfgResetGetByID(repatchBuffer, stubInfo);
     1755                else
     1756                    JIT::resetPatchGetById(repatchBuffer, &stubInfo);
     1757            } else {
     1758                ASSERT(isPutByIdAccess(accessType));
     1759                if (getJITCode().jitType() == JITCode::DFGJIT)
     1760                    DFG::dfgResetPutByID(repatchBuffer, stubInfo);
     1761                else
     1762                    JIT::resetPatchPutById(repatchBuffer, &stubInfo);
     1763            }
     1764           
     1765            stubInfo.reset();
     1766        }
     1767
     1768        for (size_t size = m_methodCallLinkInfos.size(), i = 0; i < size; ++i) {
     1769            if (!m_methodCallLinkInfos[i].cachedStructure)
     1770                continue;
     1771           
     1772            ASSERT(m_methodCallLinkInfos[i].seenOnce());
     1773            ASSERT(!!m_methodCallLinkInfos[i].cachedPrototypeStructure);
     1774
     1775            if (!Heap::isMarked(m_methodCallLinkInfos[i].cachedStructure.get())
     1776                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedPrototypeStructure.get())
     1777                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedFunction.get())
     1778                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedPrototype.get())) {
     1779                if (verboseUnlinking)
     1780                    printf("Clearing method call in %p.\n", this);
     1781                m_methodCallLinkInfos[i].reset(repatchBuffer, getJITType());
     1782            }
     1783        }
     1784    }
     1785#endif
     1786
     1787    // Handle the bytecode discarding chore.
     1788    if (m_shouldDiscardBytecode) {
     1789        discardBytecode();
     1790        m_shouldDiscardBytecode = false;
     1791    }
     1792}
     1793
     1794void CodeBlock::stronglyVisitStrongReferences(SlotVisitor& visitor)
     1795{
    15901796    visitor.append(&m_globalObject);
    15911797    visitor.append(&m_ownerExecutable);
     
    16021808    for (size_t i = 0; i < m_functionDecls.size(); ++i)
    16031809        visitor.append(&m_functionDecls[i]);
    1604 #if ENABLE(JIT)
    1605     for (unsigned i = 0; i < numberOfCallLinkInfos(); ++i) {
    1606         if (callLinkInfo(i).isLinked())
    1607             visitor.append(&callLinkInfo(i).callee);
    1608         if (!!callLinkInfo(i).lastSeenCallee)
    1609             visitor.append(&callLinkInfo(i).lastSeenCallee);
    1610     }
    1611 #endif
    16121810#if ENABLE(INTERPRETER)
    16131811    for (size_t size = m_propertyAccessInstructions.size(), i = 0; i < size; ++i)
     
    16151813    for (size_t size = m_globalResolveInstructions.size(), i = 0; i < size; ++i)
    16161814        visitStructures(visitor, &instructions()[m_globalResolveInstructions[i]]);
    1617 #endif
    1618 #if ENABLE(JIT)
    1619     for (size_t size = m_globalResolveInfos.size(), i = 0; i < size; ++i) {
    1620         if (m_globalResolveInfos[i].structure)
    1621             visitor.append(&m_globalResolveInfos[i].structure);
    1622     }
    1623 
    1624     for (size_t size = m_structureStubInfos.size(), i = 0; i < size; ++i)
    1625         m_structureStubInfos[i].visitAggregate(visitor);
    1626 
    1627     for (size_t size = m_methodCallLinkInfos.size(), i = 0; i < size; ++i) {
    1628         if (m_methodCallLinkInfos[i].cachedStructure) {
    1629             // These members must be filled at the same time, and only after
    1630             // the MethodCallLinkInfo is set as seen.
    1631             ASSERT(m_methodCallLinkInfos[i].seenOnce());
    1632             visitor.append(&m_methodCallLinkInfos[i].cachedStructure);
    1633             ASSERT(!!m_methodCallLinkInfos[i].cachedPrototypeStructure);
    1634             visitor.append(&m_methodCallLinkInfos[i].cachedPrototypeStructure);
    1635             visitor.append(&m_methodCallLinkInfos[i].cachedFunction);
    1636             visitor.append(&m_methodCallLinkInfos[i].cachedPrototype);
    1637         }
    1638     }
    16391815#endif
    16401816
     
    16541830        valueProfile(profileIndex)->computeUpdatedPrediction();
    16551831#endif
    1656    
    1657 #if ENABLE(JIT) && !ENABLE(OPCODE_SAMPLING)
    1658     // Kill off some bytecode. We can't do it here because we don't want to accidentally
    1659     // call into malloc while in stop-the-world GC mode.
    1660     if (hasInstructions() && m_shouldDiscardBytecode)
    1661         visitor.addUnconditionalFinalizer(this);
    1662 #endif
    1663    
    1664     stronglyVisitWeakReferences(visitor);
    16651832}
    16661833
     
    18682035}
    18692036
     2037void MethodCallLinkInfo::reset(RepatchBuffer& repatchBuffer, JITCode::JITType jitType)
     2038{
     2039    cachedStructure.clearToMaxUnsigned();
     2040    cachedPrototype.clear();
     2041    cachedPrototypeStructure.clearToMaxUnsigned();
     2042    cachedFunction.clear();
     2043   
     2044    if (jitType == JITCode::DFGJIT) {
     2045#if ENABLE(DFG_JIT)
     2046        repatchBuffer.relink(callReturnLocation, operationGetMethodOptimize);
     2047#else
     2048        ASSERT_NOT_REACHED();
     2049#endif
     2050    } else {
     2051        ASSERT(jitType == JITCode::BaselineJIT);
     2052        repatchBuffer.relink(callReturnLocation, cti_op_get_by_id_method_check);
     2053    }
     2054}
     2055
    18702056void CodeBlock::unlinkCalls()
    18712057{
     
    19832169}
    19842170
    1985 void ProgramCodeBlock::jettison(JSGlobalData& globalData)
     2171void ProgramCodeBlock::jettison()
    19862172{
    19872173    ASSERT(getJITType() != JITCode::BaselineJIT);
    19882174    ASSERT(this == replacement());
    1989     static_cast<ProgramExecutable*>(ownerExecutable())->jettisonOptimizedCode(globalData);
    1990 }
    1991 
    1992 void EvalCodeBlock::jettison(JSGlobalData& globalData)
     2175    static_cast<ProgramExecutable*>(ownerExecutable())->jettisonOptimizedCode(*globalData());
     2176}
     2177
     2178void EvalCodeBlock::jettison()
    19932179{
    19942180    ASSERT(getJITType() != JITCode::BaselineJIT);
    19952181    ASSERT(this == replacement());
    1996     static_cast<EvalExecutable*>(ownerExecutable())->jettisonOptimizedCode(globalData);
    1997 }
    1998 
    1999 void FunctionCodeBlock::jettison(JSGlobalData& globalData)
     2182    static_cast<EvalExecutable*>(ownerExecutable())->jettisonOptimizedCode(*globalData());
     2183}
     2184
     2185void FunctionCodeBlock::jettison()
    20002186{
    20012187    ASSERT(getJITType() != JITCode::BaselineJIT);
    20022188    ASSERT(this == replacement());
    2003     static_cast<FunctionExecutable*>(ownerExecutable())->jettisonOptimizedCodeFor(globalData, m_isConstructor ? CodeForConstruct : CodeForCall);
    2004 }
    2005 #endif
    2006 
    2007 void CodeBlock::finalizeUnconditionally()
    2008 {
    2009 #if ENABLE(OPCODE_SAMPLING) || !ENABLE(JIT)
    2010     ASSERT_NOT_REACHED();
    2011 #endif
    2012     ASSERT(m_shouldDiscardBytecode);
    2013     discardBytecode();
    2014 }
     2189    static_cast<FunctionExecutable*>(ownerExecutable())->jettisonOptimizedCodeFor(*globalData(), m_isConstructor ? CodeForConstruct : CodeForCall);
     2190}
     2191#endif
    20152192
    20162193#if ENABLE(VALUE_PROFILER)
Note: See TracChangeset for help on using the changeset viewer.