Changeset 100880 in webkit for trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
- Timestamp: Nov 20, 2011, 8:45:17 PM
- File: 1 edited
Legend:
- Unmodified: leading space
- Added: leading +
- Removed: leading -
- Elided context: […]
trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
--- trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp (r100556)
+++ trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp (r100880)

 #include "DFGCapabilities.h"
 #include "DFGNode.h"
+#include "DFGRepatch.h"
 #include "Debugger.h"
 #include "Interpreter.h"
 #include "JIT.h"
+#include "JITStubs.h"
 #include "JSActivation.h"
 #include "JSFunction.h"
[…]
     if (!!m_alternative)
         m_alternative->visitAggregate(visitor);
+
+    // There are three things that may use unconditional finalizers: lazy bytecode freeing,
+    // inline cache clearing, and jettisoning. The probability of us wanting to do at
+    // least one of those things is probably quite close to 1. So we add one no matter what
+    // and when it runs, it figures out whether it has any work to do.
+    visitor.addUnconditionalFinalizer(this);
+
+    if (shouldImmediatelyAssumeLivenessDuringScan()) {
+        // This code block is live, so scan all references strongly and return.
+        stronglyVisitStrongReferences(visitor);
+        stronglyVisitWeakReferences(visitor);
+        return;
+    }
+
+#if ENABLE(DFG_JIT)
+    // We get here if we're live in the sense that our owner executable is live,
+    // but we're not yet live for sure in another sense: we may yet decide that this
+    // code block should be jettisoned based on its outgoing weak references being
+    // stale. Set a flag to indicate that we're still assuming that we're dead, and
+    // perform one round of determining if we're live. The GC may determine, based on
+    // either us marking additional objects, or by other objects being marked for
+    // other reasons, that this iteration should run again; it will notify us of this
+    // decision by calling harvestWeakReferences().
+
+    m_dfgData->livenessHasBeenProved = false;
+    m_dfgData->allTransitionsHaveBeenMarked = false;
+
+    performTracingFixpointIteration(visitor);
+
+    // GC doesn't have enough information yet for us to decide whether to keep our DFG
+    // data, so we need to register a handler to run again at the end of GC, when more
+    // information is available.
+    if (!(m_dfgData->livenessHasBeenProved && m_dfgData->allTransitionsHaveBeenMarked))
+        visitor.addWeakReferenceHarvester(this);
+
+#else // ENABLE(DFG_JIT)
+    ASSERT_NOT_REACHED();
+#endif // ENABLE(DFG_JIT)
+}
+
+void CodeBlock::performTracingFixpointIteration(SlotVisitor& visitor)
+{
+    UNUSED_PARAM(visitor);
+
+#if ENABLE(DFG_JIT)
+    // Evaluate our weak reference transitions, if there are still some to evaluate.
+    if (!m_dfgData->allTransitionsHaveBeenMarked) {
+        bool allAreMarkedSoFar = true;
+        for (unsigned i = 0; i < m_dfgData->transitions.size(); ++i) {
+            if ((!m_dfgData->transitions[i].m_codeOrigin
+                 || Heap::isMarked(m_dfgData->transitions[i].m_codeOrigin.get()))
+                && Heap::isMarked(m_dfgData->transitions[i].m_from.get())) {
+                // If the following three things are live, then the target of the
+                // transition is also live:
+                // - This code block. We know it's live already because otherwise
+                //   we wouldn't be scanning ourselves.
+                // - The code origin of the transition. Transitions may arise from
+                //   code that was inlined. They are not relevant if the user's
+                //   object that is required for the inlinee to run is no longer
+                //   live.
+                // - The source of the transition. The transition checks if some
+                //   heap location holds the source, and if so, stores the target.
+                //   Hence the source must be live for the transition to be live.
+                visitor.append(&m_dfgData->transitions[i].m_to);
+            } else
+                allAreMarkedSoFar = false;
+        }
+
+        if (allAreMarkedSoFar)
+            m_dfgData->allTransitionsHaveBeenMarked = true;
+    }
+
+    // Check if we have any remaining work to do.
+    if (m_dfgData->livenessHasBeenProved)
+        return;
+
+    // Now check all of our weak references. If all of them are live, then we
+    // have proved liveness and so we scan our strong references. If at end of
+    // GC we still have not proved liveness, then this code block is toast.
+    bool allAreLiveSoFar = true;
+    for (unsigned i = 0; i < m_dfgData->weakReferences.size(); ++i) {
+        if (!Heap::isMarked(m_dfgData->weakReferences[i].get())) {
+            allAreLiveSoFar = false;
+            break;
+        }
+    }
+
+    // If some weak references are dead, then this fixpoint iteration was
+    // unsuccessful.
+    if (!allAreLiveSoFar)
+        return;
+
+    // All weak references are live. Record this information so we don't
+    // come back here again, and scan the strong references.
+    m_dfgData->livenessHasBeenProved = true;
+    stronglyVisitStrongReferences(visitor);
+#endif // ENABLE(DFG_JIT)
+}
+
+void CodeBlock::visitWeakReferences(SlotVisitor& visitor)
+{
+    performTracingFixpointIteration(visitor);
+}
+
+void CodeBlock::finalizeUnconditionally()
+{
+#if ENABLE(JIT_VERBOSE_OSR)
+    static const bool verboseUnlinking = true;
+#else
+    static const bool verboseUnlinking = false;
+#endif
+
+#if ENABLE(DFG_JIT)
+    // Check if we're not live. If we are, then jettison.
+    if (!(shouldImmediatelyAssumeLivenessDuringScan() || m_dfgData->livenessHasBeenProved)) {
+        if (verboseUnlinking)
+            printf("Code block %p has dead weak references, jettisoning during GC.\n", this);
+
+        // Make sure that the baseline JIT knows that it should re-warm-up before
+        // optimizing.
+        alternative()->optimizeAfterWarmUp();
+
+        jettison();
+        return;
+    }
+#endif // ENABLE(DFG_JIT)
+
+#if ENABLE(JIT)
+    // Handle inline caches.
+    if (!!getJITCode()) {
+        RepatchBuffer repatchBuffer(this);
+        for (unsigned i = 0; i < numberOfCallLinkInfos(); ++i) {
+            if (callLinkInfo(i).isLinked() && !Heap::isMarked(callLinkInfo(i).callee.get())) {
+                if (verboseUnlinking)
+                    printf("Clearing call from %p.\n", this);
+                callLinkInfo(i).unlink(*m_globalData, repatchBuffer);
+            }
+            if (!!callLinkInfo(i).lastSeenCallee
+                && !Heap::isMarked(callLinkInfo(i).lastSeenCallee.get()))
+                callLinkInfo(i).lastSeenCallee.clear();
+        }
+        for (size_t size = m_globalResolveInfos.size(), i = 0; i < size; ++i) {
+            if (m_globalResolveInfos[i].structure && !Heap::isMarked(m_globalResolveInfos[i].structure.get())) {
+                if (verboseUnlinking)
+                    printf("Clearing resolve info in %p.\n", this);
+                m_globalResolveInfos[i].structure.clear();
+            }
+        }
+
+        for (size_t size = m_structureStubInfos.size(), i = 0; i < size; ++i) {
+            StructureStubInfo& stubInfo = m_structureStubInfos[i];
+
+            AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+            if (stubInfo.visitWeakReferences())
+                continue;
+
+            if (verboseUnlinking)
+                printf("Clearing structure cache (kind %d) in %p.\n", stubInfo.accessType, this);
+
+            if (isGetByIdAccess(accessType)) {
+                if (getJITCode().jitType() == JITCode::DFGJIT)
+                    DFG::dfgResetGetByID(repatchBuffer, stubInfo);
+                else
+                    JIT::resetPatchGetById(repatchBuffer, &stubInfo);
+            } else {
+                ASSERT(isPutByIdAccess(accessType));
+                if (getJITCode().jitType() == JITCode::DFGJIT)
+                    DFG::dfgResetPutByID(repatchBuffer, stubInfo);
+                else
+                    JIT::resetPatchPutById(repatchBuffer, &stubInfo);
+            }
+
+            stubInfo.reset();
+        }
+
+        for (size_t size = m_methodCallLinkInfos.size(), i = 0; i < size; ++i) {
+            if (!m_methodCallLinkInfos[i].cachedStructure)
+                continue;
+
+            ASSERT(m_methodCallLinkInfos[i].seenOnce());
+            ASSERT(!!m_methodCallLinkInfos[i].cachedPrototypeStructure);
+
+            if (!Heap::isMarked(m_methodCallLinkInfos[i].cachedStructure.get())
+                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedPrototypeStructure.get())
+                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedFunction.get())
+                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedPrototype.get())) {
+                if (verboseUnlinking)
+                    printf("Clearing method call in %p.\n", this);
+                m_methodCallLinkInfos[i].reset(repatchBuffer, getJITType());
+            }
+        }
+    }
+#endif
+
+    // Handle the bytecode discarding chore.
+    if (m_shouldDiscardBytecode) {
+        discardBytecode();
+        m_shouldDiscardBytecode = false;
+    }
+}
+
+void CodeBlock::stronglyVisitStrongReferences(SlotVisitor& visitor)
+{
     visitor.append(&m_globalObject);
     visitor.append(&m_ownerExecutable);
[…]
     for (size_t i = 0; i < m_functionDecls.size(); ++i)
         visitor.append(&m_functionDecls[i]);
-#if ENABLE(JIT)
-    for (unsigned i = 0; i < numberOfCallLinkInfos(); ++i) {
-        if (callLinkInfo(i).isLinked())
-            visitor.append(&callLinkInfo(i).callee);
-        if (!!callLinkInfo(i).lastSeenCallee)
-            visitor.append(&callLinkInfo(i).lastSeenCallee);
-    }
-#endif
 #if ENABLE(INTERPRETER)
     for (size_t size = m_propertyAccessInstructions.size(), i = 0; i < size; ++i)
[…]
     for (size_t size = m_globalResolveInstructions.size(), i = 0; i < size; ++i)
         visitStructures(visitor, &instructions()[m_globalResolveInstructions[i]]);
-#endif
-#if ENABLE(JIT)
-    for (size_t size = m_globalResolveInfos.size(), i = 0; i < size; ++i) {
-        if (m_globalResolveInfos[i].structure)
-            visitor.append(&m_globalResolveInfos[i].structure);
-    }
-
-    for (size_t size = m_structureStubInfos.size(), i = 0; i < size; ++i)
-        m_structureStubInfos[i].visitAggregate(visitor);
-
-    for (size_t size = m_methodCallLinkInfos.size(), i = 0; i < size; ++i) {
-        if (m_methodCallLinkInfos[i].cachedStructure) {
-            // These members must be filled at the same time, and only after
-            // the MethodCallLinkInfo is set as seen.
-            ASSERT(m_methodCallLinkInfos[i].seenOnce());
-            visitor.append(&m_methodCallLinkInfos[i].cachedStructure);
-            ASSERT(!!m_methodCallLinkInfos[i].cachedPrototypeStructure);
-            visitor.append(&m_methodCallLinkInfos[i].cachedPrototypeStructure);
-            visitor.append(&m_methodCallLinkInfos[i].cachedFunction);
-            visitor.append(&m_methodCallLinkInfos[i].cachedPrototype);
-        }
-    }
 #endif
 
[…]
     valueProfile(profileIndex)->computeUpdatedPrediction();
 #endif
-
-#if ENABLE(JIT) && !ENABLE(OPCODE_SAMPLING)
-    // Kill off some bytecode. We can't do it here because we don't want to accidentally
-    // call into malloc while in stop-the-world GC mode.
-    if (hasInstructions() && m_shouldDiscardBytecode)
-        visitor.addUnconditionalFinalizer(this);
-#endif
-
-    stronglyVisitWeakReferences(visitor);
 }
 
[…]
 }
 
+void MethodCallLinkInfo::reset(RepatchBuffer& repatchBuffer, JITCode::JITType jitType)
+{
+    cachedStructure.clearToMaxUnsigned();
+    cachedPrototype.clear();
+    cachedPrototypeStructure.clearToMaxUnsigned();
+    cachedFunction.clear();
+
+    if (jitType == JITCode::DFGJIT) {
+#if ENABLE(DFG_JIT)
+        repatchBuffer.relink(callReturnLocation, operationGetMethodOptimize);
+#else
+        ASSERT_NOT_REACHED();
+#endif
+    } else {
+        ASSERT(jitType == JITCode::BaselineJIT);
+        repatchBuffer.relink(callReturnLocation, cti_op_get_by_id_method_check);
+    }
+}
+
 void CodeBlock::unlinkCalls()
 {
[…]
 }
 
-void ProgramCodeBlock::jettison(JSGlobalData& globalData)
+void ProgramCodeBlock::jettison()
 {
     ASSERT(getJITType() != JITCode::BaselineJIT);
     ASSERT(this == replacement());
-    static_cast<ProgramExecutable*>(ownerExecutable())->jettisonOptimizedCode(globalData);
+    static_cast<ProgramExecutable*>(ownerExecutable())->jettisonOptimizedCode(*globalData());
 }
 
-void EvalCodeBlock::jettison(JSGlobalData& globalData)
+void EvalCodeBlock::jettison()
 {
     ASSERT(getJITType() != JITCode::BaselineJIT);
     ASSERT(this == replacement());
-    static_cast<EvalExecutable*>(ownerExecutable())->jettisonOptimizedCode(globalData);
+    static_cast<EvalExecutable*>(ownerExecutable())->jettisonOptimizedCode(*globalData());
 }
 
-void FunctionCodeBlock::jettison(JSGlobalData& globalData)
+void FunctionCodeBlock::jettison()
 {
     ASSERT(getJITType() != JITCode::BaselineJIT);
     ASSERT(this == replacement());
-    static_cast<FunctionExecutable*>(ownerExecutable())->jettisonOptimizedCodeFor(globalData, m_isConstructor ? CodeForConstruct : CodeForCall);
-}
-#endif
-
-void CodeBlock::finalizeUnconditionally()
-{
-#if ENABLE(OPCODE_SAMPLING) || !ENABLE(JIT)
-    ASSERT_NOT_REACHED();
-#endif
-    ASSERT(m_shouldDiscardBytecode);
-    discardBytecode();
-}
+    static_cast<FunctionExecutable*>(ownerExecutable())->jettisonOptimizedCodeFor(*globalData(), m_isConstructor ? CodeForConstruct : CodeForCall);
+}
+#endif
 
 #if ENABLE(VALUE_PROFILER)
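For readers who want to see the new liveness protocol in isolation, the sketch below is a minimal, self-contained model of what the patched CodeBlock does during GC; it is not JSC code. MiniObject, WeakTransition, and MiniCodeBlock are invented stand-ins for heap cells, the DFG transition table, and CodeBlock itself; only the control flow mirrors the patch: re-run a tracing iteration while marking makes progress, and jettison at the end of GC if liveness was never proved.

// Hypothetical model of the conditional-liveness scheme introduced in r100880.
#include <cstdio>
#include <vector>

struct MiniObject { bool marked = false; };

struct WeakTransition {
    MiniObject* codeOrigin; // may be null when the transition did not come from inlined code
    MiniObject* from;
    MiniObject* to;
};

struct MiniCodeBlock {
    std::vector<MiniObject*> weakReferences;
    std::vector<WeakTransition> transitions;
    bool livenessHasBeenProved = false;
    bool allTransitionsHaveBeenMarked = false;
    bool jettisoned = false;

    // One round of the tracing fixpoint: mark the target of every transition whose
    // preconditions are already marked, then check whether all weak references are live.
    // Returns true if this round marked something new, so the caller runs another round.
    bool fixpointIteration()
    {
        bool markedSomething = false;
        if (!allTransitionsHaveBeenMarked) {
            bool allMarkedSoFar = true;
            for (WeakTransition& transition : transitions) {
                if ((!transition.codeOrigin || transition.codeOrigin->marked) && transition.from->marked) {
                    if (!transition.to->marked) {
                        transition.to->marked = true;
                        markedSomething = true;
                    }
                } else
                    allMarkedSoFar = false;
            }
            allTransitionsHaveBeenMarked = allMarkedSoFar;
        }

        if (!livenessHasBeenProved) {
            bool allLiveSoFar = true;
            for (MiniObject* weak : weakReferences) {
                if (!weak->marked) {
                    allLiveSoFar = false;
                    break;
                }
            }
            if (allLiveSoFar)
                livenessHasBeenProved = true; // the real code scans its strong references here
        }
        return markedSomething;
    }

    // Runs once at the end of GC, like CodeBlock::finalizeUnconditionally(): if the
    // fixpoint never proved liveness, the optimized code block is thrown away.
    void finalize()
    {
        if (!livenessHasBeenProved) {
            jettisoned = true;
            std::printf("code block %p jettisoned: dead weak references\n", static_cast<void*>(this));
        }
    }
};

int main()
{
    MiniObject liveStructure, deadStructure;
    liveStructure.marked = true; // pretend the marker reached this object

    MiniCodeBlock block;
    block.weakReferences = { &liveStructure, &deadStructure };

    while (block.fixpointIteration()) { } // the GC re-runs iterations until marking stabilizes
    block.finalize(); // deadStructure was never marked, so the block jettisons itself
    return 0;
}

In JSC itself the GC drives the re-runs through visitWeakReferences() and the weak reference harvester registered in visitAggregate(), and finalizeUnconditionally() additionally clears dead inline caches and discards bytecode; the model above leaves all of that out.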