Changeset 172961 in webkit for trunk/Source/JavaScriptCore/bytecode/CallLinkStatus.cpp
- Timestamp: Aug 26, 2014, 9:46:10 AM
- File: 1 edited
Legend:
- Unchanged lines have no prefix
- Added lines are prefixed with +
- Removed lines are prefixed with -
trunk/Source/JavaScriptCore/bytecode/CallLinkStatus.cpp
--- r172940
+++ r172961

 #include "JSCInlines.h"
 #include <wtf/CommaPrinter.h>
-#include <wtf/ListDump.h>

 namespace JSC {
… …

 CallLinkStatus::CallLinkStatus(JSValue value)
-    : m_couldTakeSlowPath(false)
+    : m_callTarget(value)
+    , m_executable(0)
+    , m_couldTakeSlowPath(false)
     , m_isProved(false)
 {
-    if (!value || !value.isCell()) {
-        m_couldTakeSlowPath = true;
+    if (!value || !value.isCell())
         return;
-    }
-
-    m_edges.append(CallEdge(CallVariant(value.asCell()), 1));
+
+    if (!value.asCell()->inherits(JSFunction::info()))
+        return;
+
+    m_executable = jsCast<JSFunction*>(value.asCell())->executable();
+}
+
+JSFunction* CallLinkStatus::function() const
+{
+    if (!m_callTarget || !m_callTarget.isCell())
+        return 0;
+
+    if (!m_callTarget.asCell()->inherits(JSFunction::info()))
+        return 0;
+
+    return jsCast<JSFunction*>(m_callTarget.asCell());
+}
+
+InternalFunction* CallLinkStatus::internalFunction() const
+{
+    if (!m_callTarget || !m_callTarget.isCell())
+        return 0;
+
+    if (!m_callTarget.asCell()->inherits(InternalFunction::info()))
+        return 0;
+
+    return jsCast<InternalFunction*>(m_callTarget.asCell());
+}
+
+Intrinsic CallLinkStatus::intrinsicFor(CodeSpecializationKind kind) const
+{
+    if (!m_executable)
+        return NoIntrinsic;
+
+    return m_executable->intrinsicFor(kind);
 }

… …
     UNUSED_PARAM(bytecodeIndex);
 #if ENABLE(DFG_JIT)
-    if (profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCell))) {
+    if (profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadFunction))) {
         // We could force this to be a closure call, but instead we'll just assume that it
         // takes slow path.
… …
         return computeFromLLInt(locker, profiledBlock, bytecodeIndex);

-    return computeFor(locker, profiledBlock, *callLinkInfo, exitSiteData);
+    return computeFor(locker, *callLinkInfo, exitSiteData);
 #else
     return CallLinkStatus();
… …
 #if ENABLE(DFG_JIT)
     exitSiteData.m_takesSlowPath =
-        profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadType, exitingJITType))
+        profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, exitingJITType))
         || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadExecutable, exitingJITType));
     exitSiteData.m_badFunction =
-        profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCell, exitingJITType));
+        profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadFunction, exitingJITType));
 #else
     UNUSED_PARAM(locker);
… …

 #if ENABLE(JIT)
-CallLinkStatus CallLinkStatus::computeFor(
-    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, CallLinkInfo& callLinkInfo)
-{
-    // We don't really need this, but anytime we have to debug this code, it becomes indispensable.
-    UNUSED_PARAM(profiledBlock);
-
-    if (Options::callStatusShouldUseCallEdgeProfile()) {
-        // Always trust the call edge profile over anything else since this has precise counts.
-        // It can make the best possible decision because it never "forgets" what happened for any
-        // call, with the exception of fading out the counts of old calls (for example if the
-        // counter type is 16-bit then calls that happened more than 2^16 calls ago are given half
-        // weight, and this compounds for every 2^15 [sic] calls after that). The combination of
-        // high fidelity for recent calls and fading for older calls makes this the most useful
-        // mechamism of choosing how to optimize future calls.
-        CallEdgeProfile* edgeProfile = callLinkInfo.callEdgeProfile.get();
-        WTF::loadLoadFence();
-        if (edgeProfile) {
-            CallLinkStatus result = computeFromCallEdgeProfile(edgeProfile);
-            if (!!result)
-                return result;
-        }
-    }
-
-    return computeFromCallLinkInfo(locker, callLinkInfo);
-}
-
-CallLinkStatus CallLinkStatus::computeFromCallLinkInfo(
-    const ConcurrentJITLocker&, CallLinkInfo& callLinkInfo)
+CallLinkStatus CallLinkStatus::computeFor(const ConcurrentJITLocker&, CallLinkInfo& callLinkInfo)
 {
     // Note that despite requiring that the locker is held, this code is racy with respect
… …
     JSFunction* target = callLinkInfo.lastSeenCallee.get();
     if (!target)
-        return takesSlowPath();
+        return CallLinkStatus();

     if (callLinkInfo.hasSeenClosure)
… …
 }

-CallLinkStatus CallLinkStatus::computeFromCallEdgeProfile(CallEdgeProfile* edgeProfile)
-{
-    // In cases where the call edge profile saw nothing, use the CallLinkInfo instead.
-    if (!edgeProfile->totalCalls())
-        return CallLinkStatus();
-
-    // To do anything meaningful, we require that the majority of calls are to something we
-    // know how to handle.
-    unsigned numCallsToKnown = edgeProfile->numCallsToKnownCells();
-    unsigned numCallsToUnknown = edgeProfile->numCallsToNotCell() + edgeProfile->numCallsToUnknownCell();
-
-    // We require that the majority of calls were to something that we could possibly inline.
-    if (numCallsToKnown <= numCallsToUnknown)
-        return takesSlowPath();
-
-    // We require that the number of such calls is greater than some minimal threshold, so that we
-    // avoid inlining completely cold calls.
-    if (numCallsToKnown < Options::frequentCallThreshold())
-        return takesSlowPath();
-
-    CallLinkStatus result;
-    result.m_edges = edgeProfile->callEdges();
-    result.m_couldTakeSlowPath = !!numCallsToUnknown;
-    result.m_canTrustCounts = true;
-
-    return result;
-}
-
 CallLinkStatus CallLinkStatus::computeFor(
-    const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, CallLinkInfo& callLinkInfo,
-    ExitSiteData exitSiteData)
-{
-    CallLinkStatus result = computeFor(locker, profiledBlock, callLinkInfo);
+    const ConcurrentJITLocker& locker, CallLinkInfo& callLinkInfo, ExitSiteData exitSiteData)
+{
+    if (exitSiteData.m_takesSlowPath)
+        return takesSlowPath();
+
+    CallLinkStatus result = computeFor(locker, callLinkInfo);
     if (exitSiteData.m_badFunction)
         result.makeClosureCall();
-    if (exitSiteData.m_takesSlowPath)
-        result.m_couldTakeSlowPath = true;

     return result;
… …
         {
             ConcurrentJITLocker locker(dfgCodeBlock->m_lock);
-            map.add(info.codeOrigin, computeFor(locker, dfgCodeBlock, info, exitSiteData));
+            map.add(info.codeOrigin, computeFor(locker, info, exitSiteData));
         }
     }
… …
 }

-bool CallLinkStatus::isClosureCall() const
-{
-    for (unsigned i = m_edges.size(); i--;) {
-        if (m_edges[i].callee().isClosureCall())
-            return true;
-    }
-    return false;
-}
-
-void CallLinkStatus::makeClosureCall()
-{
-    ASSERT(!m_isProved);
-    for (unsigned i = m_edges.size(); i--;)
-        m_edges[i] = m_edges[i].despecifiedClosure();
-
-    if (!ASSERT_DISABLED) {
-        // Doing this should not have created duplicates, because the CallEdgeProfile
-        // should despecify closures if doing so would reduce the number of known callees.
-        for (unsigned i = 0; i < m_edges.size(); ++i) {
-            for (unsigned j = i + 1; j < m_edges.size(); ++j)
-                ASSERT(m_edges[i].callee() != m_edges[j].callee());
-        }
-    }
-}
-
 void CallLinkStatus::dump(PrintStream& out) const
 {
… …
         out.print(comma, "Could Take Slow Path");

-    out.print(listDump(m_edges));
+    if (m_callTarget)
+        out.print(comma, "Known target: ", m_callTarget);
+
+    if (m_executable) {
+        out.print(comma, "Executable/CallHash: ", RawPointer(m_executable));
+        if (!isCompilationThread())
+            out.print("/", m_executable->hashFor(CodeForCall));
+    }
 }

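
The accessors restored by this changeset (function(), internalFunction(), and intrinsicFor()) are what let a consumer of a CallLinkStatus distinguish a known JSFunction, a known native InternalFunction, and an intrinsic-backed callee. The sketch below is a hypothetical illustration of that use and is not part of r172961; decideHowToCompileCall and the CallPlan enum are invented for the example, and only the accessors visible in the hunks above are assumed to exist.

// Hypothetical sketch -- not from this changeset. Assumes only the accessors
// shown above; include "CallLinkStatus.h" to build against JavaScriptCore.
enum class CallPlan { EmitIntrinsic, DirectJSCall, KnownInternalFunction, GenericCall };

static CallPlan decideHowToCompileCall(const JSC::CallLinkStatus& status)
{
    // A known JSFunction whose executable maps to an intrinsic (e.g. Math.abs)
    // can be lowered straight to the intrinsic operation.
    if (status.function()) {
        if (status.intrinsicFor(JSC::CodeForCall) != JSC::NoIntrinsic)
            return CallPlan::EmitIntrinsic;
        return CallPlan::DirectJSCall; // candidate for a direct call or inlining
    }

    // A known InternalFunction (a native constructor such as Array) cannot be
    // inlined as bytecode, but the call target is still statically known.
    if (status.internalFunction())
        return CallPlan::KnownInternalFunction;

    // Nothing useful was profiled; emit a fully generic call.
    return CallPlan::GenericCall;
}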