Changeset 253896 in webkit for trunk/Source/JavaScriptCore/dfg/DFGByteCodeParser.cpp
- Timestamp:
- Dec 23, 2019, 5:49:45 PM (5 years ago)
- File:
-
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/JavaScriptCore/dfg/DFGByteCodeParser.cpp
r253867 r253896 112 112 , m_numArguments(m_codeBlock->numParameters()) 113 113 , m_numLocals(m_codeBlock->numCalleeLocals()) 114 , m_numTmps(m_codeBlock->numTmps()) 114 115 , m_parameterSlots(0) 115 116 , m_numPassedVarArgs(0) … … 139 140 m_graph.block(i)->ensureLocals(newNumLocals); 140 141 } 142 143 void ensureTmps(unsigned newNumTmps) 144 { 145 VERBOSE_LOG(" ensureTmps: trying to raise m_numTmps from ", m_numTmps, " to ", newNumTmps, "\n"); 146 if (newNumTmps <= m_numTmps) 147 return; 148 m_numTmps = newNumTmps; 149 for (size_t i = 0; i < m_graph.numBlocks(); ++i) 150 m_graph.block(i)->ensureTmps(newNumTmps); 151 } 152 141 153 142 154 // Helper for min and max. … … 271 283 void linkBlocks(Vector<BasicBlock*>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets); 272 284 273 VariableAccessData* newVariableAccessData(VirtualRegister operand) 285 void progressToNextCheckpoint() 286 { 287 m_currentIndex = BytecodeIndex(m_currentIndex.offset(), m_currentIndex.checkpoint() + 1); 288 // At this point, it's again OK to OSR exit. 289 m_exitOK = true; 290 addToGraph(ExitOK); 291 292 processSetLocalQueue(); 293 } 294 295 VariableAccessData* newVariableAccessData(Operand operand) 274 296 { 275 297 ASSERT(!operand.isConstant()); … … 280 302 281 303 // Get/Set the operands/result of a bytecode instruction. 282 Node* getDirect( VirtualRegisteroperand)304 Node* getDirect(Operand operand) 283 305 { 284 306 ASSERT(!operand.isConstant()); 285 307 286 // Is this an argument?287 308 if (operand.isArgument()) 288 return getArgument(operand); 289 290 // Must be a local. 
291 return getLocal(operand); 309 return getArgument(operand.virtualRegister()); 310 311 return getLocalOrTmp(operand); 292 312 } 293 313 … … 299 319 if (constantIndex >= oldSize || !m_constants[constantIndex]) { 300 320 const CodeBlock& codeBlock = *m_inlineStackTop->m_codeBlock; 301 JSValue value = codeBlock.getConstant(operand .offset());302 SourceCodeRepresentation sourceCodeRepresentation = codeBlock.constantSourceCodeRepresentation(operand .offset());321 JSValue value = codeBlock.getConstant(operand); 322 SourceCodeRepresentation sourceCodeRepresentation = codeBlock.constantSourceCodeRepresentation(operand); 303 323 if (constantIndex >= oldSize) { 304 324 m_constants.grow(constantIndex + 1); … … 359 379 ImmediateNakedSet 360 380 }; 361 Node* setDirect(VirtualRegister operand, Node* value, SetMode setMode = NormalSet) 381 382 Node* setDirect(Operand operand, Node* value, SetMode setMode = NormalSet) 362 383 { 363 addToGraph(MovHint, OpInfo(operand .offset()), value);384 addToGraph(MovHint, OpInfo(operand), value); 364 385 365 386 // We can't exit anymore because our OSR exit state has changed. … … 375 396 return delayed.execute(this); 376 397 } 377 398 378 399 void processSetLocalQueue() 379 400 { … … 393 414 ASSERT(node->origin.semantic.bytecodeIndex() == m_currentIndex); 394 415 ConcurrentJSLocker locker(m_inlineStackTop->m_profiledBlock->m_lock); 395 LazyOperandValueProfileKey key(m_currentIndex, node-> local());416 LazyOperandValueProfileKey key(m_currentIndex, node->operand()); 396 417 SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key); 397 418 node->variableAccessData()->predict(prediction); … … 400 421 401 422 // Used in implementing get/set, above, where the operand is a local variable. 
402 Node* getLocal (VirtualRegisteroperand)423 Node* getLocalOrTmp(Operand operand) 403 424 { 404 unsigned local = operand.toLocal(); 405 406 Node* node = m_currentBlock->variablesAtTail.local(local); 425 ASSERT(operand.isTmp() || operand.isLocal()); 426 Node*& node = m_currentBlock->variablesAtTail.operand(operand); 407 427 408 428 // This has two goals: 1) link together variable access datas, and 2) … … 429 449 430 450 node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable))); 431 m_currentBlock->variablesAtTail.local(local) = node;432 451 return node; 433 452 } 434 Node* setLocal (const CodeOrigin& semanticOrigin, VirtualRegisteroperand, Node* value, SetMode setMode = NormalSet)453 Node* setLocalOrTmp(const CodeOrigin& semanticOrigin, Operand operand, Node* value, SetMode setMode = NormalSet) 435 454 { 455 ASSERT(operand.isTmp() || operand.isLocal()); 436 456 SetForScope<CodeOrigin> originChange(m_currentSemanticOrigin, semanticOrigin); 437 457 438 unsigned local = operand.toLocal(); 439 440 if (setMode != ImmediateNakedSet) { 441 ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand); 458 if (operand.isTmp() && static_cast<unsigned>(operand.value()) >= m_numTmps) { 459 if (inlineCallFrame()) 460 dataLogLn(*inlineCallFrame()); 461 dataLogLn("Bad operand: ", operand, " but current number of tmps is: ", m_numTmps, " code block has: ", m_profiledBlock->numTmps(), " tmps."); 462 CRASH(); 463 } 464 465 if (setMode != ImmediateNakedSet && !operand.isTmp()) { 466 VirtualRegister reg = operand.virtualRegister(); 467 ArgumentPosition* argumentPosition = findArgumentPositionForLocal(reg); 442 468 if (argumentPosition) 443 469 flushDirect(operand, argumentPosition); 444 else if (m_graph.needsScopeRegister() && operand== m_codeBlock->scopeRegister())470 else if (m_graph.needsScopeRegister() && reg == m_codeBlock->scopeRegister()) 445 471 flush(operand); 446 472 } … … 452 478 
m_inlineStackTop->m_exitProfile.hasExitSite(semanticOrigin.bytecodeIndex(), BadIndexingType)); 453 479 Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value); 454 m_currentBlock->variablesAtTail. local(local) = node;480 m_currentBlock->variablesAtTail.operand(operand) = node; 455 481 return node; 456 482 } … … 484 510 return node; 485 511 } 486 Node* setArgument(const CodeOrigin& semanticOrigin, VirtualRegisteroperand, Node* value, SetMode setMode = NormalSet)512 Node* setArgument(const CodeOrigin& semanticOrigin, Operand operand, Node* value, SetMode setMode = NormalSet) 487 513 { 488 514 SetForScope<CodeOrigin> originChange(m_currentSemanticOrigin, semanticOrigin); 489 515 490 unsigned argument = operand.toArgument(); 516 VirtualRegister reg = operand.virtualRegister(); 517 unsigned argument = reg.toArgument(); 491 518 ASSERT(argument < m_numArguments); 492 519 493 VariableAccessData* variableAccessData = newVariableAccessData( operand);520 VariableAccessData* variableAccessData = newVariableAccessData(reg); 494 521 495 522 // Always flush arguments, except for 'this'. 
If 'this' is created by us, … … 497 524 if (argument || m_graph.needsFlushedThis()) { 498 525 if (setMode != ImmediateNakedSet) 499 flushDirect( operand);526 flushDirect(reg); 500 527 } 501 528 … … 533 560 return stack->m_argumentPositions[argument]; 534 561 } 535 return 0;536 } 537 538 ArgumentPosition* findArgumentPosition( VirtualRegisteroperand)562 return nullptr; 563 } 564 565 ArgumentPosition* findArgumentPosition(Operand operand) 539 566 { 567 if (operand.isTmp()) 568 return nullptr; 540 569 if (operand.isArgument()) 541 570 return findArgumentPositionForArgument(operand.toArgument()); 542 return findArgumentPositionForLocal(operand );571 return findArgumentPositionForLocal(operand.virtualRegister()); 543 572 } 544 573 … … 551 580 numArguments = inlineCallFrame->argumentsWithFixup.size(); 552 581 if (inlineCallFrame->isClosureCall) 553 addFlushDirect(inlineCallFrame, remapOperand(inlineCallFrame, VirtualRegister(CallFrameSlot::callee)));582 addFlushDirect(inlineCallFrame, remapOperand(inlineCallFrame, CallFrameSlot::callee)); 554 583 if (inlineCallFrame->isVarargs()) 555 addFlushDirect(inlineCallFrame, remapOperand(inlineCallFrame, VirtualRegister(CallFrameSlot::argumentCountIncludingThis)));584 addFlushDirect(inlineCallFrame, remapOperand(inlineCallFrame, CallFrameSlot::argumentCountIncludingThis)); 556 585 } else 557 586 numArguments = m_graph.baselineCodeBlockFor(inlineCallFrame)->numParameters(); … … 576 605 CodeBlock* codeBlock = m_graph.baselineCodeBlockFor(inlineCallFrame); 577 606 FullBytecodeLiveness& fullLiveness = m_graph.livenessFor(codeBlock); 607 // Note: We don't need to handle tmps here because tmps are not required to be flushed to the stack. 
578 608 const auto& livenessAtBytecode = fullLiveness.getLiveness(bytecodeIndex, m_graph.appropriateLivenessCalculationPoint(origin, isCallerOrigin)); 579 609 for (unsigned local = codeBlock->numCalleeLocals(); local--;) { … … 585 615 } 586 616 587 void flush( VirtualRegisteroperand)617 void flush(Operand operand) 588 618 { 589 619 flushDirect(m_inlineStackTop->remapOperand(operand)); 590 620 } 591 621 592 void flushDirect( VirtualRegisteroperand)622 void flushDirect(Operand operand) 593 623 { 594 624 flushDirect(operand, findArgumentPosition(operand)); 595 625 } 596 626 597 void flushDirect( VirtualRegisteroperand, ArgumentPosition* argumentPosition)627 void flushDirect(Operand operand, ArgumentPosition* argumentPosition) 598 628 { 599 629 addFlushOrPhantomLocal<Flush>(operand, argumentPosition); … … 601 631 602 632 template<NodeType nodeType> 603 void addFlushOrPhantomLocal( VirtualRegisteroperand, ArgumentPosition* argumentPosition)633 void addFlushOrPhantomLocal(Operand operand, ArgumentPosition* argumentPosition) 604 634 { 605 635 ASSERT(!operand.isConstant()); 606 636 607 Node* node = m_currentBlock->variablesAtTail.operand(operand);637 Node*& node = m_currentBlock->variablesAtTail.operand(operand); 608 638 609 639 VariableAccessData* variable; … … 615 645 616 646 node = addToGraph(nodeType, OpInfo(variable)); 617 m_currentBlock->variablesAtTail.operand(operand) = node;618 647 if (argumentPosition) 619 648 argumentPosition->addVariable(variable); 620 649 } 621 650 622 void phantomLocalDirect( VirtualRegisteroperand)651 void phantomLocalDirect(Operand operand) 623 652 { 624 653 addFlushOrPhantomLocal<PhantomLocal>(operand, findArgumentPosition(operand)); … … 627 656 void flush(InlineStackEntry* inlineStackEntry) 628 657 { 629 auto addFlushDirect = [&] (InlineCallFrame*, VirtualRegister reg) { flushDirect(reg); };658 auto addFlushDirect = [&] (InlineCallFrame*, Operand operand) { flushDirect(operand); }; 630 659 flushImpl(inlineStackEntry->m_inlineCallFrame, 
addFlushDirect); 631 660 } … … 633 662 void flushForTerminal() 634 663 { 635 auto addFlushDirect = [&] (InlineCallFrame*, VirtualRegister reg) { flushDirect(reg); };636 auto addPhantomLocalDirect = [&] (InlineCallFrame*, VirtualRegister reg) { phantomLocalDirect(reg); };664 auto addFlushDirect = [&] (InlineCallFrame*, Operand operand) { flushDirect(operand); }; 665 auto addPhantomLocalDirect = [&] (InlineCallFrame*, Operand operand) { phantomLocalDirect(operand); }; 637 666 flushForTerminalImpl(currentCodeOrigin(), addFlushDirect, addPhantomLocalDirect); 638 667 } … … 761 790 Edge(child1), Edge(child2), Edge(child3)); 762 791 return addToGraph(result); 792 } 793 Node* addToGraph(NodeType op, Operand operand, Node* child1) 794 { 795 ASSERT(op == MovHint); 796 return addToGraph(op, OpInfo(operand.kind()), OpInfo(operand.value()), child1); 763 797 } 764 798 Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Edge child1, Edge child2 = Edge(), Edge child3 = Edge()) … … 1092 1126 // The number of arguments passed to the function. 1093 1127 unsigned m_numArguments; 1094 // The number of locals (vars + temporaries) used inthe function.1128 // The number of locals (vars + temporaries) used by the bytecode for the function. 1095 1129 unsigned m_numLocals; 1130 // The max number of temps used for forwarding data to an OSR exit checkpoint. 1131 unsigned m_numTmps; 1096 1132 // The number of slots (in units of sizeof(Register)) that we need to 1097 1133 // preallocate for arguments to outgoing calls from this frame. This … … 1160 1196 ~InlineStackEntry(); 1161 1197 1162 VirtualRegister remapOperand(VirtualRegisteroperand) const1198 Operand remapOperand(Operand operand) const 1163 1199 { 1164 1200 if (!m_inlineCallFrame) 1165 1201 return operand; 1202 1203 if (operand.isTmp()) 1204 return Operand::tmp(operand.value() + m_inlineCallFrame->tmpOffset); 1166 1205 1167 ASSERT(!operand. 
isConstant());1168 1169 return VirtualRegister(operand.offset() + m_inlineCallFrame->stackOffset);1206 ASSERT(!operand.virtualRegister().isConstant()); 1207 1208 return operand.virtualRegister() + m_inlineCallFrame->stackOffset; 1170 1209 } 1171 1210 }; … … 1176 1215 1177 1216 struct DelayedSetLocal { 1178 CodeOrigin m_origin;1179 VirtualRegister m_operand;1180 Node* m_value;1181 SetMode m_setMode;1182 1183 1217 DelayedSetLocal() { } 1184 DelayedSetLocal(const CodeOrigin& origin, VirtualRegisteroperand, Node* value, SetMode setMode)1218 DelayedSetLocal(const CodeOrigin& origin, Operand operand, Node* value, SetMode setMode) 1185 1219 : m_origin(origin) 1186 1220 , m_operand(operand) … … 1195 1229 if (m_operand.isArgument()) 1196 1230 return parser->setArgument(m_origin, m_operand, m_value, m_setMode); 1197 return parser->setLocal(m_origin, m_operand, m_value, m_setMode); 1198 } 1231 return parser->setLocalOrTmp(m_origin, m_operand, m_value, m_setMode); 1232 } 1233 1234 CodeOrigin m_origin; 1235 Operand m_operand; 1236 Node* m_value { nullptr }; 1237 SetMode m_setMode; 1199 1238 }; 1200 1239 … … 1209 1248 { 1210 1249 ASSERT(bytecodeIndex); 1211 Ref<BasicBlock> block = adoptRef(*new BasicBlock(bytecodeIndex, m_numArguments, m_numLocals, 1));1250 Ref<BasicBlock> block = adoptRef(*new BasicBlock(bytecodeIndex, m_numArguments, m_numLocals, m_numTmps, 1)); 1212 1251 BasicBlock* blockPtr = block.ptr(); 1213 1252 // m_blockLinkingTargets must always be sorted in increasing order of bytecodeBegin … … 1221 1260 BasicBlock* ByteCodeParser::allocateUntargetableBlock() 1222 1261 { 1223 Ref<BasicBlock> block = adoptRef(*new BasicBlock(BytecodeIndex(), m_numArguments, m_numLocals, 1));1262 Ref<BasicBlock> block = adoptRef(*new BasicBlock(BytecodeIndex(), m_numArguments, m_numLocals, m_numTmps, 1)); 1224 1263 BasicBlock* blockPtr = block.ptr(); 1225 1264 m_graph.appendBlock(WTFMove(block)); … … 1424 1463 continue; 1425 1464 // If the target InlineCallFrame is Varargs, we do not 
know how many arguments are actually filled by LoadVarargs. Varargs InlineCallFrame's 1426 // argumentCountIncludingThis is maximum number of potentially filled arguments by LoadVarargs. We "continue" to the upper frame which may be 1465 // a good target to jump into. 1428 1467 if (callFrame->isVarargs()) 1429 1468 continue; … 1450 1489 if (stackEntry->m_inlineCallFrame) { 1451 1490 if (stackEntry->m_inlineCallFrame->isClosureCall) 1452 setDirect( stackEntry->remapOperand(VirtualRegister(CallFrameSlot::callee)), callTargetNode, NormalSet);1491 setDirect(remapOperand(stackEntry->m_inlineCallFrame, CallFrameSlot::callee), callTargetNode, NormalSet); 1453 1492 } else 1454 1493 addToGraph(SetCallee, callTargetNode); … 1615 1654 int registerOffsetAfterFixup = registerOffset - numberOfStackPaddingSlots; 1616 1655 1617 int inlineCallFrameStart = m_inlineStackTop->remapOperand(VirtualRegister(registerOffsetAfterFixup)).offset() + CallFrame::headerSizeInRegisters;1656 Operand inlineCallFrameStart = VirtualRegister(m_inlineStackTop->remapOperand(VirtualRegister(registerOffsetAfterFixup)).value() + CallFrame::headerSizeInRegisters); 1618 1657 1619 1658 ensureLocals( 1620 VirtualRegister(inlineCallFrameStart).toLocal() + 1 +1659 inlineCallFrameStart.toLocal() + 1 + 1621 1660 CallFrame::headerSizeInRegisters + codeBlock->numCalleeLocals()); 1622 1661 1662 ensureTmps((m_inlineStackTop->m_inlineCallFrame ? 
m_inlineStackTop->m_inlineCallFrame->tmpOffset : 0) + m_inlineStackTop->m_codeBlock->numTmps() + codeBlock->numTmps()); 1663 1623 1664 size_t argumentPositionStart = m_graph.m_argumentPositions.size(); 1624 1665 1625 1666 if (result.isValid()) 1626 result = m_inlineStackTop->remapOperand(result) ;1667 result = m_inlineStackTop->remapOperand(result).virtualRegister(); 1627 1668 1628 1669 VariableAccessData* calleeVariable = nullptr; … … 1637 1678 InlineStackEntry* callerStackTop = m_inlineStackTop; 1638 1679 InlineStackEntry inlineStackEntry(this, codeBlock, codeBlock, callee.function(), result, 1639 (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind, continuationBlock);1680 inlineCallFrameStart.virtualRegister(), argumentCountIncludingThis, kind, continuationBlock); 1640 1681 1641 1682 // This is where the actual inlining really happens. … … 1672 1713 // callee make it so that if we exit at <HERE>, we can recover loc9 and loc10. 1673 1714 for (int index = 0; index < argumentCountIncludingThis; ++index) { 1674 VirtualRegisterargumentToGet = callerStackTop->remapOperand(virtualRegisterForArgument(index, registerOffset));1715 Operand argumentToGet = callerStackTop->remapOperand(virtualRegisterForArgument(index, registerOffset)); 1675 1716 Node* value = getDirect(argumentToGet); 1676 addToGraph(MovHint, OpInfo(argumentToGet .offset()), value);1717 addToGraph(MovHint, OpInfo(argumentToGet), value); 1677 1718 m_setLocalQueue.append(DelayedSetLocal { currentCodeOrigin(), argumentToGet, value, ImmediateNakedSet }); 1678 1719 } … … 1718 1759 if (registerOffsetAfterFixup != registerOffset) { 1719 1760 for (int index = 0; index < argumentCountIncludingThis; ++index) { 1720 VirtualRegisterargumentToGet = callerStackTop->remapOperand(virtualRegisterForArgument(index, registerOffset));1761 Operand argumentToGet = callerStackTop->remapOperand(virtualRegisterForArgument(index, registerOffset)); 1721 1762 Node* value = getDirect(argumentToGet); 1722 
VirtualRegisterargumentToSet = m_inlineStackTop->remapOperand(virtualRegisterForArgument(index));1723 addToGraph(MovHint, OpInfo(argumentToSet .offset()), value);1763 Operand argumentToSet = m_inlineStackTop->remapOperand(virtualRegisterForArgument(index)); 1764 addToGraph(MovHint, OpInfo(argumentToSet), value); 1724 1765 m_setLocalQueue.append(DelayedSetLocal { currentCodeOrigin(), argumentToSet, value, ImmediateNakedSet }); 1725 1766 } 1726 1767 } 1727 1768 for (int index = 0; index < arityFixupCount; ++index) { 1728 VirtualRegisterargumentToSet = m_inlineStackTop->remapOperand(virtualRegisterForArgument(argumentCountIncludingThis + index));1729 addToGraph(MovHint, OpInfo(argumentToSet .offset()), undefined);1769 Operand argumentToSet = m_inlineStackTop->remapOperand(virtualRegisterForArgument(argumentCountIncludingThis + index)); 1770 addToGraph(MovHint, OpInfo(argumentToSet), undefined); 1730 1771 m_setLocalQueue.append(DelayedSetLocal { currentCodeOrigin(), argumentToSet, undefined, ImmediateNakedSet }); 1731 1772 } … … 1894 1935 1895 1936 int remappedRegisterOffset = 1896 m_inlineStackTop->remapOperand(VirtualRegister(registerOffset)). offset();1937 m_inlineStackTop->remapOperand(VirtualRegister(registerOffset)).virtualRegister().offset(); 1897 1938 1898 1939 ensureLocals(VirtualRegister(remappedRegisterOffset).toLocal()); 1899 1940 1900 1941 int argumentStart = registerOffset + CallFrame::headerSizeInRegisters; 1901 int remappedArgumentStart = m_inlineStackTop->remapOperand(VirtualRegister(argumentStart)). 
offset();1942 int remappedArgumentStart = m_inlineStackTop->remapOperand(VirtualRegister(argumentStart)).virtualRegister().offset(); 1902 1943 1903 1944 LoadVarargsData* data = m_graph.m_loadVarargsData.add(); … … 1907 1948 data->limit = maxArgumentCountIncludingThis; 1908 1949 data->mandatoryMinimum = mandatoryMinimum; 1909 1910 if (callOp == TailCallForwardVarargs) 1911 addToGraph(ForwardVarargs, OpInfo(data)); 1912 else 1913 addToGraph(LoadVarargs, OpInfo(data), get(argumentsArgument)); 1950 1951 if (callOp == TailCallForwardVarargs) { 1952 Node* argumentCount; 1953 if (!inlineCallFrame()) 1954 argumentCount = addToGraph(GetArgumentCountIncludingThis); 1955 else if (inlineCallFrame()->isVarargs()) 1956 argumentCount = getDirect(remapOperand(inlineCallFrame(), CallFrameSlot::argumentCountIncludingThis)); 1957 else 1958 argumentCount = addToGraph(JSConstant, OpInfo(m_graph.freeze(jsNumber(inlineCallFrame()->argumentCountIncludingThis)))); 1959 addToGraph(ForwardVarargs, OpInfo(data), argumentCount); 1960 } else { 1961 Node* arguments = get(argumentsArgument); 1962 auto argCountTmp = m_inlineStackTop->remapOperand(Operand::tmp(OpCallVarargs::argCountIncludingThis)); 1963 setDirect(argCountTmp, addToGraph(VarargsLength, OpInfo(data), arguments)); 1964 progressToNextCheckpoint(); 1965 1966 addToGraph(LoadVarargs, OpInfo(data), getLocalOrTmp(argCountTmp), arguments); 1967 } 1914 1968 1915 1969 // LoadVarargs may OSR exit. Hence, we need to keep alive callTargetNode, thisArgument … … 1923 1977 // before SSA. 1924 1978 1925 VariableAccessData* countVariable = newVariableAccessData( VirtualRegister(remappedRegisterOffset + CallFrameSlot::argumentCountIncludingThis));1979 VariableAccessData* countVariable = newVariableAccessData(data->count); 1926 1980 // This is pretty lame, but it will force the count to be flushed as an int. 
This doesn't 1927 1981 // matter very much, since our use of a SetArgumentDefinitely and Flushes for this local slot is … … 1930 1984 countVariable->mergeIsProfitableToUnbox(true); 1931 1985 Node* setArgumentCount = addToGraph(SetArgumentDefinitely, OpInfo(countVariable)); 1932 m_currentBlock->variablesAtTail.setOperand(countVariable-> local(), setArgumentCount);1986 m_currentBlock->variablesAtTail.setOperand(countVariable->operand(), setArgumentCount); 1933 1987 1934 1988 set(VirtualRegister(argumentStart), get(thisArgument), ImmediateNakedSet); … … 1954 2008 1955 2009 Node* setArgument = addToGraph(numSetArguments >= mandatoryMinimum ? SetArgumentMaybe : SetArgumentDefinitely, OpInfo(variable)); 1956 m_currentBlock->variablesAtTail.setOperand(variable-> local(), setArgument);2010 m_currentBlock->variablesAtTail.setOperand(variable->operand(), setArgument); 1957 2011 ++numSetArguments; 1958 2012 } … … 2054 2108 VERBOSE_LOG("Register offset: ", registerOffset); 2055 2109 VirtualRegister calleeReg(registerOffset + CallFrameSlot::callee); 2056 calleeReg = m_inlineStackTop->remapOperand(calleeReg) ;2110 calleeReg = m_inlineStackTop->remapOperand(calleeReg).virtualRegister(); 2057 2111 VERBOSE_LOG("Callee is going to be ", calleeReg, "\n"); 2058 2112 setDirect(calleeReg, callTargetNode, ImmediateSetWithFlush); … … 5103 5157 auto bytecode = currentInstruction->as<OpNewRegexp>(); 5104 5158 ASSERT(bytecode.m_regexp.isConstant()); 5105 FrozenValue* frozenRegExp = m_graph.freezeStrong(m_inlineStackTop->m_codeBlock->getConstant(bytecode.m_regexp .offset()));5159 FrozenValue* frozenRegExp = m_graph.freezeStrong(m_inlineStackTop->m_codeBlock->getConstant(bytecode.m_regexp)); 5106 5160 set(bytecode.m_dst, addToGraph(NewRegexp, OpInfo(frozenRegExp), jsConstant(jsNumber(0)))); 5107 5161 NEXT_OPCODE(op_new_regexp); … … 6179 6233 RELEASE_ASSERT(!m_currentBlock->size() || (m_graph.compilation() && m_currentBlock->size() == 1 && m_currentBlock->at(0)->op() == CountExecution)); 6180 
6234 6181 ValueProfileAnd OperandBuffer* buffer = bytecode.metadata(codeBlock).m_buffer;6235 ValueProfileAndVirtualRegisterBuffer* buffer = bytecode.metadata(codeBlock).m_buffer; 6182 6236 6183 6237 if (!buffer) { … … 6196 6250 ConcurrentJSLocker locker(m_inlineStackTop->m_profiledBlock->m_lock); 6197 6251 6198 buffer->forEach([&] (ValueProfileAnd Operand& profile) {6252 buffer->forEach([&] (ValueProfileAndVirtualRegister& profile) { 6199 6253 VirtualRegister operand(profile.m_operand); 6200 6254 SpeculatedType prediction = profile.computeUpdatedPrediction(locker); … … 6224 6278 6225 6279 unsigned numberOfLocals = 0; 6226 buffer->forEach([&] (ValueProfileAnd Operand& profile) {6280 buffer->forEach([&] (ValueProfileAndVirtualRegister& profile) { 6227 6281 VirtualRegister operand(profile.m_operand); 6228 6282 if (operand.isArgument()) … … 6231 6285 Node* value = addToGraph(ExtractCatchLocal, OpInfo(numberOfLocals), OpInfo(localPredictions[numberOfLocals])); 6232 6286 ++numberOfLocals; 6233 addToGraph(MovHint, OpInfo( profile.m_operand), value);6287 addToGraph(MovHint, OpInfo(operand), value); 6234 6288 localsToSet.uncheckedAppend(std::make_pair(operand, value)); 6235 6289 }); … … 6359 6413 case op_jneq_ptr: { 6360 6414 auto bytecode = currentInstruction->as<OpJneqPtr>(); 6361 FrozenValue* frozenPointer = m_graph.freezeStrong(m_inlineStackTop->m_codeBlock->getConstant(bytecode.m_specialPointer .offset()));6415 FrozenValue* frozenPointer = m_graph.freezeStrong(m_inlineStackTop->m_codeBlock->getConstant(bytecode.m_specialPointer)); 6362 6416 unsigned relativeOffset = jumpTarget(bytecode.m_targetLabel); 6363 6417 Node* child = get(bytecode.m_value); … … 6827 6881 auto bytecode = currentInstruction->as<OpCreateLexicalEnvironment>(); 6828 6882 ASSERT(bytecode.m_symbolTable.isConstant() && bytecode.m_initialValue.isConstant()); 6829 FrozenValue* symbolTable = m_graph.freezeStrong(m_inlineStackTop->m_codeBlock->getConstant(bytecode.m_symbolTable .offset()));6830 FrozenValue* 
initialValue = m_graph.freezeStrong(m_inlineStackTop->m_codeBlock->getConstant(bytecode.m_initialValue .offset()));6883 FrozenValue* symbolTable = m_graph.freezeStrong(m_inlineStackTop->m_codeBlock->getConstant(bytecode.m_symbolTable)); 6884 FrozenValue* initialValue = m_graph.freezeStrong(m_inlineStackTop->m_codeBlock->getConstant(bytecode.m_initialValue)); 6831 6885 Node* scope = get(bytecode.m_scope); 6832 6886 Node* lexicalEnvironment = addToGraph(CreateActivation, OpInfo(symbolTable), OpInfo(initialValue), scope); … … 6858 6912 // bytecode-level liveness of the scope register. 6859 6913 auto bytecode = currentInstruction->as<OpGetScope>(); 6860 Node* callee = get( VirtualRegister(CallFrameSlot::callee));6914 Node* callee = get(CallFrameSlot::callee); 6861 6915 Node* result; 6862 6916 if (JSFunction* function = callee->dynamicCastConstant<JSFunction*>(*m_vm)) … … 7283 7337 // plan finishes. 7284 7338 m_inlineCallFrame->baselineCodeBlock.setWithoutWriteBarrier(codeBlock->baselineVersion()); 7339 m_inlineCallFrame->setTmpOffset((m_caller->m_inlineCallFrame ? 
m_caller->m_inlineCallFrame->tmpOffset : 0) + m_caller->m_codeBlock->numTmps()); 7285 7340 m_inlineCallFrame->setStackOffset(inlineCallFrameStart.offset() - CallFrame::headerSizeInRegisters); 7286 7341 m_inlineCallFrame->argumentCountIncludingThis = argumentCountIncludingThis; 7342 RELEASE_ASSERT(m_inlineCallFrame->argumentCountIncludingThis == argumentCountIncludingThis); 7287 7343 if (callee) { 7288 7344 m_inlineCallFrame->calleeRecovery = ValueRecovery::constant(callee); … … 7646 7702 7647 7703 if (node->hasVariableAccessData(m_graph)) 7648 mapping.operand(node-> local()) = node->variableAccessData();7704 mapping.operand(node->operand()) = node->variableAccessData(); 7649 7705 7650 7706 if (node->op() != ForceOSRExit) … … 7666 7722 } 7667 7723 7668 auto insertLivenessPreservingOp = [&] (InlineCallFrame* inlineCallFrame, NodeType op, VirtualRegisteroperand) {7724 auto insertLivenessPreservingOp = [&] (InlineCallFrame* inlineCallFrame, NodeType op, Operand operand) { 7669 7725 VariableAccessData* variable = mapping.operand(operand); 7670 7726 if (!variable) { … … 7673 7729 } 7674 7730 7675 VirtualRegister argument = operand - (inlineCallFrame ? 
inlineCallFrame->stackOffset : 0);7731 Operand argument = unmapOperand(inlineCallFrame, operand); 7676 7732 if (argument.isArgument() && !argument.isHeader()) { 7677 7733 const Vector<ArgumentPosition*>& arguments = m_inlineCallFrameToArgumentPositions.get(inlineCallFrame); … … 7680 7736 insertionSet.insertNode(nodeIndex, SpecNone, op, origin, OpInfo(variable)); 7681 7737 }; 7682 auto addFlushDirect = [&] (InlineCallFrame* inlineCallFrame, VirtualRegisteroperand) {7738 auto addFlushDirect = [&] (InlineCallFrame* inlineCallFrame, Operand operand) { 7683 7739 insertLivenessPreservingOp(inlineCallFrame, Flush, operand); 7684 7740 }; 7685 auto addPhantomLocalDirect = [&] (InlineCallFrame* inlineCallFrame, VirtualRegisteroperand) {7741 auto addPhantomLocalDirect = [&] (InlineCallFrame* inlineCallFrame, Operand operand) { 7686 7742 insertLivenessPreservingOp(inlineCallFrame, PhantomLocal, operand); 7687 7743 }; … … 7748 7804 } 7749 7805 7806 m_graph.m_tmps = m_numTmps; 7750 7807 m_graph.m_localVars = m_numLocals; 7751 7808 m_graph.m_parameterSlots = m_parameterSlots;
Note:
See TracChangeset
for help on using the changeset viewer.