Changeset 193640 in webkit for trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp
Timestamp: Dec 7, 2015, 11:17:56 AM
File: 1 edited
trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp (r193606 → r193640)
 #include "AirGenerationContext.h"
 #include "AllowMacroScratchRegisterUsage.h"
+#include "B3StackmapGenerationParams.h"
 #include "CodeBlockWithJITType.h"
 #include "DFGAbstractInterpreterInlines.h"
…
     void compileCallOrConstruct()
     {
+        Node* node = m_node;
+        unsigned numArgs = node->numChildren() - 1;
+
+        LValue jsCallee = lowJSValue(m_graph.varArgChild(node, 0));
+
 #if FTL_USES_B3
-        if (verboseCompilationEnabled() || !verboseCompilationEnabled())
-            CRASH();
+        unsigned frameSize = JSStack::CallFrameHeaderSize + numArgs;
+        unsigned alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
+
+        // JS->JS calling convention requires that the caller allows this much space on top of stack to
+        // get trashed by the callee, even if not all of that space is used to pass arguments. We tell
+        // B3 this explicitly for two reasons:
+        //
+        // - We will only pass frameSize worth of stuff.
+        // - The trashed stack guarantee is logically separate from the act of passing arguments, so we
+        //   shouldn't rely on Air to infer the trashed stack property based on the arguments it ends
+        //   up seeing.
+        m_proc.requestCallArgAreaSize(alignedFrameSize);
+
+        // Collect the arguments, since this can generate code and we want to generate it before we emit
+        // the call.
+        Vector<ConstrainedValue> arguments;
+
+        // Make sure that the callee goes into GPR0 because that's where the slow path thunks expect the
+        // callee to be.
+        arguments.append(ConstrainedValue(jsCallee, ValueRep::reg(GPRInfo::regT0)));
+
+        auto addArgument = [&] (LValue value, VirtualRegister reg, int offset) {
+            intptr_t offsetFromSP =
+                (reg.offset() - JSStack::CallerFrameAndPCSize) * sizeof(EncodedJSValue) + offset;
+            arguments.append(ConstrainedValue(value, ValueRep::stackArgument(offsetFromSP)));
+        };
+
+        addArgument(jsCallee, VirtualRegister(JSStack::Callee), 0);
+        addArgument(m_out.constInt32(numArgs), VirtualRegister(JSStack::ArgumentCount), PayloadOffset);
+        for (unsigned i = 0; i < numArgs; ++i)
+            addArgument(lowJSValue(m_graph.varArgChild(node, 1 + i)), virtualRegisterForArgument(i), 0);
+
+        PatchpointValue* patchpoint = m_out.patchpoint(Int64);
+        patchpoint->appendVector(arguments);
+        patchpoint->clobber(RegisterSet::macroScratchRegisters());
+        patchpoint->clobberLate(RegisterSet::volatileRegistersForJSCall());
+        patchpoint->resultConstraint = ValueRep::reg(GPRInfo::returnValueGPR);
+
+        CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
+        State* state = &m_ftlState;
+        patchpoint->setGenerator(
+            [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
+                AllowMacroScratchRegisterUsage allowScratch(jit);
+                CallSiteIndex callSiteIndex = state->jitCode->common.addUniqueCallSiteIndex(codeOrigin);
+
+                // FIXME: If we were handling exceptions, then at this point we would ask our descriptor
+                // to prepare and then we would modify the OSRExit data structure inside the
+                // OSRExitHandle to link it up to this call.
+                // https://bugs.webkit.org/show_bug.cgi?id=151686
+
+                jit.store32(
+                    CCallHelpers::TrustedImm32(callSiteIndex.bits()),
+                    CCallHelpers::tagFor(VirtualRegister(JSStack::ArgumentCount)));
+
+                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo();
+
+                CCallHelpers::DataLabelPtr targetToCheck;
+                CCallHelpers::Jump slowPath = jit.branchPtrWithPatch(
+                    CCallHelpers::NotEqual, GPRInfo::regT0, targetToCheck,
+                    CCallHelpers::TrustedImmPtr(0));
+
+                CCallHelpers::Call fastCall = jit.nearCall();
+                CCallHelpers::Jump done = jit.jump();
+
+                slowPath.link(&jit);
+
+                jit.move(CCallHelpers::TrustedImmPtr(callLinkInfo), GPRInfo::regT2);
+                CCallHelpers::Call slowCall = jit.nearCall();
+                done.link(&jit);
+
+                callLinkInfo->setUpCall(
+                    node->op() == Construct ? CallLinkInfo::Construct : CallLinkInfo::Call,
+                    node->origin.semantic, GPRInfo::regT0);
+
+                jit.addPtr(
+                    CCallHelpers::TrustedImm32(-params.proc().frameSize()),
+                    GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+
+                jit.addLinkTask(
+                    [=] (LinkBuffer& linkBuffer) {
+                        MacroAssemblerCodePtr linkCall =
+                            linkBuffer.vm().getCTIStub(linkCallThunkGenerator).code();
+                        linkBuffer.link(slowCall, FunctionPtr(linkCall.executableAddress()));
+
+                        callLinkInfo->setCallLocations(
+                            linkBuffer.locationOfNearCall(slowCall),
+                            linkBuffer.locationOf(targetToCheck),
+                            linkBuffer.locationOfNearCall(fastCall));
+                    });
+            });
+
+        setJSValue(patchpoint);
 #else
-        int numArgs = m_node->numChildren() - 1;
-
-        LValue jsCallee = lowJSValue(m_graph.varArgChild(m_node, 0));
-
         unsigned stackmapID = m_stackmapIDs++;
…
         arguments.append(jsCallee); // callee -> stack
         arguments.append(m_out.constInt64(numArgs)); // argument count and zeros for the tag
-        for (int i = 0; i < numArgs; ++i)
-            arguments.append(lowJSValue(m_graph.varArgChild(m_node, 1 + i)));
+        for (unsigned i = 0; i < numArgs; ++i)
+            arguments.append(lowJSValue(m_graph.varArgChild(node, 1 + i)));
         for (unsigned i = 0; i < padding; ++i)
            arguments.append(getUndef(m_out.int64));
…
         setInstructionCallingConvention(call, LLVMWebKitJSCallConv);

-        m_ftlState.jsCalls.append(JSCall(stackmapID, m_node, codeOriginDescriptionOfCallSite()));
+        m_ftlState.jsCalls.append(JSCall(stackmapID, node, codeOriginDescriptionOfCallSite()));

         setJSValue(call);
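The frame sizing above is plain arithmetic: the callee frame needs JSStack::CallFrameHeaderSize slots plus one slot per argument, the call argument area reported to B3 is that size rounded up to the stack alignment, and each callee-frame slot is written at a byte offset computed from the caller's stack pointer. The sketch below walks through that arithmetic with stand-in constants; the header size, alignment, caller-frame size, and slot offset used here are assumptions for illustration, not the real JSC values, and roundUpToMultipleOf is a local reimplementation rather than the WTF helper.

    // Standalone sketch of the frame-size and stack-offset arithmetic used above.
    #include <cstdint>
    #include <cstdio>

    static unsigned roundUpToMultipleOf(unsigned divisor, unsigned value)
    {
        // Local reimplementation of the rounding helper, valid for any nonzero divisor.
        return ((value + divisor - 1) / divisor) * divisor;
    }

    int main()
    {
        const unsigned callFrameHeaderSize = 5;     // assumed call frame header slot count
        const unsigned stackAlignmentRegisters = 2; // assumed: 16-byte alignment, 8-byte slots
        const intptr_t callerFrameAndPCSize = 2;    // assumed slots already on the stack at SP
        const size_t slotSize = sizeof(uint64_t);   // one EncodedJSValue per slot

        unsigned numArgs = 3;
        unsigned frameSize = callFrameHeaderSize + numArgs;
        unsigned alignedFrameSize = roundUpToMultipleOf(stackAlignmentRegisters, frameSize);
        printf("frameSize = %u slots, alignedFrameSize = %u slots\n", frameSize, alignedFrameSize);

        // Offset of one callee-frame slot relative to the caller's stack pointer, mirroring
        // the arithmetic in the addArgument() lambda.
        intptr_t argumentSlot = 4; // assumed virtual register offset of an argument slot
        intptr_t offsetFromSP = (argumentSlot - callerFrameAndPCSize) * (intptr_t)slotSize + 0;
        printf("that argument is stored at SP + %ld bytes\n", (long)offsetFromSP);
        return 0;
    }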
…
     void compileInvalidationPoint()
     {
-#if FTL_USES_B3
-        UNREACHABLE_FOR_PLATFORM();
-#else // FTL_USES_B3
         if (verboseCompilationEnabled())
             dataLog(" Invalidation point with availability: ", availabilityMap(), "\n");

         DFG_ASSERT(m_graph, m_node, m_origin.exitOK);

+#if FTL_USES_B3
+        B3::PatchpointValue* patchpoint = m_out.patchpoint(Void);
+        OSRExitDescriptor* descriptor = appendOSRExitDescriptor(noValue(), nullptr);
+        NodeOrigin origin = m_origin;
+        patchpoint->appendColdAnys(buildExitArguments(descriptor, origin.forExit, noValue()));
+
+        State* state = &m_ftlState;
+
+        patchpoint->setGenerator(
+            [=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
+                // The MacroAssembler knows more about this than B3 does. The watchpointLabel() method
+                // will ensure that this is followed by a nop shadow but only when this is actually
+                // necessary.
+                CCallHelpers::Label label = jit.watchpointLabel();
+
+                RefPtr<OSRExitHandle> handle = descriptor->emitOSRExitLater(
+                    *state, UncountableInvalidation, origin, params);
+
+                RefPtr<JITCode> jitCode = state->jitCode.get();
+
+                jit.addLinkTask(
+                    [=] (LinkBuffer& linkBuffer) {
+                        JumpReplacement jumpReplacement(
+                            linkBuffer.locationOf(label),
+                            linkBuffer.locationOf(handle->label));
+                        jitCode->common.jumpReplacements.append(jumpReplacement);
+                    });
+            });
+
+        // Set some obvious things.
+        patchpoint->effects.terminal = false;
+        patchpoint->effects.writesSSAState = false;
+        patchpoint->effects.readsSSAState = false;
+
+        // This is how we tell B3 about the possibility of jump replacement.
+        patchpoint->effects.exitsSideways = true;
+
+        // It's not possible for some prior branch to determine the safety of this operation. It's always
+        // fine to execute this on some path that wouldn't have originally executed it before
+        // optimization.
+        patchpoint->effects.controlDependent = false;
+
+        // If this falls through then it won't write anything.
+        patchpoint->effects.writes = HeapRange();
+
+        // When this abruptly terminates, it could read any heap location.
+        patchpoint->effects.reads = HeapRange::top();
+#else // FTL_USES_B3

         OSRExitDescriptor* exitDescriptor = appendOSRExitDescriptor(UncountableInvalidation, ExceptionType::None, noValue(), nullptr, m_origin);

-        StackmapArgumentList arguments = buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last(), FormattedValue());
+        StackmapArgumentList arguments = buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last().m_codeOrigin, FormattedValue());
         callStackmap(exitDescriptor, arguments);
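The effects flags are how B3 learns what an invalidation point may do: on the fall-through path it touches nothing, but jump replacement can later turn it into a side exit that reads arbitrary heap state, which is why exitsSideways is set and reads covers the whole heap. Below is a minimal model of that flag set; the structs are stand-ins written for this note, not B3's actual Effects or HeapRange types, and the field list is reduced to the flags used above.

    // Simplified model of the effect summary given to the invalidation-point patchpoint.
    #include <cstdio>

    struct HeapRange {
        unsigned begin { 0 };
        unsigned end { 0 };
        static HeapRange top() { return { 0, ~0u }; } // "could touch anything"
        bool empty() const { return begin == end; }
    };

    struct Effects {
        bool terminal { false };         // does control flow stop here?
        bool exitsSideways { false };    // can this abruptly leave the compiled code?
        bool controlDependent { false }; // does safety depend on a guarding branch?
        HeapRange reads;                 // heap the operation may read
        HeapRange writes;                // heap the operation may write
    };

    int main()
    {
        Effects effects;
        effects.terminal = false;         // the fall-through path continues normally
        effects.exitsSideways = true;     // jump replacement may divert execution
        effects.controlDependent = false; // safe to execute on any path
        effects.writes = HeapRange();     // writes nothing when it falls through
        effects.reads = HeapRange::top(); // but may read anything if it exits

        printf("exitsSideways=%d, reads everything=%d, writes nothing=%d\n",
            effects.exitsSideways, effects.reads.end == ~0u, effects.writes.empty());
        return 0;
    }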
…
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 Vector<Location> locations;
-                for (const B3::ValueRep& rep : params.reps)
+                for (const B3::ValueRep& rep : params)
                     locations.append(Location::forValueRep(rep));
…
                 CCallHelpers::Label done = jit.label();

-                RegisterSet usedRegisters = params.usedRegisters;
+                RegisterSet usedRegisters = params.usedRegisters();

                 // FIXME: As part of handling exceptions, we need to create a concrete OSRExit here.
                 // Doing so should automagically register late paths that emit exit thunks.
-
-                params.context->latePaths.append(
-                    createSharedTask<Air::GenerationContext::LatePathFunction>(
-                        [=] (CCallHelpers& jit, Air::GenerationContext&) {
-                            AllowMacroScratchRegisterUsage allowScratch(jit);
-                            patchableJump.m_jump.link(&jit);
-                            unsigned index = state->jitCode->lazySlowPaths.size();
-                            state->jitCode->lazySlowPaths.append(nullptr);
-                            jit.pushToSaveImmediateWithoutTouchingRegisters(
-                                CCallHelpers::TrustedImm32(index));
-                            CCallHelpers::Jump generatorJump = jit.jump();
-
-                            // Note that so long as we're here, we don't really know if our late path
-                            // runs before or after any other late paths that we might depend on, like
-                            // the exception thunk.
-
-                            RefPtr<JITCode> jitCode = state->jitCode;
-                            VM* vm = &state->graph.m_vm;
-
-                            jit.addLinkTask(
-                                [=] (LinkBuffer& linkBuffer) {
-                                    linkBuffer.link(
-                                        generatorJump, CodeLocationLabel(
-                                            vm->getCTIStub(
-                                                lazySlowPathGenerationThunkGenerator).code()));
…
-                        }));
+
+                params.addLatePath(
+                    [=] (CCallHelpers& jit) {
+                        AllowMacroScratchRegisterUsage allowScratch(jit);
+                        patchableJump.m_jump.link(&jit);
+                        unsigned index = state->jitCode->lazySlowPaths.size();
+                        state->jitCode->lazySlowPaths.append(nullptr);
+                        jit.pushToSaveImmediateWithoutTouchingRegisters(
+                            CCallHelpers::TrustedImm32(index));
+                        CCallHelpers::Jump generatorJump = jit.jump();
+
+                        // Note that so long as we're here, we don't really know if our late path
+                        // runs before or after any other late paths that we might depend on, like
+                        // the exception thunk.
+
+                        RefPtr<JITCode> jitCode = state->jitCode;
+                        VM* vm = &state->graph.m_vm;
+
+                        jit.addLinkTask(
+                            [=] (LinkBuffer& linkBuffer) {
+                                linkBuffer.link(
+                                    generatorJump, CodeLocationLabel(
+                                        vm->getCTIStub(
+                                            lazySlowPathGenerationThunkGenerator).code()));
+
+                                CodeLocationJump linkedPatchableJump = CodeLocationJump(
+                                    linkBuffer.locationOf(patchableJump));
+                                CodeLocationLabel linkedDone = linkBuffer.locationOf(done);
+
+                                // FIXME: Need a story for exceptions in FTL-B3. That basically means
+                                // doing a lookup of the exception entrypoint here. We will have an
+                                // OSR exit data structure of some sort.
+                                // https://bugs.webkit.org/show_bug.cgi?id=151686
+                                CodeLocationLabel exceptionTarget;
+                                CallSiteIndex callSiteIndex =
+                                    jitCode->common.addUniqueCallSiteIndex(origin);
+
+                                std::unique_ptr<LazySlowPath> lazySlowPath =
+                                    std::make_unique<LazySlowPath>(
+                                        linkedPatchableJump, linkedDone, exceptionTarget,
+                                        usedRegisters, callSiteIndex, generator);
+
+                                jitCode->lazySlowPaths[index] = WTF::move(lazySlowPath);
+                            });
+                    });
             });
         return result;
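The late path emitted above is only a stub: it pushes the slow-path index, jumps to a shared generation thunk, and the real slow-path code is produced the first time the stub runs and then installed into jitCode->lazySlowPaths[index]. The sketch below shows the same generate-on-first-use idea in plain C++, using a table of std::function slots as a stand-in for patchable machine code; the type and function names here are invented for illustration and do not mirror the JSC classes.

    // Generate-on-first-use analogy for lazy slow paths: each slot starts empty and the
    // "generation thunk" (call()) builds and installs the real handler on first use.
    #include <cstdio>
    #include <functional>
    #include <vector>

    struct LazySlowPathTable {
        std::vector<std::function<int(int)>> handlers;
        std::vector<std::function<std::function<int(int)>()>> generators;

        size_t add(std::function<std::function<int(int)>()> generator)
        {
            handlers.push_back(nullptr); // reserve the slot, like lazySlowPaths.append(nullptr)
            generators.push_back(std::move(generator));
            return handlers.size() - 1;
        }

        int call(size_t index, int value)
        {
            if (!handlers[index]) {
                printf("generating slow path %zu (happens once)\n", index);
                handlers[index] = generators[index](); // generate and install the real handler
            }
            return handlers[index](value);
        }
    };

    int main()
    {
        LazySlowPathTable table;
        size_t index = table.add([] {
            return [] (int value) { return value * 2; };
        });

        printf("%d\n", table.call(index, 10)); // generates the handler, prints 20
        printf("%d\n", table.call(index, 21)); // reuses the installed handler, prints 42
        return 0;
    }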
…
     }

+#if !FTL_USES_B3
     void appendOSRExitArgumentsForPatchpointIfWillCatchException(StackmapArgumentList& arguments, ExceptionType exceptionType, unsigned offsetOfExitArguments)
     {
…
         StackmapArgumentList freshList =
-            buildExitArguments(exitDescriptor, exitDescriptorImpl, noValue(), offsetOfExitArguments);
+            buildExitArguments(exitDescriptor, exitDescriptorImpl.m_codeOrigin, noValue(), offsetOfExitArguments);
         arguments.appendVector(freshList);
     }
+#endif // !FTL_USES_B3

     bool emitBranchToOSRExitIfWillCatchException(LValue hadException)
…
     }

+
+#if FTL_USES_B3
+    OSRExitDescriptor* appendOSRExitDescriptor(FormattedValue lowValue, Node* highValue)
+    {
+        return &m_ftlState.jitCode->osrExitDescriptors.alloc(
+            lowValue.format(), m_graph.methodOfGettingAValueProfileFor(highValue),
+            availabilityMap().m_locals.numberOfArguments(),
+            availabilityMap().m_locals.numberOfLocals());
+    }
+#else // FTL_USES_B3
     OSRExitDescriptor* appendOSRExitDescriptor(ExitKind kind, ExceptionType exceptionType, FormattedValue lowValue, Node* highValue, NodeOrigin origin)
     {
…
         return &result;
     }
+#endif // FTL_USES_B3

     void appendOSRExit(
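The B3 overload of appendOSRExitDescriptor hands back the address of a descriptor it just allocated inside jitCode->osrExitDescriptors, so that container has to keep elements at stable addresses while later descriptors are added. The sketch below illustrates the requirement with standard containers; it assumes nothing about the actual WTF container behind osrExitDescriptors and only demonstrates why a plain growable array could not back this API.

    // Why the exit-descriptor container must give out stable addresses: a std::vector may
    // reallocate and move its elements, invalidating previously returned pointers, while a
    // std::deque keeps existing elements in place when appending at the end.
    #include <cstdio>
    #include <deque>
    #include <vector>

    struct Descriptor { int id; };

    int main()
    {
        std::deque<Descriptor> stable;
        stable.push_back({ 0 });
        Descriptor* first = &stable.back(); // hand out a pointer, like appendOSRExitDescriptor()
        for (int i = 1; i < 1000; ++i)
            stable.push_back({ i });
        printf("deque-backed pointer still valid: id=%d\n", first->id); // still 0

        std::vector<Descriptor> unstable;
        unstable.push_back({ 0 });
        // Taking &unstable.back() here and using it after further push_backs would dangle
        // once the vector reallocates, so a vector could not safely back this interface.
        return 0;
    }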
…
     void blessSpeculation(B3::CheckValue* value, ExitKind kind, FormattedValue lowValue, Node* highValue, NodeOrigin origin, bool isExceptionHandler = false)
     {
-        OSRExitDescriptor* exitDescriptor = appendOSRExitDescriptor(
-            kind, isExceptionHandler ? ExceptionType::CCallException : ExceptionType::None, lowValue,
-            highValue, origin);
-        OSRExitDescriptorImpl* exitDescriptorImpl = &m_ftlState.osrExitDescriptorImpls.last();
+        OSRExitDescriptor* exitDescriptor = appendOSRExitDescriptor(lowValue, highValue);

         unsigned offset = value->numChildren();
-        value->appendColdAnys(buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last(), lowValue));
+        value->appendColdAnys(buildExitArguments(exitDescriptor, origin.forExit, lowValue));

         State* state = &m_ftlState;
         value->setGenerator(
             [=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
-                exitDescriptor->emitOSRExit(*state, exitDescriptorImpl, jit, params, offset);
+                exitDescriptor->emitOSRExit(
+                    *state, kind, origin, jit, params, offset, isExceptionHandler);
             });
     }
…
     void emitOSRExitCall(OSRExitDescriptor* exitDescriptor, FormattedValue lowValue)
     {
-        callStackmap(exitDescriptor, buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last(), lowValue));
+        callStackmap(exitDescriptor, buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last().m_codeOrigin, lowValue));
     }
 #endif

     StackmapArgumentList buildExitArguments(
-        OSRExitDescriptor* exitDescriptor, OSRExitDescriptorImpl& exitDescriptorImpl, FormattedValue lowValue,
+        OSRExitDescriptor* exitDescriptor, CodeOrigin exitOrigin, FormattedValue lowValue,
         unsigned offsetOfExitArgumentsInStackmapLocations = 0)
     {
         StackmapArgumentList result;
         buildExitArguments(
-            exitDescriptor, exitDescriptorImpl, result, lowValue, offsetOfExitArgumentsInStackmapLocations);
+            exitDescriptor, exitOrigin, result, lowValue, offsetOfExitArgumentsInStackmapLocations);
         return result;
     }

     void buildExitArguments(
-        OSRExitDescriptor* exitDescriptor, OSRExitDescriptorImpl& exitDescriptorImpl, StackmapArgumentList& arguments, FormattedValue lowValue,
+        OSRExitDescriptor* exitDescriptor, CodeOrigin exitOrigin, StackmapArgumentList& arguments, FormattedValue lowValue,
         unsigned offsetOfExitArgumentsInStackmapLocations = 0)
     {
…
         AvailabilityMap availabilityMap = this->availabilityMap();
-        availabilityMap.pruneByLiveness(m_graph, exitDescriptorImpl.m_codeOrigin);
+        availabilityMap.pruneByLiveness(m_graph, exitOrigin);

         HashMap<Node*, ExitTimeObjectMaterialization*> map;
…
             DFG_ASSERT(
                 m_graph, m_node,
-                (!(availability.isDead() && m_graph.isLiveInBytecode(VirtualRegister(operand), exitDescriptorImpl.m_codeOrigin))) || m_graph.m_plan.mode == FTLForOSREntryMode);
+                (!(availability.isDead() && m_graph.isLiveInBytecode(VirtualRegister(operand), exitOrigin))) || m_graph.m_plan.mode == FTLForOSREntryMode);
         }
         ExitValue exitValue = exitValueForAvailability(arguments, map, availability);
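blessSpeculation appends the OSR exit's extra arguments after the check's own children and remembers where they start (offset = value->numChildren()), so the generator can later find the exit values among the reps it receives. Below is a minimal model of that bookkeeping using plain vectors of strings in place of B3 values and reps; the names are invented for illustration.

    // Record where the cold exit arguments begin before appending them, then walk the
    // list from that offset when "generating" the exit.
    #include <cstdio>
    #include <string>
    #include <vector>

    int main()
    {
        // The check's own children (what the speculation itself consumes).
        std::vector<std::string> children { "condition", "speculatedValue" };

        // Remember where the exit arguments will start, like offset = value->numChildren().
        size_t offset = children.size();

        // Append the cold exit arguments, like value->appendColdAnys(buildExitArguments(...)).
        std::vector<std::string> exitArguments { "local0", "local1", "argumentCount" };
        children.insert(children.end(), exitArguments.begin(), exitArguments.end());

        // Later, the generator walks everything from `offset` onward to build the OSR exit.
        printf("exit arguments start at index %zu:\n", offset);
        for (size_t i = offset; i < children.size(); ++i)
            printf("  %s\n", children[i].c_str());
        return 0;
    }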
…
         Node* node)
     {
+        // NOTE: In FTL->B3, we cannot generate code here, because m_output is positioned after the
+        // stackmap value. Like all values, the stackmap value cannot use a child that is defined
+        // after it.
+
         ASSERT(node->shouldGenerate());
         ASSERT(node->hasResult());
…
         value = m_booleanValues.get(node);
+#if FTL_USES_B3
+        if (isValid(value))
+            return exitArgument(arguments, DataFormatBoolean, value.value());
+#else // FTL_USES_B3
         if (isValid(value)) {
             LValue valueToPass = m_out.zeroExt(value.value(), m_out.int32);
             return exitArgument(arguments, DataFormatBoolean, valueToPass);
         }
+#endif // FTL_USES_B3

         value = m_jsValueValues.get(node);