Changeset 214571 in webkit for trunk/Source/JavaScriptCore/ftl
- Timestamp: Mar 29, 2017, 3:55:53 PM
- Location: trunk/Source/JavaScriptCore/ftl
- Files: 11 edited
trunk/Source/JavaScriptCore/ftl/FTLCompile.cpp
r214531 → r214571

     });
 
-    state.finalizer->b3CodeLinkBuffer = std::make_unique<LinkBuffer>(
-        vm, jit, codeBlock, JITCompilationCanFail);
+    state.finalizer->b3CodeLinkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
     if (state.finalizer->b3CodeLinkBuffer->didFailToAllocate()) {
         state.allocationFailed = true;
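The same constructor change repeats across the files below: LinkBuffer no longer takes a VM&, so call sites drop that argument (and, where a local VM existed only for that purpose, the local as well). A minimal sketch of the new shape, assuming JSC's internal headers; the surrounding function and the bail-out on allocation failure are illustrative, not copied from the tree:

    // Sketch: allocating a LinkBuffer with the post-r214571 signature.
    CCallHelpers jit(codeBlock);
    // ... emit code into jit ...
    auto linkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail); // no VM& parameter
    if (linkBuffer->didFailToAllocate()) {
        state.allocationFailed = true; // hypothetical handling, mirroring the call site above
        return;
    }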
trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp
r214531 → r214571

     RELEASE_ASSERT(!m_stub);
 
-    VM& vm = *codeBlock->vm();
-
     CCallHelpers jit(codeBlock);
     GenerationParams params;
…
     m_generator->run(jit, params);
 
-    LinkBuffer linkBuffer(vm, jit, codeBlock, JITCompilationMustSucceed);
+    LinkBuffer linkBuffer(jit, codeBlock, JITCompilationMustSucceed);
     linkBuffer.link(params.doneJumps, m_done);
     if (m_exceptionTarget)
trunk/Source/JavaScriptCore/ftl/FTLLink.cpp
r214531 → r214571

     mainPathJumps.append(jit.jump());
 
-    linkBuffer = std::make_unique<LinkBuffer>(vm, jit, codeBlock, JITCompilationCanFail);
+    linkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
     if (linkBuffer->didFailToAllocate()) {
         state.allocationFailed = true;
…
     CCallHelpers::Jump mainPathJump = jit.jump();
 
-    linkBuffer = std::make_unique<LinkBuffer>(vm, jit, codeBlock, JITCompilationCanFail);
+    linkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
     if (linkBuffer->didFailToAllocate()) {
         state.allocationFailed = true;
trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp
r214531 → r214571

     CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
     State* state = &m_ftlState;
+    VM* vm = &this->vm();
     patchpoint->setGenerator(
         [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
…
     [=] (LinkBuffer& linkBuffer) {
         MacroAssemblerCodePtr linkCall =
-            linkBuffer.vm().getCTIStub(linkCallThunkGenerator).code();
+            vm->getCTIStub(linkCallThunkGenerator).code();
         linkBuffer.link(slowCall, FunctionPtr(linkCall.executableAddress()));
 
…
     CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
     State* state = &m_ftlState;
+    VM* vm = &this->vm();
     patchpoint->setGenerator(
         [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
…
     [=] (LinkBuffer& linkBuffer) {
         MacroAssemblerCodePtr linkCall =
-            linkBuffer.vm().getCTIStub(linkCallThunkGenerator).code();
+            vm->getCTIStub(linkCallThunkGenerator).code();
         linkBuffer.link(slowCall, FunctionPtr(linkCall.executableAddress()));
 
…
     CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
     State* state = &m_ftlState;
+    VM* vm = &this->vm();
     patchpoint->setGenerator(
         [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
…
     jit.move(CCallHelpers::TrustedImmPtr(callee), GPRInfo::nonPreservedNonArgumentGPR);
     jit.call(GPRInfo::nonPreservedNonArgumentGPR);
-    exceptions->append(jit.emitExceptionCheck(state->vm(), AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
+    exceptions->append(jit.emitExceptionCheck(*vm, AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
     };
…
     [=] (LinkBuffer& linkBuffer) {
         MacroAssemblerCodePtr linkCall =
-            linkBuffer.vm().getCTIStub(linkCallThunkGenerator).code();
+            vm->getCTIStub(linkCallThunkGenerator).code();
         linkBuffer.link(slowCall, FunctionPtr(linkCall.executableAddress()));
 
…
     CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
     State* state = &m_ftlState;
+    VM* vm = &this->vm();
     patchpoint->setGenerator(
         [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
…
     jit.move(CCallHelpers::TrustedImmPtr(callee), GPRInfo::nonPreservedNonArgumentGPR);
     jit.call(GPRInfo::nonPreservedNonArgumentGPR);
-    exceptions->append(jit.emitExceptionCheck(state->vm(), AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
+    exceptions->append(jit.emitExceptionCheck(*vm, AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
     };
…
 
     // emitSetupVarargsFrameFastCase modifies the stack pointer if it succeeds.
-    emitSetupVarargsFrameFastCase(state->vm(), jit, scratchGPR2, scratchGPR1, scratchGPR2, scratchGPR3, inlineCallFrame, data->firstVarArgOffset, slowCase);
+    emitSetupVarargsFrameFastCase(*vm, jit, scratchGPR2, scratchGPR1, scratchGPR2, scratchGPR3, inlineCallFrame, data->firstVarArgOffset, slowCase);
 
     CCallHelpers::Jump done = jit.jump();
…
     [=] (LinkBuffer& linkBuffer) {
         MacroAssemblerCodePtr linkCall =
-            linkBuffer.vm().getCTIStub(linkCallThunkGenerator).code();
+            vm->getCTIStub(linkCallThunkGenerator).code();
         linkBuffer.link(slowCall, FunctionPtr(linkCall.executableAddress()));
 
…
     CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
     State* state = &m_ftlState;
+    VM& vm = this->vm();
     patchpoint->setGenerator(
-        [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
+        [=, &vm] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
             CallSiteIndex callSiteIndex = state->jitCode->common.addUniqueCallSiteIndex(codeOrigin);
…
     jit.addPtr(CCallHelpers::TrustedImm32(requiredBytes), CCallHelpers::stackPointerRegister);
     jit.load64(CCallHelpers::calleeFrameSlot(CallFrameSlot::callee), GPRInfo::regT0);
-    jit.emitDumbVirtualCall(callLinkInfo);
+    jit.emitDumbVirtualCall(vm, callLinkInfo);
 
     done.link(&jit);
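The lowering changes above all follow one pattern: since the link tasks can no longer pull the VM out of the LinkBuffer, each call patchpoint captures it up front (as a VM* by value, or a VM& by reference where the generator itself needs one). A condensed sketch of that pattern, assuming the JSC types named in the diff; slowCall and the rest of the patchpoint setup are elided:

    State* state = &m_ftlState;
    VM* vm = &this->vm(); // captured by the lambdas below; the VM outlives the link task
    patchpoint->setGenerator(
        [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
            // ... emit the fast and slow call paths; slowCall is produced here ...
            jit.addLinkTask(
                [=] (LinkBuffer& linkBuffer) {
                    // Previously linkBuffer.vm().getCTIStub(...); now the captured VM is used.
                    MacroAssemblerCodePtr linkCall =
                        vm->getCTIStub(linkCallThunkGenerator).code();
                    linkBuffer.link(slowCall, FunctionPtr(linkCall.executableAddress()));
                });
        });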
trunk/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp
r214531 → r214571

     adjustAndJumpToTarget(*vm, jit, exit);
 
-    LinkBuffer patchBuffer(*vm, jit, codeBlock);
+    LinkBuffer patchBuffer(jit, codeBlock);
     exit.m_code = FINALIZE_CODE_IF(
         shouldDumpDisassembly() || Options::verboseOSR() || Options::verboseFTLOSRExit(),
trunk/Source/JavaScriptCore/ftl/FTLOSRExitHandle.cpp
r196729 → r214571

     CCallHelpers::PatchableJump jump = jit.patchableJump();
     RefPtr<OSRExitHandle> self = this;
+    VM& vm = state.vm();
     jit.addLinkTask(
-        [self, jump, myLabel, compilation] (LinkBuffer& linkBuffer) {
+        [self, jump, myLabel, compilation, &vm] (LinkBuffer& linkBuffer) {
             self->exit.m_patchableJump = CodeLocationJump(linkBuffer.locationOf(jump));
 
             linkBuffer.link(
                 jump.m_jump,
-                CodeLocationLabel(linkBuffer.vm().getCTIStub(osrExitGenerationThunkGenerator).code()));
+                CodeLocationLabel(vm.getCTIStub(osrExitGenerationThunkGenerator).code()));
             if (compilation)
                 compilation->addOSRExitSite({ linkBuffer.locationOf(myLabel).executableAddress() });
trunk/Source/JavaScriptCore/ftl/FTLSlowPathCall.cpp
r190860 → r214571

 }
 
-SlowPathCall SlowPathCallContext::makeCall(void* callTarget)
+SlowPathCall SlowPathCallContext::makeCall(VM& vm, void* callTarget)
 {
     SlowPathCall result = SlowPathCall(m_jit.call(), keyWithTarget(callTarget));
 
     m_jit.addLinkTask(
-        [result] (LinkBuffer& linkBuffer) {
-            VM& vm = linkBuffer.vm();
-
+        [result, &vm] (LinkBuffer& linkBuffer) {
             MacroAssemblerCodeRef thunk =
-                vm.ftlThunks->getSlowPathCallThunk(vm, result.key());
+                vm.ftlThunks->getSlowPathCallThunk(result.key());
 
             linkBuffer.link(result.call(), CodeLocationLabel(thunk.code()));
trunk/Source/JavaScriptCore/ftl/FTLSlowPathCall.h
r214531 → r214571

 // NOTE: The call that this returns is already going to be linked by the JIT using addLinkTask(),
 // so there is no need for you to link it yourself.
-SlowPathCall makeCall(void* callTarget);
+SlowPathCall makeCall(VM&, void* callTarget);
 
 private:
…
         SlowPathCallContext context(usedRegisters, jit, sizeof...(ArgumentTypes) + 1, resultGPR);
         jit.setupArgumentsWithExecState(arguments...);
-        call = context.makeCall(function.value());
+        call = context.makeCall(vm, function.value());
     }
     if (exceptionTarget)
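Together, these two files thread the VM explicitly through the slow path call helper instead of recovering it from the LinkBuffer at link time. A hedged caller sketch under that assumption; the register set, argument list, and operation pointer are illustrative placeholders, not taken from the diff:

    // Sketch: emitting a slow path call with the new makeCall(VM&, void*) signature.
    SlowPathCallContext context(usedRegisters, jit, /* numArgs: exec only */ 1, GPRInfo::returnValueGPR);
    jit.setupArgumentsWithExecState(/* operation arguments would go here */);
    SlowPathCall call = context.makeCall(vm, operationPointer);
    // Per the NOTE in the header, the returned call is linked by the addLinkTask()
    // that makeCall registered; no manual linkBuffer.link() is needed.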
trunk/Source/JavaScriptCore/ftl/FTLState.h
r214531 → r214571 (whitespace-only change in this hunk)

 
     VM& vm() { return graph.m_vm; }
 
     // None of these things is owned by State. It is the responsibility of
     // FTL phases to properly manage the lifecycle of the module and function.
trunk/Source/JavaScriptCore/ftl/FTLThunks.cpp
r214531 → r214571

     jit.ret();
 
-    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
+    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
     patchBuffer.link(functionCall, generationFunction);
     return FINALIZE_CODE(patchBuffer, ("%s", name));
…
 }
 
-MacroAssemblerCodeRef slowPathCallThunkGenerator(VM& vm, const SlowPathCallKey& key)
+MacroAssemblerCodeRef slowPathCallThunkGenerator(const SlowPathCallKey& key)
 {
     AssemblyHelpers jit(nullptr);
…
     jit.ret();
 
-    LinkBuffer patchBuffer(vm, jit, GLOBAL_THUNK_ID);
+    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
     patchBuffer.link(call, FunctionPtr(key.callTarget()));
     return FINALIZE_CODE(patchBuffer, ("FTL slow path call thunk for %s", toCString(key).data()));
trunk/Source/JavaScriptCore/ftl/FTLThunks.h
r206525 → r214571

 MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM*);
 MacroAssemblerCodeRef lazySlowPathGenerationThunkGenerator(VM*);
-MacroAssemblerCodeRef slowPathCallThunkGenerator(VM&, const SlowPathCallKey&);
+MacroAssemblerCodeRef slowPathCallThunkGenerator(const SlowPathCallKey&);
 
 template<typename KeyTypeArgument>
…
 template<typename MapType, typename GeneratorType>
 MacroAssemblerCodeRef generateIfNecessary(
-    VM& vm, MapType& map, const typename MapType::KeyType& key, GeneratorType generator)
+    MapType& map, const typename MapType::KeyType& key, GeneratorType generator)
 {
     typename MapType::ToThunkMap::iterator iter = map.m_toThunk.find(key);
…
         return iter->value;
 
-    MacroAssemblerCodeRef result = generator(vm, key);
+    MacroAssemblerCodeRef result = generator(key);
     map.m_toThunk.add(key, result);
     map.m_fromThunk.add(result.code(), key);
…
 class Thunks {
 public:
-    MacroAssemblerCodeRef getSlowPathCallThunk(VM& vm, const SlowPathCallKey& key)
+    MacroAssemblerCodeRef getSlowPathCallThunk(const SlowPathCallKey& key)
     {
         return generateIfNecessary(
-            vm, m_slowPathCallThunks, key, slowPathCallThunkGenerator);
+            m_slowPathCallThunks, key, slowPathCallThunkGenerator);
     }
 
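With the generator reduced to a key-only signature, nothing in the thunk cache needs a VM anymore; only the owner of the Thunks object does. A short usage sketch under that assumption, where vm and key stand in for a live VM& and a SlowPathCallKey:

    // Look up (or lazily generate and memoize) the slow path call thunk for `key`.
    MacroAssemblerCodeRef thunk = vm.ftlThunks->getSlowPathCallThunk(key);
    // Internally this goes through generateIfNecessary(m_slowPathCallThunks, key,
    // slowPathCallThunkGenerator), which caches the result in both direction maps.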