Changeset 214531 in webkit for trunk/Source/JavaScriptCore/ftl
- Timestamp: Mar 28, 2017, 11:15:23 PM (8 years ago)
- Location: trunk/Source/JavaScriptCore/ftl
- Files: 10 edited
trunk/Source/JavaScriptCore/ftl/FTLCompile.cpp
(diff: r214069 → r214531)

     codeBlock->clearExceptionHandlers();

-    CCallHelpers jit(&vm, codeBlock);
+    CCallHelpers jit(codeBlock);
     B3::generate(*state.proc, jit);

     // Emit the exception handler.
     *state.exceptionHandler = jit.label();
-    jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
-    jit.move(MacroAssembler::TrustedImmPtr(jit.vm()), GPRInfo::argumentGPR0);
+    jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(vm);
+    jit.move(MacroAssembler::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
     jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
     CCallHelpers::Call call = jit.call();
-    jit.jumpToExceptionHandler();
+    jit.jumpToExceptionHandler(vm);
     jit.addLinkTask(
         [=] (LinkBuffer& linkBuffer) {
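Every file in this changeset follows the same theme: CCallHelpers/AssemblyHelpers no longer take a VM* at construction, so code that previously reached the VM through jit.vm() now receives it as an explicit argument. A schematic before/after sketch of the pattern, using the names from the hunk above (illustrative only; it assumes JSC's headers and is not meant to compile standalone):

    // Before r214531: the assembler carried a VM pointer from construction.
    //     CCallHelpers jit(&vm, codeBlock);
    //     jit.jumpToExceptionHandler();       // implicitly used jit.vm()
    // After r214531: the assembler is VM-free; the VM is passed where needed.
    CCallHelpers jit(codeBlock);               // no VM* parameter anymore
    jit.jumpToExceptionHandler(vm);            // VM& is an explicit argument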
trunk/Source/JavaScriptCore/ftl/FTLJITFinalizer.h
(diff: r214069 → r214531)

 #include "FTLGeneratedFunction.h"
 #include "FTLJITCode.h"
-#include "FTLSlowPathCall.h"
 #include "LinkBuffer.h"
 #include "MacroAssembler.h"
trunk/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp
(diff: r196729 → r214531)

     VM& vm = *codeBlock->vm();

-    CCallHelpers jit(&vm, codeBlock);
+    CCallHelpers jit(codeBlock);
     GenerationParams params;
     CCallHelpers::JumpList exceptionJumps;
trunk/Source/JavaScriptCore/ftl/FTLLazySlowPathCall.h
(diff: r206525 → r214531)

 template<typename ResultType, typename... ArgumentTypes>
 RefPtr<LazySlowPath::Generator> createLazyCallGenerator(
-    FunctionPtr function, ResultType result, ArgumentTypes... arguments)
+    VM& vm, FunctionPtr function, ResultType result, ArgumentTypes... arguments)
 {
     return LazySlowPath::createGenerator(
-        [=] (CCallHelpers& jit, LazySlowPath::GenerationParams& params) {
+        [=, &vm] (CCallHelpers& jit, LazySlowPath::GenerationParams& params) {
             callOperation(
-                params.lazySlowPath->usedRegisters(), jit, params.lazySlowPath->callSiteIndex(),
+                vm, params.lazySlowPath->usedRegisters(), jit, params.lazySlowPath->callSiteIndex(),
                 params.exceptionJumps, function, result, arguments...);
             params.doneJumps.append(jit.jump());
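The capture-list change from [=] to [=, &vm] is the one non-mechanical detail here: JSC's VM is noncopyable, so a default by-value capture of the new VM& parameter would try to copy the VM object itself and fail to compile; naming &vm captures the reference instead. A self-contained toy illustrating the idiom (the VM and generator types here are stand-ins, not JSC's):

    #include <cstdio>
    #include <functional>

    struct VM {                       // stand-in for a noncopyable VM
        int id;
        explicit VM(int id) : id(id) {}
        VM(const VM&) = delete;
    };

    // Analog of createLazyCallGenerator: builds a callback that runs later.
    std::function<void()> createGenerator(VM& vm, int argument)
    {
        // [=] alone would try to copy `vm` (deleted copy constructor).
        // [=, &vm] copies `argument` but captures the VM by reference.
        return [=, &vm] {
            std::printf("operation on vm %d with argument %d\n", vm.id, argument);
        };
    }

    int main()
    {
        VM vm(1);
        auto generator = createGenerator(vm, 42);
        generator(); // the VM outlives compilation, so the reference stays valid
    }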
trunk/Source/JavaScriptCore/ftl/FTLLink.cpp
(diff: r214069 → r214531)

     // Create the entrypoint. Note that we use this entrypoint totally differently
     // depending on whether we're doing OSR entry or not.
-    CCallHelpers jit(&vm, codeBlock);
+    CCallHelpers jit(codeBlock);

     std::unique_ptr<LinkBuffer> linkBuffer;
…
     auto noException = jit.branch32(CCallHelpers::GreaterThanOrEqual, GPRInfo::returnValueGPR, CCallHelpers::TrustedImm32(0));
-    jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
-    jit.move(CCallHelpers::TrustedImmPtr(jit.vm()), GPRInfo::argumentGPR0);
+    jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(vm);
+    jit.move(CCallHelpers::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
     jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
     CCallHelpers::Call callLookupExceptionHandlerFromCallerFrame = jit.call();
-    jit.jumpToExceptionHandler();
+    jit.jumpToExceptionHandler(vm);
     noException.link(&jit);
trunk/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp
(diff: r214296 → r214531)

     m_out.storePtr(m_out.constIntPtr(bitwise_cast<intptr_t>(codeBlock())), addressFor(CallFrameSlot::codeBlock));

+    VM* vm = &this->vm();
+
     // Stack Overflow Check.
     unsigned exitFrameSize = m_graph.requiredRegisterCountForExit() * sizeof(Register);
-    MacroAssembler::AbsoluteAddress addressOfStackLimit(vm().addressOfSoftStackLimit());
+    MacroAssembler::AbsoluteAddress addressOfStackLimit(vm->addressOfSoftStackLimit());
     PatchpointValue* stackOverflowHandler = m_out.patchpoint(Void);
     CallSiteIndex callSiteIndex = callSiteIndexForCodeOrigin(m_ftlState, CodeOrigin(0));
…
             MacroAssembler::TrustedImm32(callSiteIndex.bits()),
             CCallHelpers::tagFor(VirtualRegister(CallFrameSlot::argumentCount)));
-        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer();
+        jit.copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm);

         jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
…
         CCallHelpers::Call throwCall = jit.call();

-        jit.move(CCallHelpers::TrustedImmPtr(jit.vm()), GPRInfo::argumentGPR0);
+        jit.move(CCallHelpers::TrustedImmPtr(vm), GPRInfo::argumentGPR0);
         jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
         CCallHelpers::Call lookupExceptionHandlerCall = jit.call();
-        jit.jumpToExceptionHandler();
+        jit.jumpToExceptionHandler(*vm);

         jit.addLinkTask(
…
     m_out.appendTo(slowPath, continuation);
+    VM& vm = this->vm();
     LValue callResult = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationCreateActivationDirect, locations[0].directGPR(),
                 CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR(),
…
     Vector<LValue> slowPathArguments;
     slowPathArguments.append(scope);
+    VM& vm = this->vm();
     LValue callResult = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
             if (isGeneratorFunction) {
-                return createLazyCallGenerator(
+                return createLazyCallGenerator(vm,
                     operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint,
                     locations[0].directGPR(), locations[1].directGPR(),
…
             }
             if (isAsyncFunction) {
-                return createLazyCallGenerator(
+                return createLazyCallGenerator(vm,
                     operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint,
                     locations[0].directGPR(), locations[1].directGPR(),
                     CCallHelpers::TrustedImmPtr(executable));
             }
-            return createLazyCallGenerator(
+            return createLazyCallGenerator(vm,
                 operationNewFunctionWithInvalidatedReallocationWatchpoint,
                 locations[0].directGPR(), locations[1].directGPR(),
…
     m_out.appendTo(slowPath, continuation);
+    VM& vm = this->vm();
     LValue callResult = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationCreateDirectArguments, locations[0].directGPR(),
                 CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR(),
…
     LValue storageValue = m_out.phi(pointerType(), noStorage, haveStorage);

+    VM& vm = this->vm();
     LValue slowResultValue = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationNewTypedArrayWithSizeForType(type), locations[0].directGPR(),
                 CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR(),
…
     m_out.appendTo(slowPath, continuation);
     LValue slowResultValue;
+    VM& vm = this->vm();
     switch (numKids) {
     case 2:
         slowResultValue = lazySlowPath(
-            [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-                return createLazyCallGenerator(
+            [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+                return createLazyCallGenerator(vm,
                     operationMakeRope2, locations[0].directGPR(), locations[1].directGPR(),
                     locations[2].directGPR());
…
     case 3:
         slowResultValue = lazySlowPath(
-            [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-                return createLazyCallGenerator(
+            [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+                return createLazyCallGenerator(vm,
                     operationMakeRope3, locations[0].directGPR(), locations[1].directGPR(),
                     locations[2].directGPR(), locations[3].directGPR());
…
     LBasicBlock lastNext = m_out.appendTo(isNotInvalidated, continuation);

+    VM& vm = this->vm();
     lazySlowPath(
-        [=] (const Vector<Location>&) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>&) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationNotifyWrite, InvalidGPRReg, CCallHelpers::TrustedImmPtr(set));
         });
…
         jit.move(CCallHelpers::TrustedImmPtr(callee), GPRInfo::nonPreservedNonArgumentGPR);
         jit.call(GPRInfo::nonPreservedNonArgumentGPR);
-        exceptions->append(jit.emitExceptionCheck(AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
+        exceptions->append(jit.emitExceptionCheck(state->vm(), AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
     };
…
         jit.move(CCallHelpers::TrustedImmPtr(callee), GPRInfo::nonPreservedNonArgumentGPR);
         jit.call(GPRInfo::nonPreservedNonArgumentGPR);
-        exceptions->append(jit.emitExceptionCheck(AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
+        exceptions->append(jit.emitExceptionCheck(state->vm(), AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
     };
…
     // emitSetupVarargsFrameFastCase modifies the stack pointer if it succeeds.
-    emitSetupVarargsFrameFastCase(jit, scratchGPR2, scratchGPR1, scratchGPR2, scratchGPR3, inlineCallFrame, data->firstVarArgOffset, slowCase);
+    emitSetupVarargsFrameFastCase(state->vm(), jit, scratchGPR2, scratchGPR1, scratchGPR2, scratchGPR3, inlineCallFrame, data->firstVarArgOffset, slowCase);

     CCallHelpers::Jump done = jit.jump();
…
     jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(operationCallEval)), GPRInfo::nonPreservedNonArgumentGPR);
     jit.call(GPRInfo::nonPreservedNonArgumentGPR);
-    exceptions->append(jit.emitExceptionCheck(AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
+    exceptions->append(jit.emitExceptionCheck(state->vm(), AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));

     CCallHelpers::Jump done = jit.branchTest64(CCallHelpers::NonZero, GPRInfo::returnValueGPR);
…
     m_out.appendTo(slowPath, notCellCase);
+    VM& vm = this->vm();
     LValue slowResultValue = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationObjectIsObject, locations[0].directGPR(),
                 CCallHelpers::TrustedImmPtr(globalObject), locations[1].directGPR());
…
     m_out.appendTo(slowPath, continuation);
+    VM& vm = this->vm();
     LValue slowResultValue = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationObjectIsFunction, locations[0].directGPR(),
                 CCallHelpers::TrustedImmPtr(globalObject), locations[1].directGPR());
…
     LValue butterflyValue = m_out.phi(pointerType(), noButterfly, haveButterfly);

+    VM& vm = this->vm();
     LValue slowObjectValue;
     if (hasIndexingHeader) {
         slowObjectValue = lazySlowPath(
-            [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-                return createLazyCallGenerator(
+            [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+                return createLazyCallGenerator(vm,
                     operationNewObjectWithButterflyWithIndexingHeaderAndVectorLength,
                     locations[0].directGPR(), CCallHelpers::TrustedImmPtr(structure.get()),
…
     } else {
         slowObjectValue = lazySlowPath(
-            [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-                return createLazyCallGenerator(
+            [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+                return createLazyCallGenerator(vm,
                     operationNewObjectWithButterfly, locations[0].directGPR(),
                     CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR());
…
     // FIXME: It may be worth creating an operation that calls a constructor on JSLexicalEnvironment that
     // doesn't initialize every slot because we are guaranteed to do that here.
+    VM& vm = this->vm();
     LValue callResult = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationCreateActivationDirect, locations[0].directGPR(),
                 CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR(),
…
     LBasicBlock lastNext = m_out.appendTo(needTrapHandling, continuation);

+    VM& vm = this->vm();
     lazySlowPath(
-        [=] (const Vector<Location>&) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(operationHandleTraps, InvalidGPRReg);
+        [=, &vm] (const Vector<Location>&) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm, operationHandleTraps, InvalidGPRReg);
         });
     m_out.jump(continuation);
…
     LValue slowButterflyValue;
+    VM& vm = this->vm();
     if (sizeInValues == initialOutOfLineCapacity) {
         slowButterflyValue = lazySlowPath(
-            [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-                return createLazyCallGenerator(
+            [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+                return createLazyCallGenerator(vm,
                     operationAllocateSimplePropertyStorageWithInitialCapacity,
                     locations[0].directGPR());
…
     } else {
         slowButterflyValue = lazySlowPath(
-            [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-                return createLazyCallGenerator(
+            [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+                return createLazyCallGenerator(vm,
                     operationAllocateSimplePropertyStorage, locations[0].directGPR(),
                     CCallHelpers::TrustedImmPtr(sizeInValues));
…
     m_out.appendTo(slowPath, continuation);

+    VM& vm = this->vm();
     LValue slowResultValue = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationNewObject, locations[0].directGPR(),
                 CCallHelpers::TrustedImmPtr(structure.get()));
…
     LValue butterflyValue = m_out.phi(pointerType(), noButterfly, haveButterfly);

+    VM& vm = this->vm();
     LValue slowResultValue = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationNewArrayWithSize, locations[0].directGPR(),
                 locations[1].directGPR(), locations[2].directGPR(), locations[3].directGPR());
…
     m_out.appendTo(slowPath, unreachable);
+    VM& vm = this->vm();
     LValue result = lazySlowPath(
-        [=] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
-            return createLazyCallGenerator(
+        [=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
+            return createLazyCallGenerator(vm,
                 operationTypeOfObjectAsTypeofType, locations[0].directGPR(),
                 CCallHelpers::TrustedImmPtr(globalObject), locations[1].directGPR());
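All of the FTLLowerDFGToB3.cpp hunks above apply one mechanical transformation: hoist VM& vm = this->vm(); into a local before the slow-path lambda, capture it with [=, &vm], and pass it as the new first argument of createLazyCallGenerator. Presumably the hoist matters because the generator can run long after lowering finishes, when a captured `this` would dangle, while the VM itself is long-lived. A runnable toy of that lifetime argument (all names here are stand-ins, not JSC's):

    #include <cstdio>
    #include <functional>

    struct VM { int id; };

    struct Lowering {                  // stand-in for the FTL lowering pass
        VM& m_vm;
        VM& vm() { return m_vm; }

        std::function<int()> makeSlowPathGenerator()
        {
            // Capturing `this` and calling this->vm() inside the lambda would
            // dangle once the pass is destroyed. Hoisting the VM reference
            // into a local and capturing it removes the dependency on `this`.
            VM& vm = this->vm();
            return [&vm] { return vm.id; };
        }
    };

    int main()
    {
        VM longLivedVM { 7 };
        std::function<int()> generator;
        {
            Lowering pass { longLivedVM };
            generator = pass.makeSlowPathGenerator();
        } // `pass` is gone; the generator only touches the long-lived VM.
        std::printf("vm id: %d\n", generator());
    }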
trunk/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp
(diff: r214069 → r214531)

     static_assert(MacroAssembler::framePointerRegister == GPRInfo::callFrameRegister, "MacroAssembler::framePointerRegister and GPRInfo::callFrameRegister must be the same");

-    CCallHelpers jit(vm, codeBlock);
+    CCallHelpers jit(codeBlock);

     // The first thing we need to do is restablish our frame in the case of an exception.
     if (exit.isGenericUnwindHandler()) {
         RELEASE_ASSERT(vm->callFrameForCatch); // The first time we hit this exit, like at all other times, this field should be non-null.
-        jit.restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer();
+        jit.restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm);
         jit.loadPtr(vm->addressOfCallFrameForCatch(), MacroAssembler::framePointerRegister);
         jit.addPtr(CCallHelpers::TrustedImm32(codeBlock->stackPointerOffset() * sizeof(Register)),
…
     handleExitCounts(jit, exit);
     reifyInlinedCallFrames(jit, exit);
-    adjustAndJumpToTarget(jit, exit);
+    adjustAndJumpToTarget(*vm, jit, exit);

     LinkBuffer patchBuffer(*vm, jit, codeBlock);
trunk/Source/JavaScriptCore/ftl/FTLSlowPathCall.h
(diff: r206525 → r214531)

 #include "CCallHelpers.h"
 #include "FTLSlowPathCallKey.h"
-#include "JITOperations.h"
+#include "FTLState.h"

 namespace JSC { namespace FTL {
-
-class State;

 class SlowPathCall {
…
 template<typename... ArgumentTypes>
 SlowPathCall callOperation(
-    const RegisterSet& usedRegisters, CCallHelpers& jit, CCallHelpers::JumpList* exceptionTarget,
+    VM& vm, const RegisterSet& usedRegisters, CCallHelpers& jit, CCallHelpers::JumpList* exceptionTarget,
     FunctionPtr function, GPRReg resultGPR, ArgumentTypes... arguments)
 {
…
     }
     if (exceptionTarget)
-        exceptionTarget->append(jit.emitExceptionCheck());
+        exceptionTarget->append(jit.emitExceptionCheck(vm));
     return call;
 }

 template<typename... ArgumentTypes>
 SlowPathCall callOperation(
-    const RegisterSet& usedRegisters, CCallHelpers& jit, CallSiteIndex callSiteIndex,
+    VM& vm, const RegisterSet& usedRegisters, CCallHelpers& jit, CallSiteIndex callSiteIndex,
     CCallHelpers::JumpList* exceptionTarget, FunctionPtr function, GPRReg resultGPR,
     ArgumentTypes... arguments)
…
         CCallHelpers::tagFor(CallFrameSlot::argumentCount));
     }
-    return callOperation(usedRegisters, jit, exceptionTarget, function, resultGPR, arguments...);
+    return callOperation(vm, usedRegisters, jit, exceptionTarget, function, resultGPR, arguments...);
 }
…
 {
     return callOperation(
-        usedRegisters, jit, callSiteIndexForCodeOrigin(state, codeOrigin), exceptionTarget, function,
+        state.vm(), usedRegisters, jit, callSiteIndexForCodeOrigin(state, codeOrigin), exceptionTarget, function,
         result, arguments...);
 }
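The overload chain now threads VM& from the most convenient entry point down to the raw call: the State-based overload fetches it from the new state.vm() accessor (added to FTLState.h below), which is also why this header now needs the full FTLState.h definition instead of a forward-declared class State. A toy, self-contained analog of that threading:

    #include <cstdio>

    struct VM { int id; };
    struct State { VM& m_vm; VM& vm() { return m_vm; } }; // mirrors FTLState.h's new accessor

    // Lowest-level overload: the VM is explicit, as in the first callOperation.
    template<typename... Arguments>
    void callOperation(VM& vm, const char* function, Arguments... arguments)
    {
        std::printf("calling %s on vm %d with %zu argument(s)\n",
            function, vm.id, sizeof...(arguments));
    }

    // Convenience overload: pulls the VM out of the compilation state, the
    // way the State-based callOperation now forwards state.vm().
    template<typename... Arguments>
    void callOperation(State& state, const char* function, Arguments... arguments)
    {
        callOperation(state.vm(), function, arguments...);
    }

    int main()
    {
        VM vm { 3 };
        State state { vm };
        callOperation(state, "operationExample", 1, 2);
    }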
trunk/Source/JavaScriptCore/ftl/FTLState.h
(diff: r206525 → r214531)

     State(DFG::Graph& graph);
     ~State();
+
+    VM& vm() { return graph.m_vm; }

     // None of these things is owned by State. It is the responsibility of
trunk/Source/JavaScriptCore/ftl/FTLThunks.cpp
(diff: r193485 → r214531)

     VM* vm, FunctionPtr generationFunction, const char* name, unsigned extraPopsToRestore, FrameAndStackAdjustmentRequirement frameAndStackAdjustmentRequirement)
 {
-    AssemblyHelpers jit(vm, 0);
+    AssemblyHelpers jit(nullptr);

     if (frameAndStackAdjustmentRequirement == FrameAndStackAdjustmentRequirement::Needed) {
…
 MacroAssemblerCodeRef slowPathCallThunkGenerator(VM& vm, const SlowPathCallKey& key)
 {
-    AssemblyHelpers jit(&vm, 0);
+    AssemblyHelpers jit(nullptr);

     // We want to save the given registers at the given offset, then we want to save the
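In the thunk generators, AssemblyHelpers jit(vm, 0) becomes AssemblyHelpers jit(nullptr): with the VM parameter removed, the single remaining constructor argument is the CodeBlock*, and thunks, which are not tied to any particular CodeBlock, pass nullptr (the old code spelled the same thing 0). A toy reconstruction of that constructor shape, inferred from the call sites in this diff rather than copied from AssemblyHelpers.h:

    #include <cstdio>

    struct CodeBlock { const char* name; };  // stand-in for JSC's CodeBlock

    struct AssemblyHelpers {                 // inferred post-r214531 shape: no VM*
        explicit AssemblyHelpers(CodeBlock* codeBlock)
            : m_codeBlock(codeBlock)
        {
        }
        CodeBlock* m_codeBlock;
    };

    int main()
    {
        CodeBlock block { "example" };
        AssemblyHelpers methodJit(&block);   // compiling a method: has a CodeBlock
        AssemblyHelpers thunkJit(nullptr);   // generating a thunk: no CodeBlock
        std::printf("method has block: %d, thunk has block: %d\n",
            methodJit.m_codeBlock != nullptr, thunkJit.m_codeBlock != nullptr);
    }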