Changeset 279049 in webkit for trunk/Source/JavaScriptCore/jit/JIT.cpp
- Timestamp: Jun 19, 2021, 1:25:14 AM
- File: 1 edited (trunk/Source/JavaScriptCore/jit/JIT.cpp)
- Legend: unprefixed lines are unmodified context; lines prefixed with '-' were removed in r279049; lines prefixed with '+' were added.
trunk/Source/JavaScriptCore/jit/JIT.cpp
--- trunk/Source/JavaScriptCore/jit/JIT.cpp (r278656)
+++ trunk/Source/JavaScriptCore/jit/JIT.cpp (r279049)
@@ -56 +56 @@
 }
 
-#if ENABLE(EXTRA_CTI_THUNKS)
-#if CPU(ARM64) || (CPU(X86_64) && !OS(WINDOWS))
-// These are supported ports.
-#else
-// This is a courtesy reminder (and warning) that the implementation of EXTRA_CTI_THUNKS can
-// use up to 6 argument registers and/or 6/7 temp registers, and make use of ARM64 like
-// features. Hence, it may not work for many other ports without significant work. If you
-// plan on adding EXTRA_CTI_THUNKS support for your port, please remember to search the
-// EXTRA_CTI_THUNKS code for CPU(ARM64) and CPU(X86_64) conditional code, and add support
-// for your port there as well.
-#error "unsupported architecture"
-#endif
-#endif // ENABLE(EXTRA_CTI_THUNKS)
-
 Seconds totalBaselineCompileTime;
 Seconds totalDFGCompileTime;
@@ -98 +84 @@
 }
 
-#if ENABLE(DFG_JIT) && !ENABLE(EXTRA_CTI_THUNKS)
+#if ENABLE(DFG_JIT)
 void JIT::emitEnterOptimizationCheck()
 {
@@ -116 +102 @@
     skipOptimize.link(this);
 }
-#endif // ENABLE(DFG_JIT) && !ENABLE(EXTRA_CTI_THUNKS)
+#endif
 
 void JIT::emitNotifyWrite(WatchpointSet* set)
@@ -697 +683 @@
 }
 
-static inline unsigned prologueGeneratorSelector(bool doesProfiling, bool isConstructor, bool hasHugeFrame)
-{
-    return doesProfiling << 2 | isConstructor << 1 | hasHugeFrame << 0;
-}
-
-#define FOR_EACH_NON_PROFILING_PROLOGUE_GENERATOR(v) \
-    v(!doesProfiling, !isConstructor, !hasHugeFrame, prologueGenerator0, arityFixup_prologueGenerator0) \
-    v(!doesProfiling, !isConstructor,  hasHugeFrame, prologueGenerator1, arityFixup_prologueGenerator1) \
-    v(!doesProfiling,  isConstructor, !hasHugeFrame, prologueGenerator2, arityFixup_prologueGenerator2) \
-    v(!doesProfiling,  isConstructor,  hasHugeFrame, prologueGenerator3, arityFixup_prologueGenerator3)
-
-#if ENABLE(DFG_JIT)
-#define FOR_EACH_PROFILING_PROLOGUE_GENERATOR(v) \
-    v( doesProfiling, !isConstructor, !hasHugeFrame, prologueGenerator4, arityFixup_prologueGenerator4) \
-    v( doesProfiling, !isConstructor,  hasHugeFrame, prologueGenerator5, arityFixup_prologueGenerator5) \
-    v( doesProfiling,  isConstructor, !hasHugeFrame, prologueGenerator6, arityFixup_prologueGenerator6) \
-    v( doesProfiling,  isConstructor,  hasHugeFrame, prologueGenerator7, arityFixup_prologueGenerator7)
-
-#else // not ENABLE(DFG_JIT)
-#define FOR_EACH_PROFILING_PROLOGUE_GENERATOR(v)
-#endif // ENABLE(DFG_JIT)
-
-#define FOR_EACH_PROLOGUE_GENERATOR(v) \
-    FOR_EACH_NON_PROFILING_PROLOGUE_GENERATOR(v) \
-    FOR_EACH_PROFILING_PROLOGUE_GENERATOR(v)
-
 void JIT::compileAndLinkWithoutFinalizing(JITCompilationEffort effort)
 {
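The eight prologue thunks removed in the hunk above were indexed by packing three boolean properties of the code block into a three-bit table selector. The following standalone sketch (plain C++ with illustrative assertions, independent of JSC's types) mirrors the mapping that the removed prologueGeneratorSelector() computed:

    #include <cassert>
    #include <cstdio>

    // Mirrors the removed prologueGeneratorSelector(): one bit per flag, giving
    // indices 0..7 into the table built by FOR_EACH_PROLOGUE_GENERATOR.
    static inline unsigned prologueGeneratorSelector(bool doesProfiling, bool isConstructor, bool hasHugeFrame)
    {
        return doesProfiling << 2 | isConstructor << 1 | hasHugeFrame << 0;
    }

    int main()
    {
        // prologueGenerator0..3 are the non-profiling variants; 4..7 profile.
        assert(prologueGeneratorSelector(false, false, false) == 0);
        assert(prologueGeneratorSelector(false, true, true) == 3);
        assert(prologueGeneratorSelector(true, false, true) == 5);
        assert(prologueGeneratorSelector(true, true, true) == 7);
        std::printf("all selector checks passed\n");
    }

Bit 2 selects the profiling variants, which is why prologueGenerator4 through prologueGenerator7 were only defined when DFG_JIT is enabled.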
@@ -791 +751 @@
 
     emitFunctionPrologue();
-
-#if !ENABLE(EXTRA_CTI_THUNKS)
     emitPutToCallFrameHeader(m_codeBlock, CallFrameSlot::codeBlock);
 
@@ -814 +772 @@
     ASSERT(!m_bytecodeIndex);
     if (shouldEmitProfiling()) {
-        // If this is a constructor, then we want to put in a dummy profiling site (to
-        // keep things consistent) but we don't actually want to record the dummy value.
-        unsigned startArgument = m_codeBlock->isConstructor() ? 1 : 0;
-        for (unsigned argument = startArgument; argument < m_codeBlock->numParameters(); ++argument) {
+        for (unsigned argument = 0; argument < m_codeBlock->numParameters(); ++argument) {
+            // If this is a constructor, then we want to put in a dummy profiling site (to
+            // keep things consistent) but we don't actually want to record the dummy value.
+            if (m_codeBlock->isConstructor() && !argument)
+                continue;
             int offset = CallFrame::argumentOffsetIncludingThis(argument) * static_cast<int>(sizeof(Register));
 #if USE(JSVALUE64)
@@ -831 +790 @@
         }
     }
-#else // ENABLE(EXTRA_CTI_THUNKS)
-    constexpr GPRReg codeBlockGPR = regT7;
-    ASSERT(!m_bytecodeIndex);
-
-    int frameTopOffset = stackPointerOffsetFor(m_codeBlock) * sizeof(Register);
-    unsigned maxFrameSize = -frameTopOffset;
-
-    bool doesProfiling = (m_codeBlock->codeType() == FunctionCode) && shouldEmitProfiling();
-    bool isConstructor = m_codeBlock->isConstructor();
-    bool hasHugeFrame = maxFrameSize > Options::reservedZoneSize();
-
-    static constexpr ThunkGenerator generators[] = {
-#define USE_PROLOGUE_GENERATOR(doesProfiling, isConstructor, hasHugeFrame, name, arityFixupName) name,
-        FOR_EACH_PROLOGUE_GENERATOR(USE_PROLOGUE_GENERATOR)
-#undef USE_PROLOGUE_GENERATOR
-    };
-    static constexpr unsigned numberOfGenerators = sizeof(generators) / sizeof(generators[0]);
-
-    move(TrustedImmPtr(m_codeBlock), codeBlockGPR);
-
-    unsigned generatorSelector = prologueGeneratorSelector(doesProfiling, isConstructor, hasHugeFrame);
-    RELEASE_ASSERT(generatorSelector < numberOfGenerators);
-    auto generator = generators[generatorSelector];
-    emitNakedNearCall(vm().getCTIStub(generator).retaggedCode<NoPtrTag>());
-
-    Label bodyLabel(this);
-#endif // !ENABLE(EXTRA_CTI_THUNKS)
-
 
     RELEASE_ASSERT(!JITCode::isJIT(m_codeBlock->jitType()));
 
@@ -872 +804 @@
     m_pcToCodeOriginMapBuilder.appendItem(label(), PCToCodeOriginMapBuilder::defaultCodeOrigin());
 
-#if !ENABLE(EXTRA_CTI_THUNKS)
     stackOverflow.link(this);
     m_bytecodeIndex = BytecodeIndex(0);
     if (maxFrameExtentForSlowPathCall)
         addPtr(TrustedImm32(-static_cast<int32_t>(maxFrameExtentForSlowPathCall)), stackPointerRegister);
     callOperationWithCallFrameRollbackOnException(operationThrowStackOverflowError, m_codeBlock);
-#endif
 
     // If the number of parameters is 1, we never require arity fixup.
     bool requiresArityFixup = m_codeBlock->m_numParameters != 1;
     if (m_codeBlock->codeType() == FunctionCode && requiresArityFixup) {
         m_arityCheck = label();
-#if !ENABLE(EXTRA_CTI_THUNKS)
         store8(TrustedImm32(0), &m_codeBlock->m_shouldAlwaysBeInlined);
         emitFunctionPrologue();
@@ -903 +832 @@
         emitNakedNearCall(m_vm->getCTIStub(arityFixupGenerator).retaggedCode<NoPtrTag>());
 
-        jump(beginLabel);
-
-#else // ENABLE(EXTRA_CTI_THUNKS)
-        emitFunctionPrologue();
-
-        static_assert(codeBlockGPR == regT7);
-        ASSERT(!m_bytecodeIndex);
-
-        static constexpr ThunkGenerator generators[] = {
-#define USE_PROLOGUE_GENERATOR(doesProfiling, isConstructor, hasHugeFrame, name, arityFixupName) arityFixupName,
-            FOR_EACH_PROLOGUE_GENERATOR(USE_PROLOGUE_GENERATOR)
-#undef USE_PROLOGUE_GENERATOR
-        };
-        static constexpr unsigned numberOfGenerators = sizeof(generators) / sizeof(generators[0]);
-
-        move(TrustedImmPtr(m_codeBlock), codeBlockGPR);
-
-        RELEASE_ASSERT(generatorSelector < numberOfGenerators);
-        auto generator = generators[generatorSelector];
-        RELEASE_ASSERT(generator);
-        emitNakedNearCall(vm().getCTIStub(generator).retaggedCode<NoPtrTag>());
-
-        jump(bodyLabel);
-#endif // !ENABLE(EXTRA_CTI_THUNKS)
-
 #if ASSERT_ENABLED
         m_bytecodeIndex = BytecodeIndex(); // Reset this, in order to guard its use with ASSERTs.
 #endif
+
+        jump(beginLabel);
     } else
         m_arityCheck = entryLabel; // Never require arity fixup.
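Both the restored inline arity check above and the removed arityFixup_prologueGenerator* thunks start from the same decision: compare the incoming argumentCountIncludingThis against the callee's parameter count, and only enter arity fixup when too few arguments were passed. A minimal model of that branch follows (illustrative names, not JSC API; the real slow path also consults operationCallArityCheck / operationConstructArityCheck before fixing up):

    #include <cassert>

    // Illustrative model of the thunk's branch32(AboveOrEqual, argumentCount,
    // numParameters) and the inline path's equivalent branch.
    static bool needsArityFixup(unsigned argumentCountIncludingThis, unsigned numParameters)
    {
        // AboveOrEqual taken => enough arguments; fall through to the normal prologue.
        return argumentCountIncludingThis < numParameters;
    }

    int main()
    {
        assert(!needsArityFixup(3, 3)); // exact arity: no fixup
        assert(!needsArityFixup(5, 3)); // extra arguments need no fixup
        assert(needsArityFixup(1, 3));  // missing arguments: the slow path shifts
                                        // the frame and fills in undefined
    }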
@@ -936 +842 @@
     ASSERT(m_jmpTable.isEmpty());
 
-#if !ENABLE(EXTRA_CTI_THUNKS)
     privateCompileExceptionHandlers();
-#endif
 
     if (m_disassembler)
@@ -947 +851 @@
     link();
 }
-
-#if ENABLE(EXTRA_CTI_THUNKS)
-MacroAssemblerCodeRef<JITThunkPtrTag> JIT::prologueGenerator(VM& vm, bool doesProfiling, bool isConstructor, bool hasHugeFrame, const char* thunkName)
-{
-    // This function generates the Baseline JIT's prologue code. It is not useable by other tiers.
-    constexpr GPRReg codeBlockGPR = regT7; // incoming.
-
-    constexpr int virtualRegisterSize = static_cast<int>(sizeof(Register));
-    constexpr int virtualRegisterSizeShift = 3;
-    static_assert((1 << virtualRegisterSizeShift) == virtualRegisterSize);
-
-    tagReturnAddress();
-
-    storePtr(codeBlockGPR, addressFor(CallFrameSlot::codeBlock));
-
-    load32(Address(codeBlockGPR, CodeBlock::offsetOfNumCalleeLocals()), regT1);
-    if constexpr (maxFrameExtentForSlowPathCallInRegisters)
-        add32(TrustedImm32(maxFrameExtentForSlowPathCallInRegisters), regT1);
-    lshift32(TrustedImm32(virtualRegisterSizeShift), regT1);
-    neg64(regT1);
-#if ASSERT_ENABLED
-    Probe::Function probeFunction = [] (Probe::Context& context) {
-        CodeBlock* codeBlock = context.fp<CallFrame*>()->codeBlock();
-        int64_t frameTopOffset = stackPointerOffsetFor(codeBlock) * sizeof(Register);
-        RELEASE_ASSERT(context.gpr<intptr_t>(regT1) == frameTopOffset);
-    };
-    probe(tagCFunctionPtr<JITProbePtrTag>(probeFunction), nullptr);
-#endif
-
-    addPtr(callFrameRegister, regT1);
-
-    JumpList stackOverflow;
-    if (hasHugeFrame)
-        stackOverflow.append(branchPtr(Above, regT1, callFrameRegister));
-    stackOverflow.append(branchPtr(Above, AbsoluteAddress(vm.addressOfSoftStackLimit()), regT1));
-
-    // We'll be imminently returning with a `retab` (ARM64E's return with authentication
-    // using the B key) in the normal path (see MacroAssemblerARM64E's implementation of
-    // ret()), which will do validation. So, extra validation here is redundant and unnecessary.
-    untagReturnAddressWithoutExtraValidation();
-#if CPU(X86_64)
-    pop(regT2); // Save the return address.
-#endif
-    move(regT1, stackPointerRegister);
-    tagReturnAddress();
-    checkStackPointerAlignment();
-#if CPU(X86_64)
-    push(regT2); // Restore the return address.
-#endif
-
-    emitSaveCalleeSavesForBaselineJIT();
-    emitMaterializeTagCheckRegisters();
-
-    if (doesProfiling) {
-        constexpr GPRReg argumentValueProfileGPR = regT6;
-        constexpr GPRReg numParametersGPR = regT5;
-        constexpr GPRReg argumentGPR = regT4;
-
-        load32(Address(codeBlockGPR, CodeBlock::offsetOfNumParameters()), numParametersGPR);
-        loadPtr(Address(codeBlockGPR, CodeBlock::offsetOfArgumentValueProfiles()), argumentValueProfileGPR);
-        if (isConstructor)
-            addPtr(TrustedImm32(sizeof(ValueProfile)), argumentValueProfileGPR);
-
-        int startArgument = CallFrameSlot::thisArgument + (isConstructor ? 1 : 0);
-        int startArgumentOffset = startArgument * virtualRegisterSize;
-        move(TrustedImm64(startArgumentOffset), argumentGPR);
-
-        add32(TrustedImm32(static_cast<int>(CallFrameSlot::thisArgument)), numParametersGPR);
-        lshift32(TrustedImm32(virtualRegisterSizeShift), numParametersGPR);
-
-        addPtr(callFrameRegister, argumentGPR);
-        addPtr(callFrameRegister, numParametersGPR);
-
-        Label loopStart(this);
-        Jump done = branchPtr(AboveOrEqual, argumentGPR, numParametersGPR);
-        {
-            load64(Address(argumentGPR), regT0);
-            store64(regT0, Address(argumentValueProfileGPR, OBJECT_OFFSETOF(ValueProfile, m_buckets)));
-
-            // The argument ValueProfiles are stored in a FixedVector. Hence, the
-            // address of the next profile can be trivially computed with an increment.
-            addPtr(TrustedImm32(sizeof(ValueProfile)), argumentValueProfileGPR);
-            addPtr(TrustedImm32(virtualRegisterSize), argumentGPR);
-            jump().linkTo(loopStart, this);
-        }
-        done.link(this);
-    }
-    ret();
-
-    stackOverflow.link(this);
-#if CPU(X86_64)
-    addPtr(TrustedImm32(1 * sizeof(CPURegister)), stackPointerRegister); // discard return address.
-#endif
-
-    uint32_t locationBits = CallSiteIndex(0).bits();
-    store32(TrustedImm32(locationBits), tagFor(CallFrameSlot::argumentCountIncludingThis));
-
-    if (maxFrameExtentForSlowPathCall)
-        addPtr(TrustedImm32(-static_cast<int32_t>(maxFrameExtentForSlowPathCall)), stackPointerRegister);
-
-    setupArguments<decltype(operationThrowStackOverflowError)>(codeBlockGPR);
-    prepareCallOperation(vm);
-    MacroAssembler::Call operationCall = call(OperationPtrTag);
-    Jump handleExceptionJump = jump();
-
-    auto handler = vm.getCTIStub(handleExceptionWithCallFrameRollbackGenerator);
-
-    LinkBuffer patchBuffer(*this, GLOBAL_THUNK_ID, LinkBuffer::Profile::ExtraCTIThunk);
-    patchBuffer.link(operationCall, FunctionPtr<OperationPtrTag>(operationThrowStackOverflowError));
-    patchBuffer.link(handleExceptionJump, CodeLocationLabel(handler.retaggedCode<NoPtrTag>()));
-    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, thunkName);
-}
-
-static constexpr bool doesProfiling = true;
-static constexpr bool isConstructor = true;
-static constexpr bool hasHugeFrame = true;
-
-#define DEFINE_PROGLOGUE_GENERATOR(doesProfiling, isConstructor, hasHugeFrame, name, arityFixupName) \
-    MacroAssemblerCodeRef<JITThunkPtrTag> JIT::name(VM& vm) \
-    { \
-        JIT jit(vm); \
-        return jit.prologueGenerator(vm, doesProfiling, isConstructor, hasHugeFrame, "Baseline: " #name); \
-    }
-
-FOR_EACH_PROLOGUE_GENERATOR(DEFINE_PROGLOGUE_GENERATOR)
-#undef DEFINE_PROGLOGUE_GENERATOR
-
-MacroAssemblerCodeRef<JITThunkPtrTag> JIT::arityFixupPrologueGenerator(VM& vm, bool isConstructor, ThunkGenerator normalPrologueGenerator, const char* thunkName)
-{
-    // This function generates the Baseline JIT's prologue code. It is not useable by other tiers.
-    constexpr GPRReg codeBlockGPR = regT7; // incoming.
-    constexpr GPRReg numParametersGPR = regT6;
-
-    tagReturnAddress();
-#if CPU(X86_64)
-    push(framePointerRegister);
-#elif CPU(ARM64)
-    pushPair(framePointerRegister, linkRegister);
-#endif
-
-    storePtr(codeBlockGPR, addressFor(CallFrameSlot::codeBlock));
-    store8(TrustedImm32(0), Address(codeBlockGPR, CodeBlock::offsetOfShouldAlwaysBeInlined()));
-
-    load32(payloadFor(CallFrameSlot::argumentCountIncludingThis), regT1);
-    load32(Address(codeBlockGPR, CodeBlock::offsetOfNumParameters()), numParametersGPR);
-    Jump noFixupNeeded = branch32(AboveOrEqual, regT1, numParametersGPR);
-
-    if constexpr (maxFrameExtentForSlowPathCall)
-        addPtr(TrustedImm32(-static_cast<int32_t>(maxFrameExtentForSlowPathCall)), stackPointerRegister);
-
-    loadPtr(Address(codeBlockGPR, CodeBlock::offsetOfGlobalObject()), argumentGPR0);
-
-    static_assert(std::is_same<decltype(operationConstructArityCheck), decltype(operationCallArityCheck)>::value);
-    setupArguments<decltype(operationCallArityCheck)>(argumentGPR0);
-    prepareCallOperation(vm);
-
-    MacroAssembler::Call arityCheckCall = call(OperationPtrTag);
-    Jump handleExceptionJump = emitNonPatchableExceptionCheck(vm);
-
-    if constexpr (maxFrameExtentForSlowPathCall)
-        addPtr(TrustedImm32(maxFrameExtentForSlowPathCall), stackPointerRegister);
-    Jump needFixup = branchTest32(NonZero, returnValueGPR);
-    noFixupNeeded.link(this);
-
-    // The normal prologue expects incoming codeBlockGPR.
-    load64(addressFor(CallFrameSlot::codeBlock), codeBlockGPR);
-
-#if CPU(X86_64)
-    pop(framePointerRegister);
-#elif CPU(ARM64)
-    popPair(framePointerRegister, linkRegister);
-#endif
-    untagReturnAddress();
-
-    JumpList normalPrologueJump;
-    normalPrologueJump.append(jump());
-
-    needFixup.link(this);
-
-    // Restore the stack for arity fixup, and preserve the return address.
-    // arityFixupGenerator will be shifting the stack. So, we can't use the stack to
-    // preserve the return address. We also can't use callee saved registers because
-    // they haven't been saved yet.
-    //
-    // arityFixupGenerator is carefully crafted to only use a0, a1, a2, t3, t4 and t5.
-    // So, the return address can be preserved in regT7.
-#if CPU(X86_64)
-    pop(argumentGPR2); // discard.
-    pop(regT7); // save return address.
-#elif CPU(ARM64)
-    popPair(framePointerRegister, linkRegister);
-    untagReturnAddress();
-    move(linkRegister, regT7);
-    auto randomReturnAddressTag = random();
-    move(TrustedImm32(randomReturnAddressTag), regT1);
-    tagPtr(regT1, regT7);
-#endif
-    move(returnValueGPR, GPRInfo::argumentGPR0);
-    Call arityFixupCall = nearCall();
-
-#if CPU(X86_64)
-    push(regT7); // restore return address.
-#elif CPU(ARM64)
-    move(TrustedImm32(randomReturnAddressTag), regT1);
-    untagPtr(regT1, regT7);
-    move(regT7, linkRegister);
-#endif
-
-    load64(addressFor(CallFrameSlot::codeBlock), codeBlockGPR);
-    normalPrologueJump.append(jump());
-
-    auto arityCheckOperation = isConstructor ? operationConstructArityCheck : operationCallArityCheck;
-    auto arityFixup = vm.getCTIStub(arityFixupGenerator);
-    auto normalPrologue = vm.getCTIStub(normalPrologueGenerator);
-    auto exceptionHandler = vm.getCTIStub(popThunkStackPreservesAndHandleExceptionGenerator);
-
-    LinkBuffer patchBuffer(*this, GLOBAL_THUNK_ID, LinkBuffer::Profile::ExtraCTIThunk);
-    patchBuffer.link(arityCheckCall, FunctionPtr<OperationPtrTag>(arityCheckOperation));
-    patchBuffer.link(arityFixupCall, FunctionPtr(arityFixup.retaggedCode<NoPtrTag>()));
-    patchBuffer.link(normalPrologueJump, CodeLocationLabel(normalPrologue.retaggedCode<NoPtrTag>()));
-    patchBuffer.link(handleExceptionJump, CodeLocationLabel(exceptionHandler.retaggedCode<NoPtrTag>()));
-    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, thunkName);
-}
-
-#define DEFINE_ARITY_PROGLOGUE_GENERATOR(doesProfiling, isConstructor, hasHugeFrame, name, arityFixupName) \
-    MacroAssemblerCodeRef<JITThunkPtrTag> JIT::arityFixupName(VM& vm) \
-    { \
-        JIT jit(vm); \
-        return jit.arityFixupPrologueGenerator(vm, isConstructor, name, "Baseline: " #arityFixupName); \
-    }
-
-FOR_EACH_PROLOGUE_GENERATOR(DEFINE_ARITY_PROGLOGUE_GENERATOR)
-#undef DEFINE_ARITY_PROGLOGUE_GENERATOR
-
-#endif // ENABLE(EXTRA_CTI_THUNKS)
 
 void JIT::link()
@@ -1383 +1052 @@
 }
 
+void JIT::privateCompileExceptionHandlers()
+{
 #if !ENABLE(EXTRA_CTI_THUNKS)
-void JIT::privateCompileExceptionHandlers()
-{
     if (!m_exceptionChecksWithCallFrameRollback.empty()) {
         m_exceptionChecksWithCallFrameRollback.link(this);
@@ -1410 +1079 @@
         jumpToExceptionHandler(vm());
     }
-}
-#endif // !ENABLE(EXTRA_CTI_THUNKS)
+#endif // ENABLE(EXTRA_CTI_THUNKS)
+}
 
 void JIT::doMainThreadPreparationBeforeCompile()
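For completeness, the argument-profiling loop, emitted as machine code by the removed prologueGenerator() and restored as a C++ loop in compileAndLinkWithoutFinalizing() above, writes each incoming argument value into its ValueProfile bucket while keeping a dummy slot for `this` in constructors without recording into it. A simplified model of what that loop computes (toy types, not JSC's ValueProfile):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Toy stand-in for JSC's argument ValueProfile list; only the shape of the
    // copy loop matters here.
    struct ToyValueProfile {
        uint64_t bucket { 0 };
    };

    // argumentSlots[0] is `this`; profiles[i] corresponds to argument slot i.
    static void profileArguments(const std::vector<uint64_t>& argumentSlots,
        std::vector<ToyValueProfile>& profiles, bool isConstructor)
    {
        // Constructors keep a dummy profile for `this` so the indices stay
        // consistent, but never record into it.
        for (size_t i = isConstructor ? 1 : 0; i < argumentSlots.size(); ++i)
            profiles[i].bucket = argumentSlots[i];
    }

    int main()
    {
        std::vector<uint64_t> args { 0x10, 0x20, 0x30 }; // `this`, arg1, arg2
        std::vector<ToyValueProfile> profiles(args.size());
        profileArguments(args, profiles, /* isConstructor */ true);
        assert(profiles[0].bucket == 0);    // dummy `this` profile left untouched
        assert(profiles[1].bucket == 0x20); // argument values were recorded
        assert(profiles[2].bucket == 0x30);
    }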