source: webkit/trunk/Source/JavaScriptCore/jit/JITMulGenerator.cpp

Last change on this file was 281355, checked in by [email protected], 4 years ago

[JSC] Simplify moveIntsToDouble
https://bugs.webkit.org/show_bug.cgi?id=229351

Reviewed by Saam Barati.

MacroAssembler::moveIntsToDouble used to take a scratch FPRReg, but that scratch was only needed by MacroAssemblerX86, which has already been removed.
This means the scratch FPRReg is no longer necessary, and dropping it makes a lot of IC and property-access code simpler.
This patch removes that scratch FPRReg from moveIntsToDouble and from many arithmetic ICs. This matters for the modern PutByVal
IC: some property accesses needed an FPRReg only because of MacroAssembler::moveIntsToDouble, which would have forced adding a new m_scratch2FPR
to AccessCase. After this simplification, that is no longer necessary.
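
A minimal sketch of what the simplification looks like on the 32-bit (JSVALUE32_64) unboxing path (illustrative only; the exact signatures and parameter names below are assumptions, not the patch itself):

    // Before: callers had to thread a scratch FPR down to moveIntsToDouble,
    // even though only the already-removed MacroAssemblerX86 backend used it.
    void unboxDouble(JSValueRegs regs, FPRReg destFPR, FPRReg scratchFPR)
    {
        moveIntsToDouble(regs.payloadGPR(), regs.tagGPR(), destFPR, scratchFPR);
    }

    // After: no scratch FPR parameter, so arithmetic ICs and property-access
    // code (e.g. AccessCase) no longer need to reserve an extra FPR for it.
    void unboxDouble(JSValueRegs regs, FPRReg destFPR)
    {
        moveIntsToDouble(regs.payloadGPR(), regs.tagGPR(), destFPR);
    }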

  • assembler/MacroAssemblerARMv7.h:

(JSC::MacroAssemblerARMv7::moveIntsToDouble):

  • assembler/MacroAssemblerMIPS.h:

(JSC::MacroAssemblerMIPS::moveIntsToDouble):

  • dfg/DFGSpeculativeJIT.cpp:

(JSC::DFG::SpeculativeJIT::compileValueToInt32):
(JSC::DFG::SpeculativeJIT::compileDoubleRep):
(JSC::DFG::SpeculativeJIT::emitUntypedOrBigIntRightShiftBitOp):
(JSC::DFG::SpeculativeJIT::compileValueAdd):
(JSC::DFG::SpeculativeJIT::compileValueSub):
(JSC::DFG::SpeculativeJIT::compileMathIC):
(JSC::DFG::SpeculativeJIT::compileValueNegate):
(JSC::DFG::SpeculativeJIT::compileValueMul):
(JSC::DFG::SpeculativeJIT::speculateRealNumber):
(JSC::DFG::SpeculativeJIT::compileNormalizeMapKey):

  • dfg/DFGSpeculativeJIT.h:

(JSC::DFG::SpeculativeJIT::unboxDouble):

  • ftl/FTLLowerDFGToB3.cpp:

(JSC::FTL::DFG::LowerDFGToB3::compileBinaryMathIC):
(JSC::FTL::DFG::LowerDFGToB3::compileCompareStrictEq):

  • jit/AssemblyHelpers.cpp:

(JSC::AssemblyHelpers::emitConvertValueToBoolean):
(JSC::AssemblyHelpers::branchIfValue):

  • jit/AssemblyHelpers.h:

(JSC::AssemblyHelpers::unboxDoubleNonDestructive):
(JSC::AssemblyHelpers::unboxDouble):

  • jit/JITAddGenerator.cpp:

(JSC::JITAddGenerator::generateFastPath):

  • jit/JITAddGenerator.h:

(JSC::JITAddGenerator::JITAddGenerator):

  • jit/JITArithmetic.cpp:

(JSC::JIT::emitRightShiftFastPath):
(JSC::JIT::emitMathICFast):

  • jit/JITDivGenerator.cpp:

(JSC::JITDivGenerator::loadOperand):

  • jit/JITMulGenerator.cpp:

(JSC::JITMulGenerator::generateInline):
(JSC::JITMulGenerator::generateFastPath):

  • jit/JITMulGenerator.h:

(JSC::JITMulGenerator::JITMulGenerator):

  • jit/JITPropertyAccess.cpp:

(JSC::JIT::emitFloatTypedArrayPutByVal):

  • jit/JITPropertyAccess32_64.cpp:

(JSC::JIT::emitGenericContiguousPutByVal):

  • jit/JITRightShiftGenerator.cpp:

(JSC::JITRightShiftGenerator::generateFastPath):

  • jit/JITRightShiftGenerator.h:

(JSC::JITRightShiftGenerator::JITRightShiftGenerator):

  • jit/JITSubGenerator.cpp:

(JSC::JITSubGenerator::generateInline):
(JSC::JITSubGenerator::generateFastPath):

  • jit/JITSubGenerator.h:

(JSC::JITSubGenerator::JITSubGenerator):

File size: 11.0 KB
/*
 * Copyright (C) 2015-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITMulGenerator.h"

#if ENABLE(JIT)

#include "ArithProfile.h"
#include "JITMathIC.h"

namespace JSC {

JITMathICInlineResult JITMulGenerator::generateInline(CCallHelpers& jit, MathICGenerationState& state, const BinaryArithProfile* arithProfile)
{
    // We default to speculating int32.
    ObservedType lhs = ObservedType().withInt32();
    ObservedType rhs = ObservedType().withInt32();
    if (arithProfile) {
        lhs = arithProfile->lhsObservedType();
        rhs = arithProfile->rhsObservedType();
    }

    if (lhs.isOnlyNonNumber() && rhs.isOnlyNonNumber())
        return JITMathICInlineResult::DontGenerate;

    if (lhs.isOnlyNumber() && rhs.isOnlyNumber() && !m_leftOperand.isConst() && !m_rightOperand.isConst()) {
        if (!jit.supportsFloatingPoint())
            return JITMathICInlineResult::DontGenerate;

        ASSERT(m_left);
        ASSERT(m_right);
        if (!m_leftOperand.definitelyIsNumber())
            state.slowPathJumps.append(jit.branchIfNotNumber(m_left, m_scratchGPR));
        if (!m_rightOperand.definitelyIsNumber())
            state.slowPathJumps.append(jit.branchIfNotNumber(m_right, m_scratchGPR));
        state.slowPathJumps.append(jit.branchIfInt32(m_left));
        state.slowPathJumps.append(jit.branchIfInt32(m_right));
        jit.unboxDoubleNonDestructive(m_left, m_leftFPR, m_scratchGPR);
        jit.unboxDoubleNonDestructive(m_right, m_rightFPR, m_scratchGPR);
        jit.mulDouble(m_rightFPR, m_leftFPR);
        jit.boxDouble(m_leftFPR, m_result);

        return JITMathICInlineResult::GeneratedFastPath;
    }

    if ((lhs.isOnlyInt32() || m_leftOperand.isPositiveConstInt32()) && (rhs.isOnlyInt32() || m_rightOperand.isPositiveConstInt32())) {
        ASSERT(!m_leftOperand.isPositiveConstInt32() || !m_rightOperand.isPositiveConstInt32());
        if (!m_leftOperand.isPositiveConstInt32())
            state.slowPathJumps.append(jit.branchIfNotInt32(m_left));
        if (!m_rightOperand.isPositiveConstInt32())
            state.slowPathJumps.append(jit.branchIfNotInt32(m_right));

        if (m_leftOperand.isPositiveConstInt32() || m_rightOperand.isPositiveConstInt32()) {
            JSValueRegs var = m_leftOperand.isPositiveConstInt32() ? m_right : m_left;
            int32_t constValue = m_leftOperand.isPositiveConstInt32() ? m_leftOperand.asConstInt32() : m_rightOperand.asConstInt32();
            state.slowPathJumps.append(jit.branchMul32(CCallHelpers::Overflow, var.payloadGPR(), CCallHelpers::Imm32(constValue), m_scratchGPR));
        } else {
            state.slowPathJumps.append(jit.branchMul32(CCallHelpers::Overflow, m_right.payloadGPR(), m_left.payloadGPR(), m_scratchGPR));
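            // A zero int32 product may really need to be -0 in JS (e.g. -1 * 0), which only a
            // double can represent, so bail out whenever the result is zero.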
            state.slowPathJumps.append(jit.branchTest32(CCallHelpers::Zero, m_scratchGPR)); // Go slow if potential negative zero.
        }
        jit.boxInt32(m_scratchGPR, m_result);

        return JITMathICInlineResult::GeneratedFastPath;
    }

    return JITMathICInlineResult::GenerateFullSnippet;
}

bool JITMulGenerator::generateFastPath(CCallHelpers& jit, CCallHelpers::JumpList& endJumpList, CCallHelpers::JumpList& slowPathJumpList, const BinaryArithProfile* arithProfile, bool shouldEmitProfiling)
{
    ASSERT(m_scratchGPR != InvalidGPRReg);
    ASSERT(m_scratchGPR != m_left.payloadGPR());
    ASSERT(m_scratchGPR != m_right.payloadGPR());
#if USE(JSVALUE64)
    ASSERT(m_scratchGPR != m_result.payloadGPR());
#else
    ASSERT(m_scratchGPR != m_left.tagGPR());
    ASSERT(m_scratchGPR != m_right.tagGPR());
#endif

    ASSERT(!m_leftOperand.isPositiveConstInt32() || !m_rightOperand.isPositiveConstInt32());

    if (!m_leftOperand.mightBeNumber() || !m_rightOperand.mightBeNumber())
        return false;

    if (m_leftOperand.isPositiveConstInt32() || m_rightOperand.isPositiveConstInt32()) {
        JSValueRegs var = m_leftOperand.isPositiveConstInt32() ? m_right : m_left;
        SnippetOperand& varOpr = m_leftOperand.isPositiveConstInt32() ? m_rightOperand : m_leftOperand;
        SnippetOperand& constOpr = m_leftOperand.isPositiveConstInt32() ? m_leftOperand : m_rightOperand;

        // Try to do intVar * intConstant.
        CCallHelpers::Jump notInt32 = jit.branchIfNotInt32(var);

        GPRReg multiplyResultGPR = m_result.payloadGPR();
        if (multiplyResultGPR == var.payloadGPR())
            multiplyResultGPR = m_scratchGPR;

        slowPathJumpList.append(jit.branchMul32(CCallHelpers::Overflow, var.payloadGPR(), CCallHelpers::Imm32(constOpr.asConstInt32()), multiplyResultGPR));

        jit.boxInt32(multiplyResultGPR, m_result);
        endJumpList.append(jit.jump());

        if (!jit.supportsFloatingPoint()) {
            slowPathJumpList.append(notInt32);
            return true;
        }

        // Try to do doubleVar * double(intConstant).
        notInt32.link(&jit);
        if (!varOpr.definitelyIsNumber())
            slowPathJumpList.append(jit.branchIfNotNumber(var, m_scratchGPR));

        jit.unboxDoubleNonDestructive(var, m_leftFPR, m_scratchGPR);

        jit.move(CCallHelpers::Imm32(constOpr.asConstInt32()), m_scratchGPR);
        jit.convertInt32ToDouble(m_scratchGPR, m_rightFPR);

        // Fall thru to doubleVar * doubleVar.

    } else {
        ASSERT(!m_leftOperand.isPositiveConstInt32() && !m_rightOperand.isPositiveConstInt32());

        CCallHelpers::Jump leftNotInt;
        CCallHelpers::Jump rightNotInt;

        // Try to do intVar * intVar.
        leftNotInt = jit.branchIfNotInt32(m_left);
        rightNotInt = jit.branchIfNotInt32(m_right);

        slowPathJumpList.append(jit.branchMul32(CCallHelpers::Overflow, m_right.payloadGPR(), m_left.payloadGPR(), m_scratchGPR));
        slowPathJumpList.append(jit.branchTest32(CCallHelpers::Zero, m_scratchGPR)); // Go slow if potential negative zero.

        jit.boxInt32(m_scratchGPR, m_result);
        endJumpList.append(jit.jump());

        if (!jit.supportsFloatingPoint()) {
            slowPathJumpList.append(leftNotInt);
            slowPathJumpList.append(rightNotInt);
            return true;
        }

        leftNotInt.link(&jit);
        if (!m_leftOperand.definitelyIsNumber())
            slowPathJumpList.append(jit.branchIfNotNumber(m_left, m_scratchGPR));
        if (!m_rightOperand.definitelyIsNumber())
            slowPathJumpList.append(jit.branchIfNotNumber(m_right, m_scratchGPR));

        jit.unboxDoubleNonDestructive(m_left, m_leftFPR, m_scratchGPR);
        CCallHelpers::Jump rightIsDouble = jit.branchIfNotInt32(m_right);

        jit.convertInt32ToDouble(m_right.payloadGPR(), m_rightFPR);
        CCallHelpers::Jump rightWasInteger = jit.jump();

        rightNotInt.link(&jit);
        if (!m_rightOperand.definitelyIsNumber())
            slowPathJumpList.append(jit.branchIfNotNumber(m_right, m_scratchGPR));

        jit.convertInt32ToDouble(m_left.payloadGPR(), m_leftFPR);

        rightIsDouble.link(&jit);
        jit.unboxDoubleNonDestructive(m_right, m_rightFPR, m_scratchGPR);

        rightWasInteger.link(&jit);

        // Fall thru to doubleVar * doubleVar.
    }

    // Do doubleVar * doubleVar.
    jit.mulDouble(m_rightFPR, m_leftFPR);

    if (!arithProfile || !shouldEmitProfiling)
        jit.boxDouble(m_leftFPR, m_result);
    else {
        // The Int52 overflow check below intentionally omits 1ll << 51 as a valid negative Int52 value.
        // Therefore, we will get a false positive if the result is that value. This is intentionally
        // done to simplify the checking algorithm.

        const int64_t negativeZeroBits = 1ll << 63;
#if USE(JSVALUE64)
        jit.moveDoubleTo64(m_leftFPR, m_result.payloadGPR());

        CCallHelpers::Jump notNegativeZero = jit.branch64(CCallHelpers::NotEqual, m_result.payloadGPR(), CCallHelpers::TrustedImm64(negativeZeroBits));

        arithProfile->emitUnconditionalSet(jit, ObservedResults::NegZeroDouble);
        CCallHelpers::Jump done = jit.jump();

        notNegativeZero.link(&jit);
        arithProfile->emitUnconditionalSet(jit, ObservedResults::NonNegZeroDouble);

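        // The next three instructions extract the IEEE 754 exponent field (bits 52-62) of the result.
        // An exponent field <= 0x431 (unbiased exponent <= 50) means |result| < 2^51, so the value still
        // fits in an Int52; anything larger is recorded as a potential Int52 overflow in the profile.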
        jit.move(m_result.payloadGPR(), m_scratchGPR);
        jit.urshiftPtr(CCallHelpers::Imm32(52), m_scratchGPR);
        jit.and32(CCallHelpers::Imm32(0x7ff), m_scratchGPR);
        CCallHelpers::Jump noInt52Overflow = jit.branch32(CCallHelpers::LessThanOrEqual, m_scratchGPR, CCallHelpers::TrustedImm32(0x431));

        arithProfile->emitUnconditionalSet(jit, ObservedResults::Int52Overflow);
        noInt52Overflow.link(&jit);

        done.link(&jit);
        jit.sub64(GPRInfo::numberTagRegister, m_result.payloadGPR()); // Box the double.
#else
        jit.boxDouble(m_leftFPR, m_result);
        CCallHelpers::JumpList notNegativeZero;
        notNegativeZero.append(jit.branch32(CCallHelpers::NotEqual, m_result.payloadGPR(), CCallHelpers::TrustedImm32(0)));
        notNegativeZero.append(jit.branch32(CCallHelpers::NotEqual, m_result.tagGPR(), CCallHelpers::TrustedImm32(negativeZeroBits >> 32)));

        arithProfile->emitUnconditionalSet(jit, ObservedResults::NegZeroDouble);
        CCallHelpers::Jump done = jit.jump();

        notNegativeZero.link(&jit);
        arithProfile->emitUnconditionalSet(jit, ObservedResults::NonNegZeroDouble);

        jit.move(m_result.tagGPR(), m_scratchGPR);
        jit.urshiftPtr(CCallHelpers::Imm32(52 - 32), m_scratchGPR);
        jit.and32(CCallHelpers::Imm32(0x7ff), m_scratchGPR);
        CCallHelpers::Jump noInt52Overflow = jit.branch32(CCallHelpers::LessThanOrEqual, m_scratchGPR, CCallHelpers::TrustedImm32(0x431));

        arithProfile->emitUnconditionalSet(jit, ObservedResults::Int52Overflow);

        endJumpList.append(noInt52Overflow);
        if (m_scratchGPR == m_result.tagGPR() || m_scratchGPR == m_result.payloadGPR())
            jit.boxDouble(m_leftFPR, m_result);

        endJumpList.append(done);
#endif
    }

    return true;
}

} // namespace JSC

#endif // ENABLE(JIT)