/*
 * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITInlineCacheGenerator.h"

#if ENABLE(JIT)

#include "BaselineJITRegisters.h"
#include "CCallHelpers.h"
#include "CacheableIdentifierInlines.h"
#include "CodeBlock.h"
#include "DFGJITCompiler.h"
#include "InlineAccess.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "StructureStubInfo.h"

namespace JSC {

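// The generator can be constructed against either a fully linked StructureStubInfo or an
// unlinked stub info (baseline or DFG), depending on which tier is compiling.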
JITInlineCacheGenerator::JITInlineCacheGenerator(
    CodeBlock*, CompileTimeStructureStubInfo stubInfo, JITType, CodeOrigin, AccessType)
{
    std::visit(WTF::makeVisitor(
        [&](StructureStubInfo* stubInfo) {
            m_stubInfo = stubInfo;
        },
        [&](BaselineUnlinkedStructureStubInfo* stubInfo) {
            m_unlinkedStubInfo = stubInfo;
        }
#if ENABLE(DFG_JIT)
        ,
        [&](DFG::UnlinkedStructureStubInfo* stubInfo) {
            m_unlinkedStubInfo = stubInfo;
        }
#endif
        ), stubInfo);
}

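// Records the IC's start and done labels from the fast-path link buffer, and the slow-path
// locations from the slow-path link buffer, into the stub info.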
void JITInlineCacheGenerator::finalize(
    LinkBuffer& fastPath, LinkBuffer& slowPath, CodeLocationLabel<JITStubRoutinePtrTag> start)
{
    ASSERT(m_stubInfo);
    m_stubInfo->startLocation = start;
    m_stubInfo->doneLocation = fastPath.locationOf<JSInternalPtrTag>(m_done);

    if (!m_stubInfo->useDataIC)
        m_stubInfo->m_slowPathCallLocation = slowPath.locationOf<JSInternalPtrTag>(m_slowPathCall);
    m_stubInfo->slowPathStartLocation = slowPath.locationOf<JITStubRoutinePtrTag>(m_slowPathBegin);
}

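// Data IC fast path: materialize the stub info pointer via loadConstant and jump through the
// code pointer stored in it, so the IC can be updated by a data store rather than code patching.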
#if ENABLE(DFG_JIT)
void JITInlineCacheGenerator::generateDFGDataICFastPath(DFG::JITCompiler& jit, unsigned stubInfoConstant, GPRReg stubInfoGPR)
{
    m_start = jit.label();
    jit.loadConstant(stubInfoConstant, stubInfoGPR);
    jit.farJump(CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    m_done = jit.label();
}
#endif

void JITInlineCacheGenerator::generateBaselineDataICFastPath(JIT& jit, unsigned stubInfo, GPRReg stubInfoGPR)
{
    m_start = jit.label();
    jit.loadConstant(stubInfo, stubInfoGPR);
    jit.farJump(CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    m_done = jit.label();
}

JITByIdGenerator::JITByIdGenerator(
    CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, AccessType accessType,
    JSValueRegs base, JSValueRegs value)
    : JITInlineCacheGenerator(codeBlock, stubInfo, jitType, codeOrigin, accessType)
    , m_base(base)
    , m_value(value)
{
}

void JITByIdGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    ASSERT(m_stubInfo);
    JITInlineCacheGenerator::finalize(fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
    if (m_stubInfo->useDataIC)
        m_stubInfo->m_codePtr = m_stubInfo->slowPathStartLocation;
}

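// Without a data IC, reserve a fixed-size inline region (a jump padded with nops up to
// inlineICSize) that InlineAccess can later repatch; with a data IC, just jump through the
// stub info's code pointer.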
void JITByIdGenerator::generateFastCommon(CCallHelpers& jit, size_t inlineICSize)
{
    m_start = jit.label();
    if (m_stubInfo->useDataIC) {
        jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
        jit.farJump(CCallHelpers::Address(m_stubInfo->m_stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    } else {
        size_t startSize = jit.m_assembler.buffer().codeSize();
        m_slowPathJump = jit.jump();
        size_t jumpSize = jit.m_assembler.buffer().codeSize() - startSize;
        size_t nopsToEmitInBytes = inlineICSize - jumpSize;
        jit.emitNops(nopsToEmitInBytes);
        ASSERT(jit.m_assembler.buffer().codeSize() - startSize == inlineICSize);
    }
    m_done = jit.label();
}

JITGetByIdGenerator::JITGetByIdGenerator(
    CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSite, const RegisterSet& usedRegisters,
    CacheableIdentifier propertyName, JSValueRegs base, JSValueRegs value, GPRReg stubInfoGPR, AccessType accessType)
    : JITByIdGenerator(codeBlock, stubInfo, jitType, codeOrigin, accessType, base, value)
    , m_isLengthAccess(codeBlock && propertyName.uid() == codeBlock->vm().propertyNames->length.impl())
{
    RELEASE_ASSERT(base.payloadGPR() != value.tagGPR());
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, accessType, codeOrigin, callSite, usedRegisters, base, value, stubInfoGPR);
    }, stubInfo);
}

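// Inline self-access for get_by_id: if the base's structure ID matches the one cached in the
// stub info, load the property at the cached offset; otherwise far-jump to the generated stub.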
static void generateGetByIdInlineAccess(CCallHelpers& jit, GPRReg stubInfoGPR, JSValueRegs baseJSR, GPRReg scratchGPR, JSValueRegs resultJSR)
{
    jit.load32(CCallHelpers::Address(baseJSR.payloadGPR(), JSCell::structureIDOffset()), scratchGPR);
    auto doInlineAccess = jit.branch32(CCallHelpers::Equal, scratchGPR, CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfInlineAccessBaseStructureID()));
    jit.farJump(CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    doInlineAccess.link(&jit);
    jit.load32(CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfByIdSelfOffset()), scratchGPR);
    jit.loadProperty(baseJSR.payloadGPR(), scratchGPR, resultJSR);
}

void JITGetByIdGenerator::generateFastPath(CCallHelpers& jit, GPRReg scratchGPR)
{
    ASSERT(m_stubInfo);
    if (!m_stubInfo->useDataIC) {
        generateFastCommon(jit, m_isLengthAccess ? InlineAccess::sizeForLengthAccess() : InlineAccess::sizeForPropertyAccess());
        return;
    }

    ASSERT(scratchGPR != InvalidGPRReg);
    m_start = jit.label();
    jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
    generateGetByIdInlineAccess(jit, m_stubInfo->m_stubInfoGPR, m_base, scratchGPR, m_value);
    m_done = jit.label();
}

void JITGetByIdGenerator::generateBaselineDataICFastPath(JIT& jit, unsigned stubInfo, GPRReg stubInfoGPR)
{
    m_start = jit.label();

    using BaselineJITRegisters::GetById::baseJSR;
    using BaselineJITRegisters::GetById::resultJSR;
    using BaselineJITRegisters::GetById::FastPath::scratchGPR;

    jit.loadConstant(stubInfo, stubInfoGPR);
    generateGetByIdInlineAccess(jit, stubInfoGPR, baseJSR, scratchGPR, resultJSR);

    m_done = jit.label();
}

#if ENABLE(DFG_JIT)
void JITGetByIdGenerator::generateDFGDataICFastPath(DFG::JITCompiler& jit, unsigned stubInfoConstant, JSValueRegs baseJSR, JSValueRegs resultJSR, GPRReg stubInfoGPR, GPRReg scratchGPR)
{
    m_start = jit.label();
    jit.loadConstant(stubInfoConstant, stubInfoGPR);
    generateGetByIdInlineAccess(jit, stubInfoGPR, baseJSR, scratchGPR, resultJSR);
    m_done = jit.label();
}
#endif

JITGetByIdWithThisGenerator::JITGetByIdWithThisGenerator(
    CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSite, const RegisterSet& usedRegisters,
    CacheableIdentifier, JSValueRegs value, JSValueRegs base, JSValueRegs thisRegs, GPRReg stubInfoGPR)
    : JITByIdGenerator(codeBlock, stubInfo, jitType, codeOrigin, AccessType::GetByIdWithThis, base, value)
{
    RELEASE_ASSERT(thisRegs.payloadGPR() != thisRegs.tagGPR());
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, AccessType::GetByIdWithThis, codeOrigin, callSite, usedRegisters, value, base, thisRegs, stubInfoGPR);
    }, stubInfo);
}

void JITGetByIdWithThisGenerator::generateFastPath(CCallHelpers& jit, GPRReg scratchGPR)
{
    ASSERT(m_stubInfo);
    if (!m_stubInfo->useDataIC) {
        generateFastCommon(jit, InlineAccess::sizeForPropertyAccess());
        return;
    }

    ASSERT(scratchGPR != InvalidGPRReg);
    m_start = jit.label();
    jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
    generateGetByIdInlineAccess(jit, m_stubInfo->m_stubInfoGPR, m_base, scratchGPR, m_value);
    m_done = jit.label();
}

void JITGetByIdWithThisGenerator::generateBaselineDataICFastPath(JIT& jit, unsigned stubInfo, GPRReg stubInfoGPR)
{
    m_start = jit.label();

    using BaselineJITRegisters::GetByIdWithThis::baseJSR;
    using BaselineJITRegisters::GetByIdWithThis::resultJSR;
    using BaselineJITRegisters::GetByIdWithThis::FastPath::scratchGPR;

    jit.loadConstant(stubInfo, stubInfoGPR);
    generateGetByIdInlineAccess(jit, stubInfoGPR, baseJSR, scratchGPR, resultJSR);

    m_done = jit.label();
}

#if ENABLE(DFG_JIT)
void JITGetByIdWithThisGenerator::generateDFGDataICFastPath(DFG::JITCompiler& jit, unsigned stubInfoConstant, JSValueRegs baseJSR, JSValueRegs resultJSR, GPRReg stubInfoGPR, GPRReg scratchGPR)
{
    m_start = jit.label();
    jit.loadConstant(stubInfoConstant, stubInfoGPR);
    generateGetByIdInlineAccess(jit, stubInfoGPR, baseJSR, scratchGPR, resultJSR);
    m_done = jit.label();
}
#endif

JITPutByIdGenerator::JITPutByIdGenerator(
    CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSite, const RegisterSet& usedRegisters, CacheableIdentifier,
    JSValueRegs base, JSValueRegs value, GPRReg stubInfoGPR, GPRReg scratch,
    ECMAMode ecmaMode, PutKind putKind)
    : JITByIdGenerator(codeBlock, stubInfo, jitType, codeOrigin, AccessType::PutById, base, value)
    , m_ecmaMode(ecmaMode)
    , m_putKind(putKind)
{
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, AccessType::PutById, codeOrigin, callSite, usedRegisters, base, value, stubInfoGPR, scratch, ecmaMode, putKind);
    }, stubInfo);
}

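// Inline self-replace for put_by_id: on a structure match, store the value at the cached
// offset; otherwise far-jump to the generated stub.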
static void generatePutByIdInlineAccess(CCallHelpers& jit, GPRReg stubInfoGPR, JSValueRegs baseJSR, JSValueRegs valueJSR, GPRReg scratchGPR, GPRReg scratch2GPR)
{
    jit.load32(CCallHelpers::Address(baseJSR.payloadGPR(), JSCell::structureIDOffset()), scratchGPR);
    auto doInlineAccess = jit.branch32(CCallHelpers::Equal, scratchGPR, CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfInlineAccessBaseStructureID()));
    jit.farJump(CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    doInlineAccess.link(&jit);
    jit.load32(CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfByIdSelfOffset()), scratchGPR);
    jit.storeProperty(valueJSR, baseJSR.payloadGPR(), scratchGPR, scratch2GPR);
}

void JITPutByIdGenerator::generateBaselineDataICFastPath(JIT& jit, unsigned stubInfo, GPRReg stubInfoGPR)
{
    m_start = jit.label();

    jit.loadConstant(stubInfo, stubInfoGPR);

    using BaselineJITRegisters::PutById::baseJSR;
    using BaselineJITRegisters::PutById::valueJSR;
    using BaselineJITRegisters::PutById::FastPath::scratchGPR;
    using BaselineJITRegisters::PutById::FastPath::scratch2GPR;

    generatePutByIdInlineAccess(jit, stubInfoGPR, baseJSR, valueJSR, scratchGPR, scratch2GPR);
    m_done = jit.label();
}

#if ENABLE(DFG_JIT)
void JITPutByIdGenerator::generateDFGDataICFastPath(DFG::JITCompiler& jit, unsigned stubInfoConstant, JSValueRegs baseJSR, JSValueRegs valueJSR, GPRReg stubInfoGPR, GPRReg scratchGPR, GPRReg scratch2GPR)
{
    m_start = jit.label();
    jit.loadConstant(stubInfoConstant, stubInfoGPR);
    generatePutByIdInlineAccess(jit, stubInfoGPR, baseJSR, valueJSR, scratchGPR, scratch2GPR);
    m_done = jit.label();
}
#endif

void JITPutByIdGenerator::generateFastPath(CCallHelpers& jit, GPRReg scratchGPR, GPRReg scratch2GPR)
{
    ASSERT(m_stubInfo);
    if (!m_stubInfo->useDataIC) {
        generateFastCommon(jit, InlineAccess::sizeForPropertyReplace());
        return;
    }

    ASSERT(scratchGPR != InvalidGPRReg);
    m_start = jit.label();
    jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
    generatePutByIdInlineAccess(jit, m_stubInfo->m_stubInfoGPR, m_base, m_value, scratchGPR, scratch2GPR);
    m_done = jit.label();
}

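// Selects the slow-path operation to call based on the put kind and whether the caller is in
// strict mode.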
V_JITOperation_GSsiJJC JITPutByIdGenerator::slowPathFunction()
{
    switch (m_putKind) {
    case PutKind::NotDirect:
        if (m_ecmaMode.isStrict())
            return operationPutByIdStrictOptimize;
        return operationPutByIdNonStrictOptimize;
    case PutKind::Direct:
        if (m_ecmaMode.isStrict())
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdDirectNonStrictOptimize;
    case PutKind::DirectPrivateFieldDefine:
        ASSERT(m_ecmaMode.isStrict());
        return operationPutByIdDefinePrivateFieldStrictOptimize;
    case PutKind::DirectPrivateFieldSet:
        ASSERT(m_ecmaMode.isStrict());
        return operationPutByIdSetPrivateFieldStrictOptimize;
    }
    // Keep the Windows port's compiler happy.
    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
}

JITDelByValGenerator::JITDelByValGenerator(CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs property, JSValueRegs result, GPRReg stubInfoGPR)
    : Base(codeBlock, stubInfo, jitType, codeOrigin, AccessType::DeleteByVal)
{
    ASSERT(base.payloadGPR() != result.payloadGPR());
#if USE(JSVALUE32_64)
    ASSERT(base.tagGPR() != result.tagGPR());
#endif
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, AccessType::DeleteByVal, codeOrigin, callSiteIndex, usedRegisters, base, property, result, stubInfoGPR);
    }, stubInfo);
}

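// Fast path for del_by_val: with a data IC, jump through the stub info's code pointer;
// otherwise emit a patchable jump that the IC machinery repatches later. The remaining
// by-val and instanceof generators below follow the same pattern.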
void JITDelByValGenerator::generateFastPath(CCallHelpers& jit)
{
    ASSERT(m_stubInfo);
    m_start = jit.label();
    if (m_stubInfo->useDataIC) {
        jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
        jit.farJump(CCallHelpers::Address(m_stubInfo->m_stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    } else
        m_slowPathJump = jit.patchableJump();
    m_done = jit.label();
}

void JITDelByValGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    ASSERT(m_stubInfo);
    Base::finalize(fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
    if (m_stubInfo->useDataIC)
        m_stubInfo->m_codePtr = m_stubInfo->slowPathStartLocation;
}

JITDelByIdGenerator::JITDelByIdGenerator(CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, const RegisterSet& usedRegisters, CacheableIdentifier, JSValueRegs base, JSValueRegs result, GPRReg stubInfoGPR)
    : Base(codeBlock, stubInfo, jitType, codeOrigin, AccessType::DeleteByID)
{
    ASSERT(base.payloadGPR() != result.payloadGPR());
#if USE(JSVALUE32_64)
    ASSERT(base.tagGPR() != result.tagGPR());
#endif
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, AccessType::DeleteByID, codeOrigin, callSiteIndex, usedRegisters, base, result, stubInfoGPR);
    }, stubInfo);
}

void JITDelByIdGenerator::generateFastPath(CCallHelpers& jit)
{
    ASSERT(m_stubInfo);
    m_start = jit.label();
    if (m_stubInfo->useDataIC) {
        jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
        jit.farJump(CCallHelpers::Address(m_stubInfo->m_stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    } else
        m_slowPathJump = jit.patchableJump();
    m_done = jit.label();
}

void JITDelByIdGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    ASSERT(m_stubInfo);
    Base::finalize(fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
    if (m_stubInfo->useDataIC)
        m_stubInfo->m_codePtr = m_stubInfo->slowPathStartLocation;
}

JITInByValGenerator::JITInByValGenerator(CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, AccessType accessType, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs property, JSValueRegs result, GPRReg stubInfoGPR)
    : Base(codeBlock, stubInfo, jitType, codeOrigin, accessType)
{
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, accessType, codeOrigin, callSiteIndex, usedRegisters, base, property, result, stubInfoGPR);
    }, stubInfo);
}

void JITInByValGenerator::generateFastPath(CCallHelpers& jit)
{
    ASSERT(m_stubInfo);
    m_start = jit.label();
    if (m_stubInfo->useDataIC) {
        jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
        jit.farJump(CCallHelpers::Address(m_stubInfo->m_stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    } else
        m_slowPathJump = jit.patchableJump();
    m_done = jit.label();
}

void JITInByValGenerator::finalize(
    LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    ASSERT(m_start.isSet());
    ASSERT(m_stubInfo);
    Base::finalize(fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
    if (m_stubInfo->useDataIC)
        m_stubInfo->m_codePtr = m_stubInfo->slowPathStartLocation;
}

JITInByIdGenerator::JITInByIdGenerator(
    CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSite, const RegisterSet& usedRegisters,
    CacheableIdentifier propertyName, JSValueRegs base, JSValueRegs value, GPRReg stubInfoGPR)
    : JITByIdGenerator(codeBlock, stubInfo, jitType, codeOrigin, AccessType::InById, base, value)
{
    // FIXME: We do not support a fast path for the "length" property.
    UNUSED_PARAM(propertyName);
    RELEASE_ASSERT(base.payloadGPR() != value.tagGPR());
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, AccessType::InById, codeOrigin, callSite, usedRegisters, base, value, stubInfoGPR);
    }, stubInfo);
}

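// Inline access for in_by_id: a structure match means the cached structure is known to have
// the property, so the result is simply boxed true; otherwise far-jump to the generated stub.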
static void generateInByIdInlineAccess(CCallHelpers& jit, GPRReg stubInfoGPR, JSValueRegs baseJSR, GPRReg scratchGPR, JSValueRegs resultJSR)
{
    jit.load32(CCallHelpers::Address(baseJSR.payloadGPR(), JSCell::structureIDOffset()), scratchGPR);
    auto skipInlineAccess = jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfInlineAccessBaseStructureID()));
    jit.boxBoolean(true, resultJSR);
    auto finished = jit.jump();

    skipInlineAccess.link(&jit);
    jit.farJump(CCallHelpers::Address(stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);

    finished.link(&jit);
}

void JITInByIdGenerator::generateFastPath(CCallHelpers& jit, GPRReg scratchGPR)
{
    ASSERT(m_stubInfo);
    if (!m_stubInfo->useDataIC) {
        generateFastCommon(jit, InlineAccess::sizeForPropertyAccess());
        return;
    }

    ASSERT(scratchGPR != InvalidGPRReg);
    m_start = jit.label();
    jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
    generateInByIdInlineAccess(jit, m_stubInfo->m_stubInfoGPR, m_base, scratchGPR, m_value);
    m_done = jit.label();
}

void JITInByIdGenerator::generateBaselineDataICFastPath(JIT& jit, unsigned stubInfo, GPRReg stubInfoGPR)
{
    m_start = jit.label();

    jit.loadConstant(stubInfo, stubInfoGPR);

    using BaselineJITRegisters::InById::baseJSR;
    using BaselineJITRegisters::InById::resultJSR;
    using BaselineJITRegisters::InById::scratchGPR;

    generateInByIdInlineAccess(jit, stubInfoGPR, baseJSR, scratchGPR, resultJSR);

    m_done = jit.label();
}

#if ENABLE(DFG_JIT)
void JITInByIdGenerator::generateDFGDataICFastPath(DFG::JITCompiler& jit, unsigned stubInfoConstant, JSValueRegs baseJSR, JSValueRegs resultJSR, GPRReg stubInfoGPR, GPRReg scratchGPR)
{
    m_start = jit.label();
    jit.loadConstant(stubInfoConstant, stubInfoGPR);
    generateInByIdInlineAccess(jit, stubInfoGPR, baseJSR, scratchGPR, resultJSR);
    m_done = jit.label();
}
#endif

JITInstanceOfGenerator::JITInstanceOfGenerator(
    CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex,
    const RegisterSet& usedRegisters, GPRReg result, GPRReg value, GPRReg prototype, GPRReg stubInfoGPR,
    bool prototypeIsKnownObject)
    : JITInlineCacheGenerator(codeBlock, stubInfo, jitType, codeOrigin, AccessType::InstanceOf)
{
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, AccessType::InstanceOf, codeOrigin, callSiteIndex, usedRegisters, result, value, prototype, stubInfoGPR, prototypeIsKnownObject);
    }, stubInfo);
}

void JITInstanceOfGenerator::generateFastPath(CCallHelpers& jit)
{
    ASSERT(m_stubInfo);
    m_start = jit.label();
    if (m_stubInfo->useDataIC) {
        jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
        jit.farJump(CCallHelpers::Address(m_stubInfo->m_stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    } else
        m_slowPathJump = jit.patchableJump();
    m_done = jit.label();
}

void JITInstanceOfGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    ASSERT(m_stubInfo);
    Base::finalize(fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
    if (m_stubInfo->useDataIC)
        m_stubInfo->m_codePtr = m_stubInfo->slowPathStartLocation;
}

JITGetByValGenerator::JITGetByValGenerator(CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, AccessType accessType, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs property, JSValueRegs result, GPRReg stubInfoGPR)
    : Base(codeBlock, stubInfo, jitType, codeOrigin, accessType)
    , m_base(base)
    , m_result(result)
{
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, accessType, codeOrigin, callSiteIndex, usedRegisters, base, property, result, stubInfoGPR);
    }, stubInfo);
}

void JITGetByValGenerator::generateFastPath(CCallHelpers& jit)
{
    ASSERT(m_stubInfo);
    m_start = jit.label();
    if (m_stubInfo->useDataIC) {
        jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
        jit.farJump(CCallHelpers::Address(m_stubInfo->m_stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    } else
        m_slowPathJump = jit.patchableJump();
    m_done = jit.label();
}

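// Emits no inline fast path: the start and done labels coincide at the same point.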
void JITGetByValGenerator::generateEmptyPath(CCallHelpers& jit)
{
    m_start = jit.label();
    m_done = jit.label();
}

void JITGetByValGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    ASSERT(m_stubInfo);
    Base::finalize(fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
    if (m_stubInfo->useDataIC)
        m_stubInfo->m_codePtr = m_stubInfo->slowPathStartLocation;
}

JITPutByValGenerator::JITPutByValGenerator(CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, AccessType accessType, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs property, JSValueRegs value, GPRReg arrayProfileGPR, GPRReg stubInfoGPR, PutKind putKind, ECMAMode ecmaMode, PrivateFieldPutKind privateFieldPutKind)
    : Base(codeBlock, stubInfo, jitType, codeOrigin, accessType)
    , m_base(base)
    , m_value(value)
{
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, accessType, codeOrigin, callSiteIndex, usedRegisters, base, property, value, arrayProfileGPR, stubInfoGPR, putKind, ecmaMode, privateFieldPutKind);
    }, stubInfo);
}

void JITPutByValGenerator::generateFastPath(CCallHelpers& jit)
{
    ASSERT(m_stubInfo);
    m_start = jit.label();
    if (m_stubInfo->useDataIC) {
        jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
        jit.farJump(CCallHelpers::Address(m_stubInfo->m_stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    } else
        m_slowPathJump = jit.patchableJump();
    m_done = jit.label();
}

void JITPutByValGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    ASSERT(m_stubInfo);
    Base::finalize(fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
    if (m_stubInfo->useDataIC)
        m_stubInfo->m_codePtr = m_stubInfo->slowPathStartLocation;
}

JITPrivateBrandAccessGenerator::JITPrivateBrandAccessGenerator(CodeBlock* codeBlock, CompileTimeStructureStubInfo stubInfo, JITType jitType, CodeOrigin codeOrigin, CallSiteIndex callSiteIndex, AccessType accessType, const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs brand, GPRReg stubInfoGPR)
    : Base(codeBlock, stubInfo, jitType, codeOrigin, accessType)
{
    ASSERT(accessType == AccessType::CheckPrivateBrand || accessType == AccessType::SetPrivateBrand);
    std::visit([&](auto* stubInfo) {
        setUpStubInfo(*stubInfo, accessType, codeOrigin, callSiteIndex, usedRegisters, base, brand, stubInfoGPR);
    }, stubInfo);
}

void JITPrivateBrandAccessGenerator::generateFastPath(CCallHelpers& jit)
{
    ASSERT(m_stubInfo);
    m_start = jit.label();
    if (m_stubInfo->useDataIC) {
        jit.move(CCallHelpers::TrustedImmPtr(m_stubInfo), m_stubInfo->m_stubInfoGPR);
        jit.farJump(CCallHelpers::Address(m_stubInfo->m_stubInfoGPR, StructureStubInfo::offsetOfCodePtr()), JITStubRoutinePtrTag);
    } else
        m_slowPathJump = jit.patchableJump();
    m_done = jit.label();
}

void JITPrivateBrandAccessGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    ASSERT(m_stubInfo);
    Base::finalize(fastPath, slowPath, fastPath.locationOf<JITStubRoutinePtrTag>(m_start));
    if (m_stubInfo->useDataIC)
        m_stubInfo->m_codePtr = m_stubInfo->slowPathStartLocation;
}

} // namespace JSC

#endif // ENABLE(JIT)