/*
 * Copyright (C) 2008-2022 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)

#include "JIT.h"

#include "BytecodeGraph.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGCapabilities.h"
#include "JITInlines.h"
#include "JITOperations.h"
#include "JITSizeStatistics.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "ModuleProgramCodeBlock.h"
#include "PCToCodeOriginMap.h"
#include "ProbeContext.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "SlowPathCall.h"
#include "StackAlignment.h"
#include "ThunkGenerators.h"
#include "TypeProfilerLog.h"
#include <wtf/GraphNodeWorklist.h>
#include <wtf/SimpleStats.h>

namespace JSC {
namespace JITInternal {
static constexpr const bool verbose = false;
}

Seconds totalBaselineCompileTime;
Seconds totalDFGCompileTime;
Seconds totalFTLCompileTime;
Seconds totalFTLDFGCompileTime;
Seconds totalFTLB3CompileTime;

void ctiPatchCallByReturnAddress(ReturnAddressPtr returnAddress, FunctionPtr<CFunctionPtrTag> newCalleeFunction)
{
    MacroAssembler::repatchCall(
        CodeLocationCall<ReturnAddressPtrTag>(MacroAssemblerCodePtr<ReturnAddressPtrTag>(returnAddress)),
        newCalleeFunction.retagged<OperationPtrTag>());
}

JIT::JIT(VM& vm, CodeBlock* codeBlock, BytecodeIndex loopOSREntryBytecodeIndex)
    : JSInterfaceJIT(&vm, nullptr)
    , m_interpreter(vm.interpreter)
    , m_labels(codeBlock ? codeBlock->instructions().size() : 0)
    , m_pcToCodeOriginMapBuilder(vm)
    , m_canBeOptimized(false)
    , m_shouldEmitProfiling(false)
    , m_loopOSREntryBytecodeIndex(loopOSREntryBytecodeIndex)
{
    auto globalObjectConstant = addToConstantPool(JITConstantPool::Type::GlobalObject);
    ASSERT_UNUSED(globalObjectConstant, globalObjectConstant == s_globalObjectConstant);
    m_profiledCodeBlock = codeBlock;
    m_unlinkedCodeBlock = codeBlock->unlinkedCodeBlock();
}

JIT::~JIT()
{
}

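// Appends a new entry to the (unlinked) constant pool and returns its index. At
// runtime, baseline code reaches pool entries through s_constantsGPR, which is
// materialized from the owning CodeBlock's JIT data (see
// emitMaterializeMetadataAndConstantPoolRegisters() below); keeping the pool
// indirect like this is part of what lets the generated code be shared across
// CodeBlocks (see m_isShareable in link()).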
JITConstantPool::Constant JIT::addToConstantPool(JITConstantPool::Type type, void* payload)
{
    unsigned result = m_constantPool.size();
    m_constantPool.append(JITConstantPool::Value { payload, type });
    return result;
}

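// Allocates an unlinked stub info for a property-access IC. Note that the stub
// info's index is smuggled through the constant pool as a pointer-sized payload
// (the bitwise_cast below); it is turned back into a pointer to a real, linked
// StructureStubInfo when a CodeBlock adopts this unlinked code.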
std::tuple<BaselineUnlinkedStructureStubInfo*, JITConstantPool::Constant> JIT::addUnlinkedStructureStubInfo()
{
    void* unlinkedStubInfoIndex = bitwise_cast<void*>(static_cast<uintptr_t>(m_unlinkedStubInfos.size()));
    BaselineUnlinkedStructureStubInfo* stubInfo = &m_unlinkedStubInfos.alloc();
    JITConstantPool::Constant stubInfoIndex = addToConstantPool(JITConstantPool::Type::StructureStubInfo, unlinkedStubInfoIndex);
    return std::tuple { stubInfo, stubInfoIndex };
}

UnlinkedCallLinkInfo* JIT::addUnlinkedCallLinkInfo()
{
    return &m_unlinkedCalls.alloc();
}

#if ENABLE(DFG_JIT)
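// Emitted at the top of functions the DFG can handle. Bumps the tier-up execution
// counter; while the counter stays negative we skip optimization, and once it
// crosses zero, operationOptimize kicks off the optimizing tiers and returns
// either null (keep running baseline code) or a machine-code address to
// OSR-enter through.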
void JIT::emitEnterOptimizationCheck()
{
    if (!canBeOptimized())
        return;

    JumpList skipOptimize;
    loadPtr(addressFor(CallFrameSlot::codeBlock), regT0);
    skipOptimize.append(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), Address(regT0, CodeBlock::offsetOfJITExecuteCounter())));
    ASSERT(!m_bytecodeIndex.offset());

    copyLLIntBaselineCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm().topEntryFrame);

    callOperationNoExceptionCheck(operationOptimize, TrustedImmPtr(&vm()), m_bytecodeIndex.asBits());
    skipOptimize.append(branchTestPtr(Zero, returnValueGPR));
    farJump(returnValueGPR, GPRInfo::callFrameRegister);
    skipOptimize.link(this);
}
#endif

void JIT::emitNotifyWriteWatchpoint(GPRReg pointerToSet)
{
    auto ok = branchTestPtr(Zero, pointerToSet);
    addSlowCase(branch8(NotEqual, Address(pointerToSet, WatchpointSet::offsetOfState()), TrustedImm32(IsInvalidated)));
    ok.link(this);
}

void JIT::emitVarReadOnlyCheck(ResolveType resolveType, GPRReg scratchGPR)
{
    if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks) {
        loadGlobalObject(scratchGPR);
        loadPtr(Address(scratchGPR, JSGlobalObject::offsetOfVarReadOnlyWatchpoint()), scratchGPR);
        addSlowCase(branch8(Equal, Address(scratchGPR, WatchpointSet::offsetOfState()), TrustedImm32(IsInvalidated)));
    }
}

void JIT::resetSP()
{
    addPtr(TrustedImm32(stackPointerOffsetFor(m_unlinkedCodeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();
}

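// The macros below form the body of the main-pass dispatch switch.
// NEXT_OPCODE_IN_MAIN advances m_bytecodeIndex past the current instruction and,
// if the instruction registered any slow cases, counts it so that
// privateCompileSlowCases() can cross-check its own bookkeeping.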
#define NEXT_OPCODE_IN_MAIN(name) \
    if (previousSlowCasesSize != m_slowCases.size()) \
        ++m_bytecodeCountHavingSlowCase; \
    m_bytecodeIndex = BytecodeIndex(m_bytecodeIndex.offset() + currentInstruction->size()); \
    break;

#define DEFINE_SLOW_OP(name) \
    case op_##name: { \
        if (m_bytecodeIndex >= startBytecodeIndex) { \
            JITSlowPathCall slowPathCall(this, slow_path_##name); \
            slowPathCall.call(); \
        } \
        NEXT_OPCODE_IN_MAIN(op_##name); \
    }

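// For illustration only: DEFINE_SLOW_OP(typeof) expands to roughly the following
// case in privateCompileMainPass()'s dispatch switch:
//
//     case op_typeof: {
//         if (m_bytecodeIndex >= startBytecodeIndex) {
//             JITSlowPathCall slowPathCall(this, slow_path_typeof);
//             slowPathCall.call();
//         }
//         NEXT_OPCODE_IN_MAIN(op_typeof);
//     }
//
// DEFINE_OP and the slow-case macros below follow the same shape for opcodes that
// have dedicated emitters.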
#define DEFINE_OP(name) \
    case name: { \
        if (m_bytecodeIndex >= startBytecodeIndex) { \
            emit_##name(currentInstruction); \
        } \
        NEXT_OPCODE_IN_MAIN(name); \
    }

#define DEFINE_SLOWCASE_OP(name) \
    case name: { \
        emitSlow_##name(currentInstruction, iter); \
        break; \
    }

#define DEFINE_SLOWCASE_SLOW_OP(name) \
    case op_##name: { \
        emitSlowCaseCall(iter, slow_path_##name); \
        break; \
    }

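// Used by DEFINE_SLOWCASE_SLOW_OP: links every slow-case jump recorded for the
// current bytecode, then emits a call to the generic C++ slow path for the opcode.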
void JIT::emitSlowCaseCall(Vector<SlowCaseEntry>::iterator& iter, SlowPathFunction stub)
{
    linkAllSlowCases(iter);

    JITSlowPathCall slowPathCall(this, stub);
    slowPathCall.call();
}

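// First of the three compile passes: walk the bytecode stream once, emitting the
// fast path for every instruction while recording per-bytecode labels, slow-case
// jumps, and jump-table entries for the link and slow-case passes that follow.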
void JIT::privateCompileMainPass()
{
    if (JITInternal::verbose)
        dataLog("Compiling ", *m_profiledCodeBlock, "\n");

    jitAssertTagsInPlace();
    jitAssertArgumentCountSane();

    auto& instructions = m_unlinkedCodeBlock->instructions();
    unsigned instructionCount = m_unlinkedCodeBlock->instructions().size();

    m_callLinkInfoIndex = 0;

    BytecodeIndex startBytecodeIndex(0);

    m_bytecodeCountHavingSlowCase = 0;
    for (m_bytecodeIndex = BytecodeIndex(0); m_bytecodeIndex.offset() < instructionCount; ) {
        unsigned previousSlowCasesSize = m_slowCases.size();
        if (m_bytecodeIndex == startBytecodeIndex && startBytecodeIndex.offset() > 0) {
            // We've proven all bytecode instructions up until here are unreachable.
            // Let's ensure that by crashing if it's ever hit.
            breakpoint();
        }

        if (m_disassembler)
            m_disassembler->setForBytecodeMainPath(m_bytecodeIndex.offset(), label());
        const auto* currentInstruction = instructions.at(m_bytecodeIndex).ptr();
        ASSERT(currentInstruction->size());

        m_pcToCodeOriginMapBuilder.appendItem(label(), CodeOrigin(m_bytecodeIndex));

        m_labels[m_bytecodeIndex.offset()] = label();

        if (JITInternal::verbose)
            dataLogLn("Baseline JIT emitting code for ", m_bytecodeIndex, " at offset ", (long)debugOffset());

        OpcodeID opcodeID = currentInstruction->opcodeID();

        std::optional<JITSizeStatistics::Marker> sizeMarker;
        if (UNLIKELY(m_bytecodeIndex >= startBytecodeIndex && Options::dumpBaselineJITSizeStatistics())) {
            String id = makeString("Baseline_fast_", opcodeNames[opcodeID]);
            sizeMarker = m_vm->jitSizeStatistics->markStart(id, *this);
        }

#if ASSERT_ENABLED
        if (opcodeID != op_catch) {
            loadPtr(addressFor(CallFrameSlot::codeBlock), regT0);
            loadPtr(Address(regT0, CodeBlock::offsetOfMetadataTable()), regT1);
            loadPtr(Address(regT0, CodeBlock::offsetOfJITData()), regT2);

            m_consistencyCheckCalls.append(nearCall());
        }
#endif

        if (UNLIKELY(m_compilation)) {
            add64(
                TrustedImm32(1),
                AbsoluteAddress(m_compilation->executionCounterFor(Profiler::OriginStack(Profiler::Origin(
                    m_compilation->bytecodes(), m_bytecodeIndex)))->address()));
        }

        if (Options::eagerlyUpdateTopCallFrame())
            updateTopCallFrame();

        unsigned bytecodeOffset = m_bytecodeIndex.offset();
        if (UNLIKELY(Options::traceBaselineJITExecution())) {
            probeDebug([=] (Probe::Context& ctx) {
                CodeBlock* codeBlock = ctx.fp<CallFrame*>()->codeBlock();
                dataLogLn("JIT [", bytecodeOffset, "] ", opcodeNames[opcodeID], " cfr ", RawPointer(ctx.fp()), " @ ", codeBlock);
            });
        }

        switch (opcodeID) {
        DEFINE_SLOW_OP(less)
        DEFINE_SLOW_OP(lesseq)
        DEFINE_SLOW_OP(greater)
        DEFINE_SLOW_OP(greatereq)
        DEFINE_SLOW_OP(instanceof_custom)
        DEFINE_SLOW_OP(is_callable)
        DEFINE_SLOW_OP(is_constructor)
        DEFINE_SLOW_OP(typeof)
        DEFINE_SLOW_OP(typeof_is_object)
        DEFINE_SLOW_OP(typeof_is_function)
        DEFINE_SLOW_OP(strcat)
        DEFINE_SLOW_OP(push_with_scope)
        DEFINE_SLOW_OP(create_lexical_environment)
        DEFINE_SLOW_OP(get_by_val_with_this)
        DEFINE_SLOW_OP(put_by_id_with_this)
        DEFINE_SLOW_OP(put_by_val_with_this)
        DEFINE_SLOW_OP(resolve_scope_for_hoisting_func_decl_in_eval)
        DEFINE_SLOW_OP(define_data_property)
        DEFINE_SLOW_OP(define_accessor_property)
        DEFINE_SLOW_OP(unreachable)
        DEFINE_SLOW_OP(throw_static_error)
        DEFINE_SLOW_OP(new_array_with_spread)
        DEFINE_SLOW_OP(new_array_buffer)
        DEFINE_SLOW_OP(spread)
        DEFINE_SLOW_OP(create_direct_arguments)
        DEFINE_SLOW_OP(create_scoped_arguments)
        DEFINE_SLOW_OP(create_cloned_arguments)
        DEFINE_SLOW_OP(create_arguments_butterfly)
        DEFINE_SLOW_OP(create_rest)
        DEFINE_SLOW_OP(create_promise)
        DEFINE_SLOW_OP(new_promise)
        DEFINE_SLOW_OP(create_generator)
        DEFINE_SLOW_OP(create_async_generator)
        DEFINE_SLOW_OP(new_generator)

        DEFINE_OP(op_add)
        DEFINE_OP(op_bitnot)
        DEFINE_OP(op_bitand)
        DEFINE_OP(op_bitor)
        DEFINE_OP(op_bitxor)
        DEFINE_OP(op_call)
        DEFINE_OP(op_tail_call)
        DEFINE_OP(op_call_eval)
        DEFINE_OP(op_call_varargs)
        DEFINE_OP(op_tail_call_varargs)
        DEFINE_OP(op_tail_call_forward_arguments)
        DEFINE_OP(op_construct_varargs)
        DEFINE_OP(op_catch)
        DEFINE_OP(op_construct)
        DEFINE_OP(op_create_this)
        DEFINE_OP(op_to_this)
        DEFINE_OP(op_get_argument)
        DEFINE_OP(op_argument_count)
        DEFINE_OP(op_get_rest_length)
        DEFINE_OP(op_check_tdz)
        DEFINE_OP(op_identity_with_profile)
        DEFINE_OP(op_debug)
        DEFINE_OP(op_del_by_id)
        DEFINE_OP(op_del_by_val)
        DEFINE_OP(op_div)
        DEFINE_OP(op_end)
        DEFINE_OP(op_enter)
        DEFINE_OP(op_get_scope)
        DEFINE_OP(op_eq)
        DEFINE_OP(op_eq_null)
        DEFINE_OP(op_below)
        DEFINE_OP(op_beloweq)
        DEFINE_OP(op_try_get_by_id)
        DEFINE_OP(op_in_by_id)
        DEFINE_OP(op_in_by_val)
        DEFINE_OP(op_has_private_name)
        DEFINE_OP(op_has_private_brand)
        DEFINE_OP(op_get_by_id)
        DEFINE_OP(op_get_by_id_with_this)
        DEFINE_OP(op_get_by_id_direct)
        DEFINE_OP(op_get_by_val)
        DEFINE_OP(op_get_property_enumerator)
        DEFINE_OP(op_enumerator_next)
        DEFINE_OP(op_enumerator_get_by_val)
        DEFINE_OP(op_enumerator_in_by_val)
        DEFINE_OP(op_enumerator_has_own_property)
        DEFINE_OP(op_get_private_name)
        DEFINE_OP(op_set_private_brand)
        DEFINE_OP(op_check_private_brand)
        DEFINE_OP(op_get_prototype_of)
        DEFINE_OP(op_overrides_has_instance)
        DEFINE_OP(op_instanceof)
        DEFINE_OP(op_is_empty)
        DEFINE_OP(op_typeof_is_undefined)
        DEFINE_OP(op_is_undefined_or_null)
        DEFINE_OP(op_is_boolean)
        DEFINE_OP(op_is_number)
        DEFINE_OP(op_is_big_int)
        DEFINE_OP(op_is_object)
        DEFINE_OP(op_is_cell_with_type)
        DEFINE_OP(op_jeq_null)
        DEFINE_OP(op_jfalse)
        DEFINE_OP(op_jmp)
        DEFINE_OP(op_jneq_null)
        DEFINE_OP(op_jundefined_or_null)
        DEFINE_OP(op_jnundefined_or_null)
        DEFINE_OP(op_jeq_ptr)
        DEFINE_OP(op_jneq_ptr)
        DEFINE_OP(op_jless)
        DEFINE_OP(op_jlesseq)
        DEFINE_OP(op_jgreater)
        DEFINE_OP(op_jgreatereq)
        DEFINE_OP(op_jnless)
        DEFINE_OP(op_jnlesseq)
        DEFINE_OP(op_jngreater)
        DEFINE_OP(op_jngreatereq)
        DEFINE_OP(op_jeq)
        DEFINE_OP(op_jneq)
        DEFINE_OP(op_jstricteq)
        DEFINE_OP(op_jnstricteq)
        DEFINE_OP(op_jbelow)
        DEFINE_OP(op_jbeloweq)
        DEFINE_OP(op_jtrue)
        DEFINE_OP(op_loop_hint)
        DEFINE_OP(op_check_traps)
        DEFINE_OP(op_nop)
        DEFINE_OP(op_super_sampler_begin)
        DEFINE_OP(op_super_sampler_end)
        DEFINE_OP(op_lshift)
        DEFINE_OP(op_mod)
        DEFINE_OP(op_pow)
        DEFINE_OP(op_mov)
        DEFINE_OP(op_mul)
        DEFINE_OP(op_negate)
        DEFINE_OP(op_neq)
        DEFINE_OP(op_neq_null)
        DEFINE_OP(op_new_array)
        DEFINE_OP(op_new_array_with_size)
        DEFINE_OP(op_new_func)
        DEFINE_OP(op_new_func_exp)
        DEFINE_OP(op_new_generator_func)
        DEFINE_OP(op_new_generator_func_exp)
        DEFINE_OP(op_new_async_func)
        DEFINE_OP(op_new_async_func_exp)
        DEFINE_OP(op_new_async_generator_func)
        DEFINE_OP(op_new_async_generator_func_exp)
        DEFINE_OP(op_new_object)
        DEFINE_OP(op_new_regexp)
        DEFINE_OP(op_not)
        DEFINE_OP(op_nstricteq)
        DEFINE_OP(op_dec)
        DEFINE_OP(op_inc)
        DEFINE_OP(op_profile_type)
        DEFINE_OP(op_profile_control_flow)
        DEFINE_OP(op_get_parent_scope)
        DEFINE_OP(op_put_by_id)
        DEFINE_OP(op_put_by_val_direct)
        DEFINE_OP(op_put_by_val)
        DEFINE_OP(op_put_private_name)
        DEFINE_OP(op_put_getter_by_id)
        DEFINE_OP(op_put_setter_by_id)
        DEFINE_OP(op_put_getter_setter_by_id)
        DEFINE_OP(op_put_getter_by_val)
        DEFINE_OP(op_put_setter_by_val)
        DEFINE_OP(op_to_property_key)

        DEFINE_OP(op_get_internal_field)
        DEFINE_OP(op_put_internal_field)

        DEFINE_OP(op_iterator_open)
        DEFINE_OP(op_iterator_next)

        DEFINE_OP(op_ret)
        DEFINE_OP(op_rshift)
        DEFINE_OP(op_unsigned)
        DEFINE_OP(op_urshift)
        DEFINE_OP(op_set_function_name)
        DEFINE_OP(op_stricteq)
        DEFINE_OP(op_sub)
        DEFINE_OP(op_switch_char)
        DEFINE_OP(op_switch_imm)
        DEFINE_OP(op_switch_string)
        DEFINE_OP(op_throw)
        DEFINE_OP(op_to_number)
        DEFINE_OP(op_to_numeric)
        DEFINE_OP(op_to_string)
        DEFINE_OP(op_to_object)
        DEFINE_OP(op_to_primitive)

        DEFINE_OP(op_resolve_scope)
        DEFINE_OP(op_get_from_scope)
        DEFINE_OP(op_put_to_scope)
        DEFINE_OP(op_get_from_arguments)
        DEFINE_OP(op_put_to_arguments)

        DEFINE_OP(op_log_shadow_chicken_prologue)
        DEFINE_OP(op_log_shadow_chicken_tail)

        default:
            RELEASE_ASSERT_NOT_REACHED();
        }

        if (UNLIKELY(sizeMarker))
            m_vm->jitSizeStatistics->markEnd(WTFMove(*sizeMarker), *this);

        if (JITInternal::verbose)
            dataLog("At ", bytecodeOffset, ": ", m_slowCases.size(), "\n");
    }

    RELEASE_ASSERT(m_callLinkInfoIndex == m_callCompilationInfo.size());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeIndex = BytecodeIndex();
#endif
}

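// Second pass: now that every reachable bytecode offset has a label, resolve the
// intra-function jumps recorded during the main pass.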
void JIT::privateCompileLinkPass()
{
    unsigned jmpTableCount = m_jmpTable.size();
    for (unsigned i = 0; i < jmpTableCount; ++i)
        m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeOffset], this);
    m_jmpTable.clear();
}

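// Third pass: emit the out-of-line slow paths. m_slowCases is ordered by bytecode
// index; each loop iteration consumes every entry for one bytecode, emits its
// slow-path code, and finishes with a jump back to the fast-path resume point.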
void JIT::privateCompileSlowCases()
{
    m_getByIdIndex = 0;
    m_getByValIndex = 0;
    m_getByIdWithThisIndex = 0;
    m_putByIdIndex = 0;
    m_putByValIndex = 0;
    m_inByIdIndex = 0;
    m_inByValIndex = 0;
    m_delByIdIndex = 0;
    m_delByValIndex = 0;
    m_instanceOfIndex = 0;
    m_privateBrandAccessIndex = 0;
    m_callLinkInfoIndex = 0;

    unsigned bytecodeCountHavingSlowCase = 0;
    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
        m_bytecodeIndex = iter->to;

        m_pcToCodeOriginMapBuilder.appendItem(label(), CodeOrigin(m_bytecodeIndex));

        BytecodeIndex firstTo = m_bytecodeIndex;

        const auto* currentInstruction = m_unlinkedCodeBlock->instructions().at(m_bytecodeIndex).ptr();

        if (JITInternal::verbose)
            dataLogLn("Baseline JIT emitting slow code for ", m_bytecodeIndex, " at offset ", (long)debugOffset());

        if (m_disassembler)
            m_disassembler->setForBytecodeSlowPath(m_bytecodeIndex.offset(), label());

        OpcodeID opcodeID = currentInstruction->opcodeID();

        std::optional<JITSizeStatistics::Marker> sizeMarker;
        if (UNLIKELY(Options::dumpBaselineJITSizeStatistics())) {
            String id = makeString("Baseline_slow_", opcodeNames[opcodeID]);
            sizeMarker = m_vm->jitSizeStatistics->markStart(id, *this);
        }

        if (UNLIKELY(Options::traceBaselineJITExecution())) {
            unsigned bytecodeOffset = m_bytecodeIndex.offset();
            probeDebug([=] (Probe::Context& ctx) {
                CodeBlock* codeBlock = ctx.fp<CallFrame*>()->codeBlock();
                dataLogLn("JIT [", bytecodeOffset, "] SLOW ", opcodeNames[opcodeID], " cfr ", RawPointer(ctx.fp()), " @ ", codeBlock);
            });
        }

        switch (currentInstruction->opcodeID()) {
        DEFINE_SLOWCASE_OP(op_add)
        DEFINE_SLOWCASE_OP(op_call)
        DEFINE_SLOWCASE_OP(op_tail_call)
        DEFINE_SLOWCASE_OP(op_call_eval)
        DEFINE_SLOWCASE_OP(op_call_varargs)
        DEFINE_SLOWCASE_OP(op_tail_call_varargs)
        DEFINE_SLOWCASE_OP(op_tail_call_forward_arguments)
        DEFINE_SLOWCASE_OP(op_construct_varargs)
        DEFINE_SLOWCASE_OP(op_construct)
        DEFINE_SLOWCASE_OP(op_eq)
        DEFINE_SLOWCASE_OP(op_try_get_by_id)
        DEFINE_SLOWCASE_OP(op_in_by_id)
        DEFINE_SLOWCASE_OP(op_in_by_val)
        DEFINE_SLOWCASE_OP(op_has_private_name)
        DEFINE_SLOWCASE_OP(op_has_private_brand)
        DEFINE_SLOWCASE_OP(op_get_by_id)
        DEFINE_SLOWCASE_OP(op_get_by_id_with_this)
        DEFINE_SLOWCASE_OP(op_get_by_id_direct)
        DEFINE_SLOWCASE_OP(op_get_by_val)
        DEFINE_SLOWCASE_OP(op_enumerator_get_by_val)
        DEFINE_SLOWCASE_OP(op_get_private_name)
        DEFINE_SLOWCASE_OP(op_set_private_brand)
        DEFINE_SLOWCASE_OP(op_check_private_brand)
        DEFINE_SLOWCASE_OP(op_instanceof)
        DEFINE_SLOWCASE_OP(op_jless)
        DEFINE_SLOWCASE_OP(op_jlesseq)
        DEFINE_SLOWCASE_OP(op_jgreater)
        DEFINE_SLOWCASE_OP(op_jgreatereq)
        DEFINE_SLOWCASE_OP(op_jnless)
        DEFINE_SLOWCASE_OP(op_jnlesseq)
        DEFINE_SLOWCASE_OP(op_jngreater)
        DEFINE_SLOWCASE_OP(op_jngreatereq)
        DEFINE_SLOWCASE_OP(op_jeq)
        DEFINE_SLOWCASE_OP(op_jneq)
        DEFINE_SLOWCASE_OP(op_jstricteq)
        DEFINE_SLOWCASE_OP(op_jnstricteq)
        DEFINE_SLOWCASE_OP(op_loop_hint)
        DEFINE_SLOWCASE_OP(op_check_traps)
        DEFINE_SLOWCASE_OP(op_mod)
        DEFINE_SLOWCASE_OP(op_pow)
        DEFINE_SLOWCASE_OP(op_mul)
        DEFINE_SLOWCASE_OP(op_negate)
        DEFINE_SLOWCASE_OP(op_neq)
        DEFINE_SLOWCASE_OP(op_new_object)
        DEFINE_SLOWCASE_OP(op_put_by_id)
        case op_put_by_val_direct:
        DEFINE_SLOWCASE_OP(op_put_by_val)
        DEFINE_SLOWCASE_OP(op_put_private_name)
        DEFINE_SLOWCASE_OP(op_del_by_val)
        DEFINE_SLOWCASE_OP(op_del_by_id)
        DEFINE_SLOWCASE_OP(op_sub)
        DEFINE_SLOWCASE_OP(op_put_to_scope)

        DEFINE_SLOWCASE_OP(op_iterator_open)
        DEFINE_SLOWCASE_OP(op_iterator_next)

        DEFINE_SLOWCASE_SLOW_OP(unsigned)
        DEFINE_SLOWCASE_SLOW_OP(inc)
        DEFINE_SLOWCASE_SLOW_OP(dec)
        DEFINE_SLOWCASE_SLOW_OP(bitnot)
        DEFINE_SLOWCASE_SLOW_OP(bitand)
        DEFINE_SLOWCASE_SLOW_OP(bitor)
        DEFINE_SLOWCASE_SLOW_OP(bitxor)
        DEFINE_SLOWCASE_SLOW_OP(lshift)
        DEFINE_SLOWCASE_SLOW_OP(rshift)
        DEFINE_SLOWCASE_SLOW_OP(urshift)
        DEFINE_SLOWCASE_SLOW_OP(div)
        DEFINE_SLOWCASE_SLOW_OP(create_this)
        DEFINE_SLOWCASE_SLOW_OP(create_promise)
        DEFINE_SLOWCASE_SLOW_OP(create_generator)
        DEFINE_SLOWCASE_SLOW_OP(create_async_generator)
        DEFINE_SLOWCASE_SLOW_OP(to_this)
        DEFINE_SLOWCASE_SLOW_OP(to_primitive)
        DEFINE_SLOWCASE_SLOW_OP(to_number)
        DEFINE_SLOWCASE_SLOW_OP(to_numeric)
        DEFINE_SLOWCASE_SLOW_OP(to_string)
        DEFINE_SLOWCASE_SLOW_OP(to_object)
        DEFINE_SLOWCASE_SLOW_OP(not)
        DEFINE_SLOWCASE_SLOW_OP(stricteq)
        DEFINE_SLOWCASE_SLOW_OP(nstricteq)
        DEFINE_SLOWCASE_SLOW_OP(get_prototype_of)
        DEFINE_SLOWCASE_SLOW_OP(check_tdz)
        DEFINE_SLOWCASE_SLOW_OP(to_property_key)
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }

        if (JITInternal::verbose)
            dataLog("At ", firstTo, " slow: ", iter - m_slowCases.begin(), "\n");

        RELEASE_ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo.offset() != iter->to.offset(), "Not enough jumps linked in slow case codegen.");
        RELEASE_ASSERT_WITH_MESSAGE(firstTo.offset() == (iter - 1)->to.offset(), "Too many jumps linked in slow case codegen.");

        jump().linkTo(fastPathResumePoint(), this);
        ++bytecodeCountHavingSlowCase;

        if (UNLIKELY(sizeMarker)) {
            m_bytecodeIndex = BytecodeIndex(m_bytecodeIndex.offset() + currentInstruction->size());
            m_vm->jitSizeStatistics->markEnd(WTFMove(*sizeMarker), *this);
        }
    }

    RELEASE_ASSERT(bytecodeCountHavingSlowCase == m_bytecodeCountHavingSlowCase);
    RELEASE_ASSERT(m_getByIdIndex == m_getByIds.size());
    RELEASE_ASSERT(m_getByIdWithThisIndex == m_getByIdsWithThis.size());
    RELEASE_ASSERT(m_putByIdIndex == m_putByIds.size());
    RELEASE_ASSERT(m_putByValIndex == m_putByVals.size());
    RELEASE_ASSERT(m_inByIdIndex == m_inByIds.size());
    RELEASE_ASSERT(m_instanceOfIndex == m_instanceOfs.size());
    RELEASE_ASSERT(m_privateBrandAccessIndex == m_privateBrandAccesses.size());
    RELEASE_ASSERT(m_callLinkInfoIndex == m_callCompilationInfo.size());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeIndex = BytecodeIndex();
#endif
}

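// Baseline code keeps the metadata table pinned in s_metadataGPR and the JIT data
// (which backs the constant pool) in s_constantsGPR for the duration of the
// function; the prologue loads both here from the CodeBlock in the frame header.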
void JIT::emitMaterializeMetadataAndConstantPoolRegisters()
{
    loadPtr(addressFor(CallFrameSlot::codeBlock), regT0);
    loadPtr(Address(regT0, CodeBlock::offsetOfMetadataTable()), s_metadataGPR);
    loadPtr(Address(regT0, CodeBlock::offsetOfJITData()), s_constantsGPR);
}

void JIT::emitSaveCalleeSaves()
{
    Base::emitSaveCalleeSavesFor(&RegisterAtOffsetList::llintBaselineCalleeSaveRegisters());
}

void JIT::emitRestoreCalleeSaves()
{
    Base::emitRestoreCalleeSavesFor(&RegisterAtOffsetList::llintBaselineCalleeSaveRegisters());
}

#if ASSERT_ENABLED
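// Shared thunk behind the per-bytecode consistency checks emitted in
// privateCompileMainPass(): it recomputes what the stack pointer and the pinned
// metadata/constants registers should hold, given the CodeBlock in the frame
// header, and breakpoints on any mismatch. On entry, regT0 carries the expected
// frame size in Registers.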
MacroAssemblerCodeRef<JITThunkPtrTag> JIT::consistencyCheckGenerator(VM&)
{
    CCallHelpers jit;

    constexpr GPRReg stackOffsetGPR = regT0; // Incoming
    constexpr GPRReg expectedStackPointerGPR = regT1;
    constexpr GPRReg expectedMetadataGPR = regT2;
    constexpr GPRReg expectedConstantsGPR = regT3;

    jit.tagReturnAddress();

    jit.mul32(TrustedImm32(sizeof(Register)), stackOffsetGPR, stackOffsetGPR);
    jit.subPtr(callFrameRegister, stackOffsetGPR, expectedStackPointerGPR);
    // Fix up in case the call sequence (from the op) changed the stack pointer, e.g. on x86.
    if (constexpr size_t delta = sizeof(CallerFrameAndPC) - prologueStackPointerDelta())
        jit.subPtr(TrustedImm32(delta), expectedStackPointerGPR);

    jit.loadPtr(addressFor(CallFrameSlot::codeBlock), expectedConstantsGPR);
    jit.loadPtr(Address(expectedConstantsGPR, CodeBlock::offsetOfMetadataTable()), expectedMetadataGPR);
    jit.loadPtr(Address(expectedConstantsGPR, CodeBlock::offsetOfJITData()), expectedConstantsGPR);

    auto stackPointerOK = jit.branchPtr(Equal, expectedStackPointerGPR, stackPointerRegister);
    jit.breakpoint();
    stackPointerOK.link(&jit);

    auto metadataOK = jit.branchPtr(Equal, expectedMetadataGPR, s_metadataGPR);
    jit.breakpoint();
    metadataOK.link(&jit);

    auto constantsOK = jit.branchPtr(Equal, expectedConstantsGPR, s_constantsGPR);
    jit.breakpoint();
    constantsOK.link(&jit);

    jit.ret();

    LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, LinkBuffer::Profile::ExtraCTIThunk);
    return FINALIZE_THUNK(patchBuffer, JITThunkPtrTag, "Baseline: generateConsistencyCheck");
}

void JIT::emitConsistencyCheck()
{
    ASSERT(!m_consistencyCheckLabel.isSet());
    m_consistencyCheckLabel = label();
    move(TrustedImm32(-stackPointerOffsetFor(m_unlinkedCodeBlock)), regT0);
    m_bytecodeIndex = BytecodeIndex(0);
    emitNakedNearTailCall(vm().getCTIStub(consistencyCheckGenerator).retaggedCode<NoPtrTag>());
    m_bytecodeIndex = BytecodeIndex(); // Reset this, in order to guard its use with ASSERTs.
}
#endif

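// Top-level codegen driver. Decides the optimization/profiling policy from the
// DFG capability level, emits the prologue (stack check, callee saves, tag
// registers, argument value profiling) plus the arity-fixup entry point, runs the
// three compile passes, and links everything into a LinkBuffer. Finalization
// happens separately, on the main thread.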
void JIT::compileAndLinkWithoutFinalizing(JITCompilationEffort effort)
{
    DFG::CapabilityLevel level = m_profiledCodeBlock->capabilityLevel();
    switch (level) {
    case DFG::CannotCompile:
        m_canBeOptimized = false;
        m_shouldEmitProfiling = false;
        break;
    case DFG::CanCompile:
    case DFG::CanCompileAndInline:
        m_canBeOptimized = true;
        m_shouldEmitProfiling = true;
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }

    if (m_unlinkedCodeBlock->numberOfUnlinkedSwitchJumpTables() || m_unlinkedCodeBlock->numberOfUnlinkedStringSwitchJumpTables()) {
        if (m_unlinkedCodeBlock->numberOfUnlinkedSwitchJumpTables())
            m_switchJumpTables = FixedVector<SimpleJumpTable>(m_unlinkedCodeBlock->numberOfUnlinkedSwitchJumpTables());
        if (m_unlinkedCodeBlock->numberOfUnlinkedStringSwitchJumpTables())
            m_stringSwitchJumpTables = FixedVector<StringJumpTable>(m_unlinkedCodeBlock->numberOfUnlinkedStringSwitchJumpTables());
    }

    if (UNLIKELY(Options::dumpDisassembly() || (m_vm->m_perBytecodeProfiler && Options::disassembleBaselineForProfiler()))) {
        // FIXME: build a disassembler off of UnlinkedCodeBlock.
        m_disassembler = makeUnique<JITDisassembler>(m_profiledCodeBlock);
    }
    if (UNLIKELY(m_vm->m_perBytecodeProfiler)) {
        // FIXME: build profiler disassembler off UnlinkedCodeBlock.
        m_compilation = adoptRef(
            new Profiler::Compilation(
                m_vm->m_perBytecodeProfiler->ensureBytecodesFor(m_profiledCodeBlock),
                Profiler::Baseline));
        m_compilation->addProfiledBytecodes(*m_vm->m_perBytecodeProfiler, m_profiledCodeBlock);
    }

    m_pcToCodeOriginMapBuilder.appendItem(label(), CodeOrigin(BytecodeIndex(0)));

    std::optional<JITSizeStatistics::Marker> sizeMarker;
    if (UNLIKELY(Options::dumpBaselineJITSizeStatistics()))
        sizeMarker = m_vm->jitSizeStatistics->markStart("Baseline_prologue"_s, *this);

    Label entryLabel(this);
    if (m_disassembler)
        m_disassembler->setStartOfCode(entryLabel);

    // Just add a little bit of randomness to the codegen.
    if (random() & 1)
        nop();

    emitFunctionPrologue();
    jitAssertCodeBlockOnCallFrameWithType(regT2, JITType::BaselineJIT);

    Label beginLabel(this);

    int frameTopOffset = stackPointerOffsetFor(m_unlinkedCodeBlock) * sizeof(Register);
    unsigned maxFrameSize = -frameTopOffset;
    addPtr(TrustedImm32(frameTopOffset), callFrameRegister, regT1);
    JumpList stackOverflow;
    if (UNLIKELY(maxFrameSize > Options::reservedZoneSize()))
        stackOverflow.append(branchPtr(Above, regT1, callFrameRegister));
    stackOverflow.append(branchPtr(Above, AbsoluteAddress(m_vm->addressOfSoftStackLimit()), regT1));

    move(regT1, stackPointerRegister);
    checkStackPointerAlignment();

    emitSaveCalleeSaves();
    emitMaterializeTagCheckRegisters();
    emitMaterializeMetadataAndConstantPoolRegisters();

    if (m_unlinkedCodeBlock->codeType() == FunctionCode) {
        ASSERT(!m_bytecodeIndex);
        if (shouldEmitProfiling() && (!m_unlinkedCodeBlock->isConstructor() || m_unlinkedCodeBlock->numParameters() > 1)) {
            emitGetFromCallFrameHeaderPtr(CallFrameSlot::codeBlock, regT2);
            loadPtr(Address(regT2, CodeBlock::offsetOfArgumentValueProfiles() + FixedVector<ValueProfile>::offsetOfStorage()), regT2);

            for (unsigned argument = 0; argument < m_unlinkedCodeBlock->numParameters(); ++argument) {
                // If this is a constructor, then we want to put in a dummy profiling site (to
                // keep things consistent) but we don't actually want to record the dummy value.
                if (m_unlinkedCodeBlock->isConstructor() && !argument)
                    continue;
                int offset = CallFrame::argumentOffsetIncludingThis(argument) * static_cast<int>(sizeof(Register));
                loadValue(Address(callFrameRegister, offset), jsRegT10);
                storeValue(jsRegT10, Address(regT2, FixedVector<ValueProfile>::Storage::offsetOfData() + argument * sizeof(ValueProfile) + ValueProfile::offsetOfFirstBucket()));
            }
        }
    }

    RELEASE_ASSERT(!JITCode::isJIT(m_profiledCodeBlock->jitType()));

    if (UNLIKELY(sizeMarker))
        m_vm->jitSizeStatistics->markEnd(WTFMove(*sizeMarker), *this);

    privateCompileMainPass();
    privateCompileLinkPass();
    privateCompileSlowCases();

    if (m_disassembler)
        m_disassembler->setEndOfSlowPath(label());
    m_pcToCodeOriginMapBuilder.appendItem(label(), PCToCodeOriginMapBuilder::defaultCodeOrigin());

#if ASSERT_ENABLED
    emitConsistencyCheck();
#endif

    stackOverflow.link(this);
    m_bytecodeIndex = BytecodeIndex(0);
    if (maxFrameExtentForSlowPathCall)
        addPtr(TrustedImm32(-static_cast<int32_t>(maxFrameExtentForSlowPathCall)), stackPointerRegister);
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::codeBlock, regT0);
    callOperationWithCallFrameRollbackOnException(operationThrowStackOverflowError, regT0);

    // If the number of parameters is 1, we never require arity fixup.
    bool requiresArityFixup = m_unlinkedCodeBlock->numParameters() != 1;
    if (m_unlinkedCodeBlock->codeType() == FunctionCode && requiresArityFixup) {
        m_arityCheck = label();

        emitFunctionPrologue();
        RELEASE_ASSERT(m_unlinkedCodeBlock->codeType() == FunctionCode);
        jitAssertCodeBlockOnCallFrameWithType(regT2, JITType::BaselineJIT);
        emitGetFromCallFrameHeaderPtr(CallFrameSlot::codeBlock, regT0);
        store8(TrustedImm32(0), Address(regT0, CodeBlock::offsetOfShouldAlwaysBeInlined()));

        load32(payloadFor(CallFrameSlot::argumentCountIncludingThis), regT1);
        branch32(AboveOrEqual, regT1, TrustedImm32(m_unlinkedCodeBlock->numParameters())).linkTo(beginLabel, this);

        m_bytecodeIndex = BytecodeIndex(0);

        if (maxFrameExtentForSlowPathCall)
            addPtr(TrustedImm32(-static_cast<int32_t>(maxFrameExtentForSlowPathCall)), stackPointerRegister);
        loadPtr(Address(regT0, CodeBlock::offsetOfGlobalObject()), argumentGPR0);
        callOperationWithCallFrameRollbackOnException(m_unlinkedCodeBlock->isConstructor() ? operationConstructArityCheck : operationCallArityCheck, argumentGPR0);
        if (maxFrameExtentForSlowPathCall)
            addPtr(TrustedImm32(maxFrameExtentForSlowPathCall), stackPointerRegister);
        branchTest32(Zero, returnValueGPR).linkTo(beginLabel, this);
        move(returnValueGPR, GPRInfo::argumentGPR0);
        emitNakedNearCall(m_vm->getCTIStub(arityFixupGenerator).retaggedCode<NoPtrTag>());

#if ASSERT_ENABLED
        m_bytecodeIndex = BytecodeIndex(); // Reset this, in order to guard its use with ASSERTs.
#endif

        jump(beginLabel);
    } else
        m_arityCheck = entryLabel; // Never require arity fixup.

    ASSERT(m_jmpTable.isEmpty());

    if (m_disassembler)
        m_disassembler->setEndOfCode(label());
    m_pcToCodeOriginMapBuilder.appendItem(label(), PCToCodeOriginMapBuilder::defaultCodeOrigin());

    m_linkBuffer = std::unique_ptr<LinkBuffer>(new LinkBuffer(*this, m_unlinkedCodeBlock, LinkBuffer::Profile::BaselineJIT, effort));
    link();
}

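// Resolves everything that was recorded as bytecode-relative into machine-code
// locations: switch jump tables (note a recorded branch offset of 0 means "use
// the default target"; see the ternaries below), exception handlers, near/far
// calls, IC done/slow-path locations, and the bytecode-to-machine-code map. The
// result is packaged up as a shareable BaselineJITCode.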
void JIT::link()
{
    LinkBuffer& patchBuffer = *m_linkBuffer;

    if (patchBuffer.didFailToAllocate())
        return;

    // Translate vPC offsets into addresses in JIT generated code, for switch tables.
    for (auto& record : m_switches) {
        unsigned bytecodeOffset = record.bytecodeIndex.offset();
        unsigned tableIndex = record.tableIndex;

        switch (record.type) {
        case SwitchRecord::Immediate:
        case SwitchRecord::Character: {
            const UnlinkedSimpleJumpTable& unlinkedTable = m_unlinkedCodeBlock->unlinkedSwitchJumpTable(tableIndex);
            SimpleJumpTable& linkedTable = m_switchJumpTables[tableIndex];
            linkedTable.m_ctiDefault = patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + record.defaultOffset]);
            for (unsigned j = 0; j < unlinkedTable.m_branchOffsets.size(); ++j) {
                unsigned offset = unlinkedTable.m_branchOffsets[j];
                linkedTable.m_ctiOffsets[j] = offset
                    ? patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + offset])
                    : linkedTable.m_ctiDefault;
            }
            break;
        }

        case SwitchRecord::String: {
            const UnlinkedStringJumpTable& unlinkedTable = m_unlinkedCodeBlock->unlinkedStringSwitchJumpTable(tableIndex);
            StringJumpTable& linkedTable = m_stringSwitchJumpTables[tableIndex];
            auto ctiDefault = patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + record.defaultOffset]);
            for (auto& location : unlinkedTable.m_offsetTable.values()) {
                unsigned offset = location.m_branchOffset;
                linkedTable.m_ctiOffsets[location.m_indexInTable] = offset
                    ? patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + offset])
                    : ctiDefault;
            }
            linkedTable.m_ctiOffsets[unlinkedTable.m_offsetTable.size()] = ctiDefault;
            break;
        }
        }
    }

    if (!m_exceptionChecks.empty())
        patchBuffer.link(m_exceptionChecks, CodeLocationLabel(vm().getCTIStub(handleExceptionGenerator).retaggedCode<NoPtrTag>()));
    if (!m_exceptionChecksWithCallFrameRollback.empty())
        patchBuffer.link(m_exceptionChecksWithCallFrameRollback, CodeLocationLabel(vm().getCTIStub(handleExceptionWithCallFrameRollbackGenerator).retaggedCode<NoPtrTag>()));

    for (auto& record : m_nearJumps) {
        if (record.target)
            patchBuffer.link(record.from, record.target);
    }
    for (auto& record : m_nearCalls) {
        if (record.callee)
            patchBuffer.link(record.from, record.callee);
    }
    for (auto& record : m_farCalls) {
        if (record.callee)
            patchBuffer.link(record.from, record.callee);
    }

#if ASSERT_ENABLED
    const auto consistencyCheck = patchBuffer.locationOf<JSInternalPtrTag>(m_consistencyCheckLabel);
    for (auto& call : m_consistencyCheckCalls)
        patchBuffer.link<JSInternalPtrTag>(call, consistencyCheck);
#endif

    auto finalizeICs = [&] (auto& generators) {
        for (auto& gen : generators) {
            gen.m_unlinkedStubInfo->doneLocation = patchBuffer.locationOf<JSInternalPtrTag>(gen.m_done);
            gen.m_unlinkedStubInfo->slowPathStartLocation = patchBuffer.locationOf<JITStubRoutinePtrTag>(gen.m_slowPathBegin);
        }
    };

    finalizeICs(m_getByIds);
    finalizeICs(m_getByVals);
    finalizeICs(m_getByIdsWithThis);
    finalizeICs(m_putByIds);
    finalizeICs(m_putByVals);
    finalizeICs(m_delByIds);
    finalizeICs(m_delByVals);
    finalizeICs(m_inByIds);
    finalizeICs(m_inByVals);
    finalizeICs(m_instanceOfs);
    finalizeICs(m_privateBrandAccesses);

    for (auto& compilationInfo : m_callCompilationInfo) {
        UnlinkedCallLinkInfo& info = *compilationInfo.unlinkedCallLinkInfo;
        info.doneLocation = patchBuffer.locationOf<JSInternalPtrTag>(compilationInfo.doneLocation);
    }

    JITCodeMapBuilder jitCodeMapBuilder;
    for (unsigned bytecodeOffset = 0; bytecodeOffset < m_labels.size(); ++bytecodeOffset) {
        if (m_labels[bytecodeOffset].isSet())
            jitCodeMapBuilder.append(BytecodeIndex(bytecodeOffset), patchBuffer.locationOf<JSEntryPtrTag>(m_labels[bytecodeOffset]));
    }

    if (UNLIKELY(Options::dumpDisassembly())) {
        m_disassembler->dump(patchBuffer);
        patchBuffer.didAlreadyDisassemble();
    }

    if (UNLIKELY(m_compilation)) {
        // FIXME: should we make the bytecode profiler know about UnlinkedCodeBlock?
        if (Options::disassembleBaselineForProfiler())
            m_disassembler->reportToProfiler(m_compilation.get(), patchBuffer);
        m_vm->m_perBytecodeProfiler->addCompilation(m_profiledCodeBlock, *m_compilation);
    }

    if (m_pcToCodeOriginMapBuilder.didBuildMapping())
        m_pcToCodeOriginMap = makeUnique<PCToCodeOriginMap>(WTFMove(m_pcToCodeOriginMapBuilder), patchBuffer);

    // FIXME: Make a version of CodeBlockWithJITType that knows about UnlinkedCodeBlock.
    CodeRef<JSEntryPtrTag> result = FINALIZE_CODE(
        patchBuffer, JSEntryPtrTag,
        "Baseline JIT code for %s", toCString(CodeBlockWithJITType(m_profiledCodeBlock, JITType::BaselineJIT)).data());

    MacroAssemblerCodePtr<JSEntryPtrTag> withArityCheck = patchBuffer.locationOf<JSEntryPtrTag>(m_arityCheck);
    m_jitCode = adoptRef(*new BaselineJITCode(result, withArityCheck));

    m_jitCode->m_unlinkedCalls = FixedVector<UnlinkedCallLinkInfo>(m_unlinkedCalls.size());
    if (m_jitCode->m_unlinkedCalls.size())
        std::move(m_unlinkedCalls.begin(), m_unlinkedCalls.end(), m_jitCode->m_unlinkedCalls.begin());
    m_jitCode->m_unlinkedStubInfos = FixedVector<BaselineUnlinkedStructureStubInfo>(m_unlinkedStubInfos.size());
    if (m_jitCode->m_unlinkedStubInfos.size())
        std::move(m_unlinkedStubInfos.begin(), m_unlinkedStubInfos.end(), m_jitCode->m_unlinkedStubInfos.begin());
    m_jitCode->m_switchJumpTables = WTFMove(m_switchJumpTables);
    m_jitCode->m_stringSwitchJumpTables = WTFMove(m_stringSwitchJumpTables);
    m_jitCode->m_jitCodeMap = jitCodeMapBuilder.finalize();
    m_jitCode->adoptMathICs(m_mathICs);
    m_jitCode->m_constantPool = WTFMove(m_constantPool);
    m_jitCode->m_isShareable = m_isShareable;

    if (JITInternal::verbose)
        dataLogF("JIT generated code for %p at [%p, %p).\n", m_unlinkedCodeBlock, result.executableMemory()->start().untaggedPtr(), result.executableMemory()->end().untaggedPtr());
}

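// Runs on the main thread only: flushes the LinkBuffer's deferred finalization
// tasks, records code-size statistics, and installs the finished BaselineJITCode
// on the given CodeBlock.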
CompilationResult JIT::finalizeOnMainThread(CodeBlock* codeBlock)
{
    RELEASE_ASSERT(!isCompilationThread());

    if (!m_jitCode)
        return CompilationFailed;

    m_linkBuffer->runMainThreadFinalizationTasks();

    if (m_pcToCodeOriginMap)
        m_jitCode->m_pcToCodeOriginMap = WTFMove(m_pcToCodeOriginMap);

    m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT->add(
        static_cast<double>(m_jitCode->size()) /
        static_cast<double>(m_unlinkedCodeBlock->instructionsSize()));

    codeBlock->setupWithUnlinkedBaselineCode(m_jitCode.releaseNonNull());

    return CompilationSuccessful;
}

size_t JIT::codeSize() const
{
    if (!m_linkBuffer)
        return 0;
    return m_linkBuffer->size();
}

CompilationResult JIT::privateCompile(CodeBlock* codeBlock, JITCompilationEffort effort)
{
    doMainThreadPreparationBeforeCompile();
    compileAndLinkWithoutFinalizing(effort);
    return finalizeOnMainThread(codeBlock);
}

void JIT::doMainThreadPreparationBeforeCompile()
{
    // This ensures that we have the most up-to-date type information when performing typecheck optimizations for op_profile_type.
    if (m_vm->typeProfiler())
        m_vm->typeProfilerLog()->processLogEntries(*m_vm, "Preparing for JIT compilation."_s);
}

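// Frame sizing, roughly: the frame must cover all callee locals plus the
// worst-case scratch area a slow-path call can use, rounded so the resulting
// stack pointer stays properly aligned. stackPointerOffsetFor() then converts
// that register count into the (negative) offset of the stack pointer from the
// frame pointer.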
unsigned JIT::frameRegisterCountFor(UnlinkedCodeBlock* codeBlock)
{
    ASSERT(static_cast<unsigned>(codeBlock->numCalleeLocals()) == WTF::roundUpToMultipleOf(stackAlignmentRegisters(), static_cast<unsigned>(codeBlock->numCalleeLocals())));

    return roundLocalRegisterCountForFramePointerOffset(codeBlock->numCalleeLocals() + maxFrameExtentForSlowPathCallInRegisters);
}

unsigned JIT::frameRegisterCountFor(CodeBlock* codeBlock)
{
    return frameRegisterCountFor(codeBlock->unlinkedCodeBlock());
}

int JIT::stackPointerOffsetFor(UnlinkedCodeBlock* codeBlock)
{
    return virtualRegisterForLocal(frameRegisterCountFor(codeBlock) - 1).offset();
}

int JIT::stackPointerOffsetFor(CodeBlock* codeBlock)
{
    return stackPointerOffsetFor(codeBlock->unlinkedCodeBlock());
}

HashMap<CString, Seconds> JIT::compileTimeStats()
{
    HashMap<CString, Seconds> result;
    if (Options::reportTotalCompileTimes()) {
        result.add("Total Compile Time", totalCompileTime());
        result.add("Baseline Compile Time", totalBaselineCompileTime);
#if ENABLE(DFG_JIT)
        result.add("DFG Compile Time", totalDFGCompileTime);
#if ENABLE(FTL_JIT)
        result.add("FTL Compile Time", totalFTLCompileTime);
        result.add("FTL (DFG) Compile Time", totalFTLDFGCompileTime);
        result.add("FTL (B3) Compile Time", totalFTLB3CompileTime);
#endif // ENABLE(FTL_JIT)
#endif // ENABLE(DFG_JIT)
    }
    return result;
}

Seconds JIT::totalCompileTime()
{
    return totalBaselineCompileTime + totalDFGCompileTime + totalFTLCompileTime;
}

} // namespace JSC

#endif // ENABLE(JIT)