source: webkit/trunk/Source/JavaScriptCore/jit/JITThunks.cpp

Last change on this file was 290647, checked in by [email protected], 3 years ago

[JSC] Port EXTRA_CTI_THUNKS to all platforms
https://bugs.webkit.org/show_bug.cgi?id=233822

Patch by Geza Lore <Geza Lore> on 2022-03-01
Reviewed by Saam Barati.

Source/JavaScriptCore:

Port and enable all code paths under #ifdef ENABLE(EXTRA_CTI_THUNKS)
on all platforms, and remove the now unused code paths.

To port the extra thunks to all platforms, it was necessary to allow
them to make function calls to C++ slow path operations, which on some
platforms require passing arguments on the stack. To support this,
CCallHelpers::emitCTIThunkPrologue and
CCallHelpers::emitCTIThunkEpilogue are introduced; they allocate the
additional stack space on platforms where it is needed.
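A minimal sketch of the idea (illustrative only: the real helpers live in
jit/CCallHelpers.h and also handle return-address tagging and other per-CPU
details; the byte count and CPU condition below are hypothetical):

    // Reserve scratch stack space around the slow path C call on targets
    // whose C calling convention passes some arguments on the stack.
    static void emitCTIThunkPrologueSketch(CCallHelpers& jit)
    {
    #if !(CPU(X86_64) || CPU(ARM64)) // hypothetical condition
        constexpr int extraStackBytes = 16; // hypothetical; keeps 16-byte alignment
        jit.subPtr(CCallHelpers::TrustedImm32(extraStackBytes), CCallHelpers::stackPointerRegister);
    #endif
    }

    static void emitCTIThunkEpilogueSketch(CCallHelpers& jit)
    {
    #if !(CPU(X86_64) || CPU(ARM64))
        constexpr int extraStackBytes = 16;
        jit.addPtr(CCallHelpers::TrustedImm32(extraStackBytes), CCallHelpers::stackPointerRegister);
    #endif
    }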

Additionally, the thunks that require subsequent exception checking
now tail call directly to the exception check thunk, rather than
returning to the baseline code and making a separate call to the
exception check thunk. This saves an extra call site in the generated
baseline ops and reduces code size on all platforms (~1.5% reduction on x86_64).
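The shape of the change at each slow-path site, in illustrative
pseudo-assembly (not the actual emitted code):

    ; Before: two call sites in every baseline op
    call slow_path_thunk        ; does the C call, then returns here
    call check_exception_thunk  ; separate exception check

    ; After: one call site; the thunk itself ends in a tail call
    call slow_path_thunk        ; ...which finishes with:
                                ;     jmp check_exception_thunk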

Also factored out the statically allocated register definitions for
baseline ops into BaselineJITRegisters.h, as there are now quite a lot
of them and some are somewhat elaborate. This necessitates moving the
noOverlap and preferredArgumentGPR/preferredArgumentJSR constexpr
functions out of their previous place, so these now live in
GPRInfo.h.
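For illustration, a standalone sketch of the kind of compile-time check
noOverlap provides (the real implementation in GPRInfo.h also understands
JSValueRegs and invalid registers; all names and types below are hypothetical
stand-ins):

    #include <initializer_list>

    using Reg = int; // stands in for GPRReg in this sketch

    constexpr bool noOverlapImpl(std::initializer_list<Reg> regs)
    {
        for (auto i = regs.begin(); i != regs.end(); ++i) {
            for (auto j = i + 1; j != regs.end(); ++j) {
                if (*i == *j)
                    return false; // two arguments were assigned the same register
            }
        }
        return true;
    }

    template<typename... Regs>
    constexpr bool noOverlap(Regs... regs) { return noOverlapImpl({ regs... }); }

    // A register assignment for a baseline op can then be validated at compile time:
    static_assert(noOverlap(0, 1, 2), "register assignments must not alias");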

Enabling the extra CTI thunks on ARMv7/Thumb-2 saves about 25%
baseline code size, according to --dumpLinkBufferStats.

  • assembler/MacroAssembler.h:

(JSC::MacroAssembler::subPtr):

  • assembler/MacroAssemblerARMv7.h:

(JSC::MacroAssemblerARMv7::scratchRegister):

  • assembler/MacroAssemblerX86_64.h:

(JSC::MacroAssemblerX86_64::sub64):

  • bytecode/PolymorphicAccess.cpp:

(JSC::AccessGenerationState::emitExplicitExceptionHandler):

  • bytecode/StructureStubInfo.cpp:

(JSC::StructureStubInfo::initializeFromUnlinkedStructureStubInfo):

  • dfg/DFGJITCompiler.cpp:

(JSC::DFG::JITCompiler::link):
(JSC::DFG::JITCompiler::compile):
(JSC::DFG::JITCompiler::compileFunction):

  • dfg/DFGJITCompiler.h:
  • ftl/FTLCompile.cpp:

(JSC::FTL::compile):

  • ftl/FTLLink.cpp:

(JSC::FTL::link):

  • jit/CCallHelpers.h:

(JSC::CCallHelpers::emitCTIThunkPrologue):
(JSC::CCallHelpers::emitCTIThunkEpilogue):

  • jit/JIT.cpp:

(JSC::JIT::emitSlowCaseCall):
(JSC::JIT::privateCompileSlowCases):
(JSC::JIT::compileAndLinkWithoutFinalizing):
(JSC::JIT::link):

  • jit/JIT.h:
  • jit/JITArithmetic.cpp:

(JSC::JIT::emitSlow_op_mod):
(JSC::JIT::emit_op_mod):
(JSC::JIT::emit_op_div):

  • jit/JITCall.cpp:

(JSC::JIT::emit_op_iterator_open):
(JSC::JIT::emit_op_iterator_next):
(JSC::JIT::emitSlow_op_iterator_next):

  • jit/JITInlineCacheGenerator.cpp:

(JSC::JITGetByIdGenerator::generateBaselineDataICFastPath):
(JSC::JITGetByIdWithThisGenerator::generateBaselineDataICFastPath):
(JSC::JITPutByIdGenerator::generateBaselineDataICFastPath):

  • jit/JITInlineCacheGenerator.h:
  • jit/JITOpcodes.cpp:

(JSC::JIT::emit_op_jfalse):
(JSC::JIT::valueIsFalseyGenerator):
(JSC::JIT::emit_op_jtrue):
(JSC::JIT::valueIsTruthyGenerator):
(JSC::JIT::emit_op_throw):
(JSC::JIT::op_throw_handlerGenerator):
(JSC::JIT::emit_op_enter):
(JSC::JIT::op_enter_handlerGenerator):
(JSC::JIT::emitSlow_op_check_traps):
(JSC::JIT::op_check_traps_handlerGenerator):

  • jit/JITPropertyAccess.cpp:

(JSC::JIT::emit_op_get_by_val):
(JSC::JIT::generateGetByValSlowCase):
(JSC::JIT::slow_op_get_by_val_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emit_op_get_private_name):
(JSC::JIT::emitSlow_op_get_private_name):
(JSC::JIT::slow_op_get_private_name_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emit_op_set_private_brand):
(JSC::JIT::emitSlow_op_set_private_brand):
(JSC::JIT::emit_op_check_private_brand):
(JSC::JIT::emitSlow_op_check_private_brand):
(JSC::JIT::emit_op_put_by_val):
(JSC::JIT::emitSlow_op_put_by_val):
(JSC::JIT::slow_op_put_by_val_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emit_op_put_private_name):
(JSC::JIT::emitSlow_op_put_private_name):
(JSC::JIT::slow_op_put_private_name_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emit_op_del_by_id):
(JSC::JIT::emitSlow_op_del_by_id):
(JSC::JIT::slow_op_del_by_id_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emit_op_del_by_val):
(JSC::JIT::emitSlow_op_del_by_val):
(JSC::JIT::slow_op_del_by_val_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emit_op_try_get_by_id):
(JSC::JIT::emitSlow_op_try_get_by_id):
(JSC::JIT::emit_op_get_by_id_direct):
(JSC::JIT::emitSlow_op_get_by_id_direct):
(JSC::JIT::emit_op_get_by_id):
(JSC::JIT::emitSlow_op_get_by_id):
(JSC::JIT::slow_op_get_by_id_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emit_op_get_by_id_with_this):
(JSC::JIT::emitSlow_op_get_by_id_with_this):
(JSC::JIT::slow_op_get_by_id_with_this_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emit_op_put_by_id):
(JSC::JIT::emitSlow_op_put_by_id):
(JSC::JIT::slow_op_put_by_id_callSlowOperationThenCheckExceptionGenerator):
(JSC::JIT::emitSlow_op_in_by_id):
(JSC::JIT::emitSlow_op_in_by_val):
(JSC::JIT::emitHasPrivateSlow):
(JSC::JIT::emitSlow_op_has_private_name):
(JSC::JIT::emitSlow_op_has_private_brand):
(JSC::JIT::emitSlow_op_put_to_scope):
(JSC::JIT::slow_op_put_to_scopeGenerator):
(JSC::JIT::emit_op_get_property_enumerator):
(JSC::JIT::emit_op_enumerator_next):
(JSC::JIT::emit_enumerator_has_propertyImpl):
(JSC::JIT::emit_op_enumerator_get_by_val):
(JSC::JIT::emit_op_enumerator_in_by_val):
(JSC::JIT::emit_op_enumerator_has_own_property):

  • jit/JITThunks.cpp:
  • jit/JITThunks.h:
  • jit/SlowPathCall.cpp:

(JSC::JITSlowPathCall::call):
(JSC::JITSlowPathCall::generateThunk):

  • jit/SlowPathCall.h:

(JSC::JITSlowPathCall::JITSlowPathCall):

  • jit/ThunkGenerators.cpp:

(JSC::handleExceptionGenerator):
(JSC::checkExceptionGenerator):

  • jit/ThunkGenerators.h:

Source/WTF:

  • wtf/PlatformEnable.h:

Remove the EXTRA_CTI_THUNKS define (now always enabled on all platforms).

File size: 11.4 KB
/*
 * Copyright (C) 2012-2021 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITThunks.h"

#if ENABLE(JIT)

#include "CommonSlowPaths.h"
#include "JIT.h"
#include "JITCode.h"
#include "JSCJSValueInlines.h"
#include "SlowPathCall.h"
#include "ThunkGenerators.h"
#include "VM.h"
namespace JSC {

JITThunks::JITThunks()
{
}

JITThunks::~JITThunks()
{
}
static inline NativeExecutable& getMayBeDyingNativeExecutable(const Weak<NativeExecutable>& weak)
{
    // This never gets Deleted / Empty slots.
    WeakImpl* impl = weak.unsafeImpl();
    ASSERT(impl);
    // We have a callback that removes the entry when finalizing, so we never hold a Deallocated entry in the HashSet.
    ASSERT(impl->state() != WeakImpl::State::Deallocated);
    // Never use jsCast here. It is possible that this value is "Dead" but not yet "Finalized". In that case,
    // we can still access the non-JS data, as we do in the finalize callback.
    auto* executable = static_cast<NativeExecutable*>(impl->jsValue().asCell());
    ASSERT(executable);
    return *executable;
}

inline unsigned JITThunks::WeakNativeExecutableHash::hash(NativeExecutable* executable)
{
    return hash(executable->function(), executable->constructor(), executable->name());
}

inline unsigned JITThunks::WeakNativeExecutableHash::hash(const Weak<NativeExecutable>& key)
{
    return hash(&getMayBeDyingNativeExecutable(key));
}

inline bool JITThunks::WeakNativeExecutableHash::equal(NativeExecutable& a, NativeExecutable& b)
{
    if (&a == &b)
        return true;
    return a.function() == b.function() && a.constructor() == b.constructor() && a.name() == b.name();
}

inline bool JITThunks::WeakNativeExecutableHash::equal(const Weak<NativeExecutable>& a, const Weak<NativeExecutable>& b)
{
    return equal(getMayBeDyingNativeExecutable(a), getMayBeDyingNativeExecutable(b));
}

inline bool JITThunks::WeakNativeExecutableHash::equal(const Weak<NativeExecutable>& a, NativeExecutable* bExecutable)
{
    return equal(getMayBeDyingNativeExecutable(a), *bExecutable);
}

inline bool JITThunks::WeakNativeExecutableHash::equal(const Weak<NativeExecutable>& a, const HostFunctionKey& b)
{
    auto& aExecutable = getMayBeDyingNativeExecutable(a);
    return aExecutable.function() == std::get<0>(b) && aExecutable.constructor() == std::get<1>(b) && aExecutable.name() == std::get<2>(b);
}
MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeCall(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, nativeCallGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeConstruct(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, nativeConstructGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeTailCall(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, nativeTailCallGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeTailCallWithoutSavedTags(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, nativeTailCallWithoutSavedTagsGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiInternalFunctionCall(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, internalFunctionCallGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiInternalFunctionConstruct(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, internalFunctionConstructGenerator).code();
}
template <typename GenerateThunk>
MacroAssemblerCodeRef<JITThunkPtrTag> JITThunks::ctiStubImpl(ThunkGenerator key, GenerateThunk generateThunk)
{
    Locker locker { m_lock };

    auto handleEntry = [&] (Entry& entry) {
        if (entry.needsCrossModifyingCodeFence && !isCompilationThread()) {
            // The main thread will issue a crossModifyingCodeFence before running
            // any code a compiler thread generates, including any thunks it
            // generates. However, the main thread may grab a thunk the compiler
            // thread generated before we've issued that crossModifyingCodeFence.
            // Hence, we conservatively issue a crossModifyingCodeFence the first
            // time the main thread grabs a thunk generated on a compiler thread.
            WTF::crossModifyingCodeFence();
            entry.needsCrossModifyingCodeFence = false;
        }

        return MacroAssemblerCodeRef<JITThunkPtrTag>(*entry.handle);
    };

    {
        auto iter = m_ctiStubMap.find(key);
        if (iter != m_ctiStubMap.end())
            return handleEntry(iter->value);
    }

    // We do two lookups on first addition to the hash table because generateThunk may add to it.
    MacroAssemblerCodeRef<JITThunkPtrTag> codeRef = generateThunk();

    bool needsCrossModifyingCodeFence = isCompilationThread();
    auto addResult = m_ctiStubMap.add(key, Entry { PackedRefPtr<ExecutableMemoryHandle>(codeRef.executableMemory()), needsCrossModifyingCodeFence });
    RELEASE_ASSERT(addResult.isNewEntry); // Thunks aren't recursive, so anything we generated transitively shouldn't have generated 'key'.
    return handleEntry(addResult.iterator->value);
}
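
// Illustrative usage sketch (hypothetical call site; jitStubs is the VM's
// JITThunks instance): thunks are cached per generator function, so only the
// first lookup pays for code generation:
//
//     auto first = vm.jitStubs->ctiStub(vm, nativeCallGenerator); // generates
//     auto again = vm.jitStubs->ctiStub(vm, nativeCallGenerator); // cache hit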

MacroAssemblerCodeRef<JITThunkPtrTag> JITThunks::ctiStub(VM& vm, ThunkGenerator generator)
{
    return ctiStubImpl(generator, [&] {
        return generator(vm);
    });
}

MacroAssemblerCodeRef<JITThunkPtrTag> JITThunks::ctiSlowPathFunctionStub(VM& vm, SlowPathFunction slowPathFunction)
{
    auto key = bitwise_cast<ThunkGenerator>(slowPathFunction);
    return ctiStubImpl(key, [&] {
        return JITSlowPathCall::generateThunk(vm, slowPathFunction);
    });
}
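
// Note: ctiSlowPathFunctionStub reuses the slow path function pointer,
// bitwise_cast to ThunkGenerator, as an opaque key into m_ctiStubMap. This is
// safe because ctiStubImpl only compares keys for equality and never invokes
// them.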

struct JITThunks::HostKeySearcher {
    static unsigned hash(const HostFunctionKey& key) { return WeakNativeExecutableHash::hash(key); }
    static bool equal(const Weak<NativeExecutable>& a, const HostFunctionKey& b) { return WeakNativeExecutableHash::equal(a, b); }
};

struct JITThunks::NativeExecutableTranslator {
    static unsigned hash(NativeExecutable* key) { return WeakNativeExecutableHash::hash(key); }
    static bool equal(const Weak<NativeExecutable>& a, NativeExecutable* b) { return WeakNativeExecutableHash::equal(a, b); }
    static void translate(Weak<NativeExecutable>& location, NativeExecutable* executable, unsigned)
    {
        location = Weak<NativeExecutable>(executable, executable->vm().jitStubs.get());
    }
};

void JITThunks::finalize(Handle<Unknown> handle, void*)
{
    auto* nativeExecutable = static_cast<NativeExecutable*>(handle.get().asCell());
    auto hostFunctionKey = std::make_tuple(nativeExecutable->function(), nativeExecutable->constructor(), nativeExecutable->name());
    {
        DisallowGC disallowGC;
        auto iterator = m_nativeExecutableSet.find<HostKeySearcher>(hostFunctionKey);
        // Because this finalizer is called, we know we still have a dead Weak<> in m_nativeExecutableSet.
        ASSERT(iterator != m_nativeExecutableSet.end());
        ASSERT(iterator->unsafeImpl()->state() == WeakImpl::State::Finalized);
        m_nativeExecutableSet.remove(iterator);
    }
}

NativeExecutable* JITThunks::hostFunctionStub(VM& vm, TaggedNativeFunction function, TaggedNativeFunction constructor, const String& name)
{
    return hostFunctionStub(vm, function, constructor, nullptr, NoIntrinsic, nullptr, name);
}

NativeExecutable* JITThunks::hostFunctionStub(VM& vm, TaggedNativeFunction function, TaggedNativeFunction constructor, ThunkGenerator generator, Intrinsic intrinsic, const DOMJIT::Signature* signature, const String& name)
{
    ASSERT(!isCompilationThread());
    ASSERT(Options::useJIT());

    auto hostFunctionKey = std::make_tuple(function, constructor, name);
    {
        DisallowGC disallowGC;
        auto iterator = m_nativeExecutableSet.find<HostKeySearcher>(hostFunctionKey);
        if (iterator != m_nativeExecutableSet.end()) {
            // It is possible that this returns a Weak<> which is Dead but not yet finalized.
            // We should not reuse this reference to store the value created below, since
            // allocating the NativeExecutable can cause a GC, which changes this set.
            if (auto* executable = iterator->get())
                return executable;
        }
    }

    RefPtr<JITCode> forCall;
    if (generator) {
        MacroAssemblerCodeRef<JSEntryPtrTag> entry = generator(vm).retagged<JSEntryPtrTag>();
        forCall = adoptRef(new DirectJITCode(entry, entry.code(), JITType::HostCallThunk, intrinsic));
    } else if (signature)
        forCall = adoptRef(new NativeDOMJITCode(MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(ctiNativeCall(vm).retagged<JSEntryPtrTag>()), JITType::HostCallThunk, intrinsic, signature));
    else
        forCall = adoptRef(new NativeJITCode(MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(ctiNativeCall(vm).retagged<JSEntryPtrTag>()), JITType::HostCallThunk, intrinsic));

    Ref<JITCode> forConstruct = adoptRef(*new NativeJITCode(MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(ctiNativeConstruct(vm).retagged<JSEntryPtrTag>()), JITType::HostCallThunk, NoIntrinsic));

    NativeExecutable* nativeExecutable = NativeExecutable::create(vm, forCall.releaseNonNull(), function, WTFMove(forConstruct), constructor, name);
    {
        DisallowGC disallowGC;
        auto addResult = m_nativeExecutableSet.add<NativeExecutableTranslator>(nativeExecutable);
        if (!addResult.isNewEntry) {
            // Override the existing Weak<NativeExecutable> with the new one, since the old one is dead.
            ASSERT(!*addResult.iterator);
            *addResult.iterator = Weak<NativeExecutable>(nativeExecutable, this);
            ASSERT(*addResult.iterator);
#if ASSERT_ENABLED
            auto iterator = m_nativeExecutableSet.find<HostKeySearcher>(hostFunctionKey);
            ASSERT(iterator != m_nativeExecutableSet.end());
            ASSERT(iterator->get() == nativeExecutable);
            ASSERT(iterator->unsafeImpl()->state() == WeakImpl::State::Live);
#endif
        }
    }
    return nativeExecutable;
}

NativeExecutable* JITThunks::hostFunctionStub(VM& vm, TaggedNativeFunction function, ThunkGenerator generator, Intrinsic intrinsic, const String& name)
{
    return hostFunctionStub(vm, function, callHostFunctionAsConstructor, generator, intrinsic, nullptr, name);
}

} // namespace JSC

#endif // ENABLE(JIT)