/*
 * Copyright (C) 2012-2021 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITThunks.h"

#if ENABLE(JIT)

#include "CommonSlowPaths.h"
#include "JIT.h"
#include "JITCode.h"
#include "JSCJSValueInlines.h"
#include "SlowPathCall.h"
#include "ThunkGenerators.h"
#include "VM.h"

namespace JSC {

JITThunks::JITThunks()
{
}

JITThunks::~JITThunks()
{
}

static inline NativeExecutable& getMayBeDyingNativeExecutable(const Weak<NativeExecutable>& weak)
{
    // This never sees Deleted / Empty slots.
    WeakImpl* impl = weak.unsafeImpl();
    ASSERT(impl);
    // We have a callback that removes the entry when finalizing, so the HashSet never holds a Deallocated entry.
    ASSERT(impl->state() != WeakImpl::State::Deallocated);
    // Never use jsCast here. It is possible that this value is "Dead" but not yet "Finalized". In that case,
    // we can still access its non-JS data, just as we do in the finalize callback.
    auto* executable = static_cast<NativeExecutable*>(impl->jsValue().asCell());
    ASSERT(executable);
    return *executable;
}
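
// Note: a Weak<> handed to this helper can therefore be Live, Dead, or Finalized,
// but never Deallocated, because the finalize() callback below removes the entry
// from m_nativeExecutableSet before the WeakImpl can be deallocated.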

inline unsigned JITThunks::WeakNativeExecutableHash::hash(NativeExecutable* executable)
{
    return hash(executable->function(), executable->constructor(), executable->name());
}

inline unsigned JITThunks::WeakNativeExecutableHash::hash(const Weak<NativeExecutable>& key)
{
    return hash(&getMayBeDyingNativeExecutable(key));
}

inline bool JITThunks::WeakNativeExecutableHash::equal(NativeExecutable& a, NativeExecutable& b)
{
    if (&a == &b)
        return true;
    return a.function() == b.function() && a.constructor() == b.constructor() && a.name() == b.name();
}

inline bool JITThunks::WeakNativeExecutableHash::equal(const Weak<NativeExecutable>& a, const Weak<NativeExecutable>& b)
{
    return equal(getMayBeDyingNativeExecutable(a), getMayBeDyingNativeExecutable(b));
}

inline bool JITThunks::WeakNativeExecutableHash::equal(const Weak<NativeExecutable>& a, NativeExecutable* bExecutable)
{
    return equal(getMayBeDyingNativeExecutable(a), *bExecutable);
}

inline bool JITThunks::WeakNativeExecutableHash::equal(const Weak<NativeExecutable>& a, const HostFunctionKey& b)
{
    auto& aExecutable = getMayBeDyingNativeExecutable(a);
    return aExecutable.function() == std::get<0>(b) && aExecutable.constructor() == std::get<1>(b) && aExecutable.name() == std::get<2>(b);
}
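
// The overloads above let m_nativeExecutableSet be probed with a raw
// NativeExecutable* or a HostFunctionKey tuple (via HostKeySearcher and
// NativeExecutableTranslator below) without materializing a Weak<> just
// to perform the lookup.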

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeCall(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, nativeCallGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeConstruct(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, nativeConstructGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeTailCall(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, nativeTailCallGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeTailCallWithoutSavedTags(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, nativeTailCallWithoutSavedTagsGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiInternalFunctionCall(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, internalFunctionCallGenerator).code();
}

MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiInternalFunctionConstruct(VM& vm)
{
    ASSERT(Options::useJIT());
    return ctiStub(vm, internalFunctionConstructGenerator).code();
}

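// Each accessor above funnels through ctiStub(), which memoizes the generated
// thunk in m_ctiStubMap (see ctiStubImpl() below). A hypothetical call site,
// given a VM running with the JIT enabled, might look like:
//     MacroAssemblerCodePtr<JITThunkPtrTag> entry = vm.jitStubs->ctiNativeCall(vm);
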
template <typename GenerateThunk>
MacroAssemblerCodeRef<JITThunkPtrTag> JITThunks::ctiStubImpl(ThunkGenerator key, GenerateThunk generateThunk)
{
    Locker locker { m_lock };

    auto handleEntry = [&] (Entry& entry) {
        if (entry.needsCrossModifyingCodeFence && !isCompilationThread()) {
            // The main thread will issue a crossModifyingCodeFence before running
            // any code a compiler thread generates, including any thunks it
            // generates. However, the main thread may grab a thunk a compiler
            // thread generated before we've issued that crossModifyingCodeFence.
            // Hence, we conservatively issue a crossModifyingCodeFence the first
            // time the main thread grabs a thunk generated on a compiler thread.
            WTF::crossModifyingCodeFence();
            entry.needsCrossModifyingCodeFence = false;
        }

        return MacroAssemblerCodeRef<JITThunkPtrTag>(*entry.handle);
    };

    {
        auto iter = m_ctiStubMap.find(key);
        if (iter != m_ctiStubMap.end())
            return handleEntry(iter->value);
    }

    // We do two lookups on first addition to the hash table because generateThunk
    // may itself add entries to the table, which would invalidate any iterator we
    // held across the call.
    MacroAssemblerCodeRef<JITThunkPtrTag> codeRef = generateThunk();

    bool needsCrossModifyingCodeFence = isCompilationThread();
    auto addResult = m_ctiStubMap.add(key, Entry { PackedRefPtr<ExecutableMemoryHandle>(codeRef.executableMemory()), needsCrossModifyingCodeFence });
    RELEASE_ASSERT(addResult.isNewEntry); // Thunks aren't recursive, so anything we generated transitively shouldn't have generated 'key'.
    return handleEntry(addResult.iterator->value);
}

MacroAssemblerCodeRef<JITThunkPtrTag> JITThunks::ctiStub(VM& vm, ThunkGenerator generator)
{
    return ctiStubImpl(generator, [&] {
        return generator(vm);
    });
}

MacroAssemblerCodeRef<JITThunkPtrTag> JITThunks::ctiSlowPathFunctionStub(VM& vm, SlowPathFunction slowPathFunction)
{
    // The map is keyed by ThunkGenerator, so reinterpret the slow path function
    // pointer purely as a unique key; it is never called through this type.
    auto key = bitwise_cast<ThunkGenerator>(slowPathFunction);
    return ctiStubImpl(key, [&] {
        return JITSlowPathCall::generateThunk(vm, slowPathFunction);
    });
}
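
// A hypothetical caller of ctiSlowPathFunctionStub(), assuming some
// SlowPathFunction `slowOperation` (the name is illustrative): a JIT tier can
// branch to the shared thunk instead of emitting the slow-path call inline:
//     auto thunk = vm.jitStubs->ctiSlowPathFunctionStub(vm, slowOperation);
//     // ... emit a jump to thunk.code() from generated code ...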

struct JITThunks::HostKeySearcher {
    static unsigned hash(const HostFunctionKey& key) { return WeakNativeExecutableHash::hash(key); }
    static bool equal(const Weak<NativeExecutable>& a, const HostFunctionKey& b) { return WeakNativeExecutableHash::equal(a, b); }
};

struct JITThunks::NativeExecutableTranslator {
    static unsigned hash(NativeExecutable* key) { return WeakNativeExecutableHash::hash(key); }
    static bool equal(const Weak<NativeExecutable>& a, NativeExecutable* b) { return WeakNativeExecutableHash::equal(a, b); }
    static void translate(Weak<NativeExecutable>& location, NativeExecutable* executable, unsigned)
    {
        location = Weak<NativeExecutable>(executable, executable->vm().jitStubs.get());
    }
};

void JITThunks::finalize(Handle<Unknown> handle, void*)
{
    auto* nativeExecutable = static_cast<NativeExecutable*>(handle.get().asCell());
    auto hostFunctionKey = std::make_tuple(nativeExecutable->function(), nativeExecutable->constructor(), nativeExecutable->name());
    {
        DisallowGC disallowGC;
        auto iterator = m_nativeExecutableSet.find<HostKeySearcher>(hostFunctionKey);
        // The fact that this finalizer was called means a dead Weak<> for this executable is still in m_nativeExecutableSet.
        ASSERT(iterator != m_nativeExecutableSet.end());
        ASSERT(iterator->unsafeImpl()->state() == WeakImpl::State::Finalized);
        m_nativeExecutableSet.remove(iterator);
    }
}

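// A hypothetical call site for the overloads below, assuming a TaggedNativeFunction
// pair and a name are in hand (the triple of function, constructor, and name forms
// the cache key in m_nativeExecutableSet):
//     NativeExecutable* executable = vm.jitStubs->hostFunctionStub(vm, function, constructor, name);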
NativeExecutable* JITThunks::hostFunctionStub(VM& vm, TaggedNativeFunction function, TaggedNativeFunction constructor, const String& name)
{
    return hostFunctionStub(vm, function, constructor, nullptr, NoIntrinsic, nullptr, name);
}

NativeExecutable* JITThunks::hostFunctionStub(VM& vm, TaggedNativeFunction function, TaggedNativeFunction constructor, ThunkGenerator generator, Intrinsic intrinsic, const DOMJIT::Signature* signature, const String& name)
{
    ASSERT(!isCompilationThread());
    ASSERT(Options::useJIT());

    auto hostFunctionKey = std::make_tuple(function, constructor, name);
    {
        DisallowGC disallowGC;
        auto iterator = m_nativeExecutableSet.find<HostKeySearcher>(hostFunctionKey);
        if (iterator != m_nativeExecutableSet.end()) {
            // It is possible that this returns a Weak<> which is Dead but not yet finalized.
            // We must not reuse this iterator to store the value created below, since
            // allocating the NativeExecutable can trigger a GC, which mutates this set.
            if (auto* executable = iterator->get())
                return executable;
        }
    }

    RefPtr<JITCode> forCall;
    if (generator) {
        MacroAssemblerCodeRef<JSEntryPtrTag> entry = generator(vm).retagged<JSEntryPtrTag>();
        forCall = adoptRef(new DirectJITCode(entry, entry.code(), JITType::HostCallThunk, intrinsic));
    } else if (signature)
        forCall = adoptRef(new NativeDOMJITCode(MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(ctiNativeCall(vm).retagged<JSEntryPtrTag>()), JITType::HostCallThunk, intrinsic, signature));
    else
        forCall = adoptRef(new NativeJITCode(MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(ctiNativeCall(vm).retagged<JSEntryPtrTag>()), JITType::HostCallThunk, intrinsic));

    Ref<JITCode> forConstruct = adoptRef(*new NativeJITCode(MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(ctiNativeConstruct(vm).retagged<JSEntryPtrTag>()), JITType::HostCallThunk, NoIntrinsic));

    NativeExecutable* nativeExecutable = NativeExecutable::create(vm, forCall.releaseNonNull(), function, WTFMove(forConstruct), constructor, name);
    {
        DisallowGC disallowGC;
        auto addResult = m_nativeExecutableSet.add<NativeExecutableTranslator>(nativeExecutable);
        if (!addResult.isNewEntry) {
            // Overwrite the existing Weak<NativeExecutable> with the new one, since the existing one is dead.
            ASSERT(!*addResult.iterator);
            *addResult.iterator = Weak<NativeExecutable>(nativeExecutable, this);
            ASSERT(*addResult.iterator);
#if ASSERT_ENABLED
            auto iterator = m_nativeExecutableSet.find<HostKeySearcher>(hostFunctionKey);
            ASSERT(iterator != m_nativeExecutableSet.end());
            ASSERT(iterator->get() == nativeExecutable);
            ASSERT(iterator->unsafeImpl()->state() == WeakImpl::State::Live);
#endif
        }
    }
    return nativeExecutable;
}

NativeExecutable* JITThunks::hostFunctionStub(VM& vm, TaggedNativeFunction function, ThunkGenerator generator, Intrinsic intrinsic, const String& name)
{
    return hostFunctionStub(vm, function, callHostFunctionAsConstructor, generator, intrinsic, nullptr, name);
}

} // namespace JSC

#endif // ENABLE(JIT)