/*
 * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "PCToCodeOriginMap.h"

#if ENABLE(JIT)

#include "B3PCToOriginMap.h"
#include "DFGNode.h"
#include "LinkBuffer.h"
#include "WasmOpcodeOrigin.h"

#if COMPILER(MSVC)
// See https://msdn.microsoft.com/en-us/library/4wz07268.aspx
#pragma warning(disable: 4333)
#endif

namespace JSC {

namespace {

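// Appends integers to a contiguous byte buffer one byte at a time, least significant
// byte first. The buffer is allocated at a worst-case size and trimmed later with
// fastRealloc() once the final size is known.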
class DeltaCompressionBuilder {
public:
    DeltaCompressionBuilder(size_t maxSize)
        : m_offset(0)
        , m_maxSize(maxSize)
    {
        m_buffer = static_cast<uint8_t*>(fastMalloc(m_maxSize));
    }

    template <typename T>
    void write(T item)
    {
        RELEASE_ASSERT(m_offset + sizeof(T) <= m_maxSize);
        static constexpr uint8_t mask = std::numeric_limits<uint8_t>::max();
        for (unsigned i = 0; i < sizeof(T); i++) {
            *(m_buffer + m_offset) = static_cast<uint8_t>(item & mask);
            item = item >> (sizeof(uint8_t) * 8);
            m_offset += 1;
        }
    }

    uint8_t* m_buffer;
    size_t m_offset;
    size_t m_maxSize;
};

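// The counterpart to DeltaCompressionBuilder: reads integers back out of a compressed
// buffer by reassembling them from their little-endian bytes.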
class DeltaCompressionReader {
public:
    DeltaCompressionReader(uint8_t* buffer, size_t size)
        : m_buffer(buffer)
        , m_size(size)
        , m_offset(0)
    { }

    template <typename T>
    T read()
    {
        RELEASE_ASSERT(m_offset + sizeof(T) <= m_size);
        T result = 0;
        for (unsigned i = 0; i < sizeof(T); i++) {
            uint8_t bitsAsInt8 = *(m_buffer + m_offset);
            T bits = static_cast<T>(bitsAsInt8);
            bits = bits << (sizeof(uint8_t) * 8 * i);
            result |= bits;
            m_offset += 1;
        }
        return result;
    }

private:
    uint8_t* m_buffer;
    size_t m_size;
    size_t m_offset;
};

} // anonymous namespace

PCToCodeOriginMapBuilder::PCToCodeOriginMapBuilder(VM& vm)
    : m_shouldBuildMapping(vm.shouldBuilderPCToCodeOriginMapping())
{ }

PCToCodeOriginMapBuilder::PCToCodeOriginMapBuilder(PCToCodeOriginMapBuilder&& other)
    : m_codeRanges(WTFMove(other.m_codeRanges))
    , m_shouldBuildMapping(other.m_shouldBuildMapping)
{ }

#if ENABLE(FTL_JIT)
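// In the FTL, each B3 origin carries the DFG node it was lowered from (possibly null).
// Map each machine-code range to that node's semantic CodeOrigin, or to the default
// origin when no node is attached.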
PCToCodeOriginMapBuilder::PCToCodeOriginMapBuilder(JSTag, VM& vm, B3::PCToOriginMap b3PCToOriginMap)
    : m_shouldBuildMapping(vm.shouldBuilderPCToCodeOriginMapping())
{
    if (!m_shouldBuildMapping)
        return;

    for (const B3::PCToOriginMap::OriginRange& originRange : b3PCToOriginMap.ranges()) {
        DFG::Node* node = bitwise_cast<DFG::Node*>(originRange.origin.data());
        if (node)
            appendItem(originRange.label, node->origin.semantic);
        else
            appendItem(originRange.label, PCToCodeOriginMapBuilder::defaultCodeOrigin());
    }
}
#endif

#if ENABLE(WEBASSEMBLY_B3JIT)
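// For Wasm code there is no CodeOrigin to point at, so the Wasm::OpcodeOrigin's
// location is stashed in a BytecodeIndex instead.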
PCToCodeOriginMapBuilder::PCToCodeOriginMapBuilder(WasmTag, B3::PCToOriginMap b3PCToOriginMap)
    : m_shouldBuildMapping(true)
{
    for (const B3::PCToOriginMap::OriginRange& originRange : b3PCToOriginMap.ranges()) {
        B3::Origin b3Origin = originRange.origin;
        if (b3Origin) {
            Wasm::OpcodeOrigin wasmOrigin { b3Origin };
            // We stash the location into a BytecodeIndex.
            appendItem(originRange.label, CodeOrigin(BytecodeIndex(wasmOrigin.location())));
        } else
            appendItem(originRange.label, PCToCodeOriginMapBuilder::defaultCodeOrigin());
    }
}
#endif

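// Records that the code emitted from this label onward belongs to codeOrigin.
// Consecutive labels with the same origin are coalesced: the previous range is simply
// extended, and a new range only starts when the origin actually changes.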
void PCToCodeOriginMapBuilder::appendItem(MacroAssembler::Label label, const CodeOrigin& codeOrigin)
{
    if (!m_shouldBuildMapping)
        return;

    if (m_codeRanges.size()) {
        if (m_codeRanges.last().end == label)
            return;
        m_codeRanges.last().end = label;
        if (m_codeRanges.last().codeOrigin == codeOrigin || !codeOrigin)
            return;
    }

    m_codeRanges.append(CodeRange{label, label, codeOrigin});
}

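// Deltas are normally stored in a single byte. A delta that does not fit in a byte
// (or, for bytecode deltas, one that equals the sentinel) is escaped by writing the
// sentinel byte followed by the full-width delta.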
static constexpr uint8_t sentinelPCDelta = 0;
static constexpr int8_t sentinelBytecodeDelta = 0;

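// The map is built as two parallel compressed streams: PC deltas (resolved against
// the linked machine-code addresses) and CodeOrigin deltas (bytecode index offset
// plus an optional InlineCallFrame pointer). findPC() below decodes them in lockstep.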
PCToCodeOriginMap::PCToCodeOriginMap(PCToCodeOriginMapBuilder&& builder, LinkBuffer& linkBuffer)
{
    RELEASE_ASSERT(builder.didBuildMapping());

    if (!builder.m_codeRanges.size()) {
        m_pcRangeStart = std::numeric_limits<uintptr_t>::max();
        m_pcRangeEnd = std::numeric_limits<uintptr_t>::max();

        m_compressedPCBufferSize = 0;
        m_compressedPCs = nullptr;

        m_compressedCodeOriginsSize = 0;
        m_compressedCodeOrigins = nullptr;

        return;
    }

    // We do a final touch-up on the last range here because of how we generate the table.
    // The final range (if non-empty) would be ignored if we didn't append an (arbitrary)
    // range as the last item of the vector.
    PCToCodeOriginMapBuilder::CodeRange& last = builder.m_codeRanges.last();
    if (!(last.start == last.end))
        builder.m_codeRanges.append(PCToCodeOriginMapBuilder::CodeRange{ last.end, last.end, last.codeOrigin }); // This range will never actually be found, but it ensures the real last range is found.

    DeltaCompressionBuilder pcCompressor((sizeof(uintptr_t) + sizeof(uint8_t)) * builder.m_codeRanges.size());
    void* lastPCValue = nullptr;
    auto buildPCTable = [&] (void* pcValue) {
        RELEASE_ASSERT(pcValue > lastPCValue);
        uintptr_t delta = bitwise_cast<uintptr_t>(pcValue) - bitwise_cast<uintptr_t>(lastPCValue);
        RELEASE_ASSERT(delta != sentinelPCDelta);
        lastPCValue = pcValue;
        if (delta > std::numeric_limits<uint8_t>::max()) {
            pcCompressor.write<uint8_t>(sentinelPCDelta);
            pcCompressor.write<uintptr_t>(delta);
            return;
        }

        pcCompressor.write<uint8_t>(static_cast<uint8_t>(delta));
    };

    DeltaCompressionBuilder codeOriginCompressor((sizeof(intptr_t) + sizeof(int8_t) + sizeof(int8_t) + sizeof(InlineCallFrame*)) * builder.m_codeRanges.size());
    CodeOrigin lastCodeOrigin(BytecodeIndex(0));
    auto buildCodeOriginTable = [&] (const CodeOrigin& codeOrigin) {
        intptr_t delta = static_cast<intptr_t>(codeOrigin.bytecodeIndex().offset()) - static_cast<intptr_t>(lastCodeOrigin.bytecodeIndex().offset());
        lastCodeOrigin = codeOrigin;
        if (delta > std::numeric_limits<int8_t>::max() || delta < std::numeric_limits<int8_t>::min() || delta == sentinelBytecodeDelta) {
            codeOriginCompressor.write<int8_t>(sentinelBytecodeDelta);
            codeOriginCompressor.write<intptr_t>(delta);
        } else
            codeOriginCompressor.write<int8_t>(static_cast<int8_t>(delta));

        int8_t hasInlineCallFrameByte = codeOrigin.inlineCallFrame() ? 1 : 0;
        codeOriginCompressor.write<int8_t>(hasInlineCallFrameByte);
        if (hasInlineCallFrameByte)
            codeOriginCompressor.write<uintptr_t>(bitwise_cast<uintptr_t>(codeOrigin.inlineCallFrame()));
    };

    m_pcRangeStart = linkBuffer.locationOf<NoPtrTag>(builder.m_codeRanges.first().start).dataLocation<uintptr_t>();
    m_pcRangeEnd = linkBuffer.locationOf<NoPtrTag>(builder.m_codeRanges.last().end).dataLocation<uintptr_t>();
    m_pcRangeEnd -= 1;

    for (unsigned i = 0; i < builder.m_codeRanges.size(); i++) {
        PCToCodeOriginMapBuilder::CodeRange& codeRange = builder.m_codeRanges[i];
        void* start = linkBuffer.locationOf<NoPtrTag>(codeRange.start).dataLocation();
        void* end = linkBuffer.locationOf<NoPtrTag>(codeRange.end).dataLocation();
        ASSERT(m_pcRangeStart <= bitwise_cast<uintptr_t>(start));
        ASSERT(m_pcRangeEnd >= bitwise_cast<uintptr_t>(end) - 1);
        if (start == end)
            ASSERT(i == builder.m_codeRanges.size() - 1);
        if (i > 0)
            ASSERT(linkBuffer.locationOf<NoPtrTag>(builder.m_codeRanges[i - 1].end).dataLocation() == start);

        buildPCTable(start);
        buildCodeOriginTable(codeRange.codeOrigin);
    }

    m_compressedPCBufferSize = pcCompressor.m_offset;
    m_compressedPCs = static_cast<uint8_t*>(fastRealloc(pcCompressor.m_buffer, m_compressedPCBufferSize));

    m_compressedCodeOriginsSize = codeOriginCompressor.m_offset;
    m_compressedCodeOrigins = static_cast<uint8_t*>(fastRealloc(codeOriginCompressor.m_buffer, m_compressedCodeOriginsSize));
}

PCToCodeOriginMap::~PCToCodeOriginMap()
{
    if (m_compressedPCs)
        fastFree(m_compressedPCs);
    if (m_compressedCodeOrigins)
        fastFree(m_compressedCodeOrigins);
}

double PCToCodeOriginMap::memorySize()
{
    double size = 0;
    size += m_compressedPCBufferSize;
    size += m_compressedCodeOriginsSize;
    return size;
}

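// Decodes both compressed streams in lockstep, reconstructing each [previousPC, currentPC)
// range and its CodeOrigin until the range containing pc is found. Returns std::nullopt
// if pc lies outside the mapped code entirely.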
std::optional<CodeOrigin> PCToCodeOriginMap::findPC(void* pc) const
{
    uintptr_t pcAsInt = bitwise_cast<uintptr_t>(pc);
    if (!(m_pcRangeStart <= pcAsInt && pcAsInt <= m_pcRangeEnd))
        return std::nullopt;

    uintptr_t currentPC = 0;
    BytecodeIndex currentBytecodeIndex = BytecodeIndex(0);
    InlineCallFrame* currentInlineCallFrame = nullptr;

    DeltaCompressionReader pcReader(m_compressedPCs, m_compressedPCBufferSize);
    DeltaCompressionReader codeOriginReader(m_compressedCodeOrigins, m_compressedCodeOriginsSize);
    while (true) {
        uintptr_t previousPC = currentPC;
        {
            uint8_t value = pcReader.read<uint8_t>();
            uintptr_t delta;
            if (value == sentinelPCDelta)
                delta = pcReader.read<uintptr_t>();
            else
                delta = value;
            currentPC += delta;
        }

        CodeOrigin previousOrigin = CodeOrigin(currentBytecodeIndex, currentInlineCallFrame);
        {
            int8_t value = codeOriginReader.read<int8_t>();
            intptr_t delta;
            if (value == sentinelBytecodeDelta)
                delta = codeOriginReader.read<intptr_t>();
            else
                delta = static_cast<intptr_t>(value);

            currentBytecodeIndex = BytecodeIndex(static_cast<intptr_t>(currentBytecodeIndex.offset()) + delta);

            int8_t hasInlineFrame = codeOriginReader.read<int8_t>();
            ASSERT(hasInlineFrame == 0 || hasInlineFrame == 1);
            if (hasInlineFrame)
                currentInlineCallFrame = bitwise_cast<InlineCallFrame*>(codeOriginReader.read<uintptr_t>());
            else
                currentInlineCallFrame = nullptr;
        }

        if (previousPC) {
            uintptr_t startOfRange = previousPC;
            // We subtract 1 because we generate end points inclusively in this table, even though
            // we are interested in ranges of the form: [previousPC, currentPC).
            uintptr_t endOfRange = currentPC - 1;
            if (startOfRange <= pcAsInt && pcAsInt <= endOfRange)
                return std::optional<CodeOrigin>(previousOrigin); // We return previousOrigin here because CodeOrigins are mapped to the start of their range.
        }
    }

    RELEASE_ASSERT_NOT_REACHED();
    return std::nullopt;
}

} // namespace JSC

#endif // ENABLE(JIT)