/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JIT_h
#define JIT_h

#include <wtf/Platform.h>

#if ENABLE(JIT)

// We've run into some problems where changing the size of the class JIT leads to
// performance fluctuations. Try forcing alignment in an attempt to stabilize this.
#if COMPILER(GCC)
#define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
#else
#define JIT_CLASS_ALIGNMENT
#endif

#define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, actual, expected);
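
// For example (an illustrative sketch, not a verbatim call site): after emitting a patchable
// property-access sequence, the JIT can verify that a label landed at the offset the patching
// code expects, along the lines of:
//     ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetGetByIdStructure);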

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITCode.h"
#include "JITStubs.h"
#include "Opcode.h"
#include "RegisterFile.h"
#include "MacroAssembler.h"
#include "Profiler.h"
#include <bytecode/SamplingTool.h>
#include <wtf/AlwaysInline.h>
#include <wtf/Vector.h>

namespace JSC {

    class CodeBlock;
    class JIT;
    class JSPropertyNameIterator;
    class Interpreter;
    class Register;
    class RegisterFile;
    class ScopeChainNode;
    class StructureChain;

    struct CallLinkInfo;
    struct Instruction;
    struct OperandTypes;
    struct PolymorphicAccessStructureList;
    struct SimpleJumpTable;
    struct StringJumpTable;
    struct StructureStubInfo;

    struct CallRecord {
        MacroAssembler::Call from;
        unsigned bytecodeIndex;
        void* to;

        CallRecord()
        {
        }

        CallRecord(MacroAssembler::Call from, unsigned bytecodeIndex, void* to = 0)
            : from(from)
            , bytecodeIndex(bytecodeIndex)
            , to(to)
        {
        }
    };

    struct JumpTable {
        MacroAssembler::Jump from;
        unsigned toBytecodeIndex;

        JumpTable(MacroAssembler::Jump f, unsigned t)
            : from(f)
            , toBytecodeIndex(t)
        {
        }
    };

    struct SlowCaseEntry {
        MacroAssembler::Jump from;
        unsigned to;
        unsigned hint;

        SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
            : from(f)
            , to(t)
            , hint(h)
        {
        }
    };

    struct SwitchRecord {
        enum Type {
            Immediate,
            Character,
            String
        };

        Type type;

        union {
            SimpleJumpTable* simpleJumpTable;
            StringJumpTable* stringJumpTable;
        } jumpTable;

        unsigned bytecodeIndex;
        unsigned defaultOffset;

        SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset, Type type)
            : type(type)
            , bytecodeIndex(bytecodeIndex)
            , defaultOffset(defaultOffset)
        {
            this->jumpTable.simpleJumpTable = jumpTable;
        }

        SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeIndex, unsigned defaultOffset)
            : type(String)
            , bytecodeIndex(bytecodeIndex)
            , defaultOffset(defaultOffset)
        {
            this->jumpTable.stringJumpTable = jumpTable;
        }
    };


    struct PropertyStubCompilationInfo {
        MacroAssembler::Call callReturnLocation;
        MacroAssembler::Label hotPathBegin;
    };

    struct StructureStubCompilationInfo {
        MacroAssembler::DataLabelPtr hotPathBegin;
        MacroAssembler::Call hotPathOther;
        MacroAssembler::Call callReturnLocation;
    };

    struct MethodCallCompilationInfo {
        MethodCallCompilationInfo(unsigned propertyAccessIndex)
            : propertyAccessIndex(propertyAccessIndex)
        {
        }

        MacroAssembler::DataLabelPtr structureToCompare;
        unsigned propertyAccessIndex;
    };

    // Near calls can only be patched to other JIT code; regular calls can be patched to JIT code or relinked to stub functions.
    void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
    void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
    void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
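
    // Illustrative use (a sketch, not a specific call site): when a cached property access
    // gives up on patching, the slow-path call behind the return address can be relinked to
    // a generic stub, roughly:
    //     ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
    // (cti_op_get_by_id_generic is named here for illustration; the real targets live in JITStubs.cpp.)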

    class JIT : private MacroAssembler {
        friend class JITStubCall;

        using MacroAssembler::Jump;
        using MacroAssembler::JumpList;
        using MacroAssembler::Label;

        // NOTES:
        //
        // regT0 has two special meanings. The return value from a stub
        // call will always be in regT0, and by default (unless
        // a register is specified) emitPutVirtualRegister() will store
        // the value from regT0.
        //
        // regT3 is required to be callee-preserved.
        //
        // tempRegister2 has no such dependencies. It is important that
        // on x86/x86-64 it is ecx for performance reasons, since the
        // MacroAssembler will need to plant register swaps if it is not -
        // however the code will still function correctly.
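        //
        // For example (an illustrative sketch of the regT0 convention described above):
        //     emitPutVirtualRegister(dst);        // stores regT0, the default source register
        //     emitPutVirtualRegister(dst, regT1); // stores an explicitly named register instead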
#if PLATFORM(X86_64)
        static const RegisterID returnValueRegister = X86Registers::eax;
        static const RegisterID cachedResultRegister = X86Registers::eax;
        static const RegisterID firstArgumentRegister = X86Registers::edi;

        static const RegisterID timeoutCheckRegister = X86Registers::r12;
        static const RegisterID callFrameRegister = X86Registers::r13;
        static const RegisterID tagTypeNumberRegister = X86Registers::r14;
        static const RegisterID tagMaskRegister = X86Registers::r15;

        static const RegisterID regT0 = X86Registers::eax;
        static const RegisterID regT1 = X86Registers::edx;
        static const RegisterID regT2 = X86Registers::ecx;
        static const RegisterID regT3 = X86Registers::ebx;

        static const FPRegisterID fpRegT0 = X86Registers::xmm0;
        static const FPRegisterID fpRegT1 = X86Registers::xmm1;
        static const FPRegisterID fpRegT2 = X86Registers::xmm2;
#elif PLATFORM(X86)
        static const RegisterID returnValueRegister = X86Registers::eax;
        static const RegisterID cachedResultRegister = X86Registers::eax;
        // On x86 we always use fastcall conventions, but on
        // OS X it might make more sense to just use regparm.
        static const RegisterID firstArgumentRegister = X86Registers::ecx;

        static const RegisterID timeoutCheckRegister = X86Registers::esi;
        static const RegisterID callFrameRegister = X86Registers::edi;

        static const RegisterID regT0 = X86Registers::eax;
        static const RegisterID regT1 = X86Registers::edx;
        static const RegisterID regT2 = X86Registers::ecx;
        static const RegisterID regT3 = X86Registers::ebx;

        static const FPRegisterID fpRegT0 = X86Registers::xmm0;
        static const FPRegisterID fpRegT1 = X86Registers::xmm1;
        static const FPRegisterID fpRegT2 = X86Registers::xmm2;
#elif PLATFORM(ARM_THUMB2)
        static const RegisterID returnValueRegister = ARMRegisters::r0;
        static const RegisterID cachedResultRegister = ARMRegisters::r0;
        static const RegisterID firstArgumentRegister = ARMRegisters::r0;

        static const RegisterID regT0 = ARMRegisters::r0;
        static const RegisterID regT1 = ARMRegisters::r1;
        static const RegisterID regT2 = ARMRegisters::r2;
        static const RegisterID regT3 = ARMRegisters::r4;

        static const RegisterID callFrameRegister = ARMRegisters::r5;
        static const RegisterID timeoutCheckRegister = ARMRegisters::r6;

        static const FPRegisterID fpRegT0 = ARMRegisters::d0;
        static const FPRegisterID fpRegT1 = ARMRegisters::d1;
        static const FPRegisterID fpRegT2 = ARMRegisters::d2;
#elif PLATFORM(ARM_TRADITIONAL)
        static const RegisterID returnValueRegister = ARMRegisters::r0;
        static const RegisterID cachedResultRegister = ARMRegisters::r0;
        static const RegisterID firstArgumentRegister = ARMRegisters::r0;

        static const RegisterID timeoutCheckRegister = ARMRegisters::r5;
        static const RegisterID callFrameRegister = ARMRegisters::r4;

        static const RegisterID regT0 = ARMRegisters::r0;
        static const RegisterID regT1 = ARMRegisters::r1;
        static const RegisterID regT2 = ARMRegisters::r2;
        // Callee preserved
        static const RegisterID regT3 = ARMRegisters::r7;

        static const RegisterID regS0 = ARMRegisters::S0;
        // Callee preserved
        static const RegisterID regS1 = ARMRegisters::S1;

        static const RegisterID regStackPtr = ARMRegisters::sp;
        static const RegisterID regLink = ARMRegisters::lr;

        static const FPRegisterID fpRegT0 = ARMRegisters::d0;
        static const FPRegisterID fpRegT1 = ARMRegisters::d1;
        static const FPRegisterID fpRegT2 = ARMRegisters::d2;
#else
#error "JIT not supported on this platform."
#endif

        static const int patchGetByIdDefaultStructure = -1;
        // Magic number - the initial offset must not be representable as a signed 8-bit value, or the X86Assembler
        // will compress the displacement, and we may not be able to fit a patched offset.
        static const int patchGetByIdDefaultOffset = 256;
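        // For example (an illustrative sketch of how the magic offset is consumed): the hot path
        // plants a load such as
        //     loadPtr(Address(baseRegister, patchGetByIdDefaultOffset), resultRegister);
        // so the assembler emits a full 32-bit displacement that can later be repatched with the
        // real property offset. (baseRegister and resultRegister are placeholder names.)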

    public:
        static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock)
        {
            return JIT(globalData, codeBlock).privateCompile();
        }

        static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, cachedOffset, returnAddress, callFrame);
        }

        static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, cachedOffset);
        }
        static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, cachedOffset, callFrame);
        }
        static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, cachedOffset, callFrame);
        }

        static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, cachedOffset, returnAddress, callFrame);
        }

        static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress);
        }

        static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
        {
            JIT jit(globalData);
            jit.privateCompileCTIMachineTrampolines(executablePool, globalData, ctiStringLengthTrampoline, ctiVirtualCallLink, ctiVirtualCall, ctiNativeCallThunk);
        }

        static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
        static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
        static void patchMethodCallProto(CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*, ReturnAddressPtr);

        static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
        {
            JIT jit(globalData, codeBlock);
            return jit.privateCompilePatchGetArrayLength(returnAddress);
        }

        static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode&, CallLinkInfo*, int callerArgCount, JSGlobalData*);
        static void unlinkCall(CallLinkInfo*);

    private:
        struct JSRInfo {
            DataLabelPtr storeLocation;
            Label target;

            JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
                : storeLocation(storeLocation)
                , target(targetLocation)
            {
            }
        };

        JIT(JSGlobalData*, CodeBlock* = 0);

        void privateCompileMainPass();
        void privateCompileLinkPass();
        void privateCompileSlowCases();
        JITCode privateCompile();
        void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
        void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, size_t cachedOffset);
        void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame);
        void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame);
        void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
        void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress);

        void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk);
        void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);

        void addSlowCase(Jump);
        void addSlowCase(JumpList);
        void addJump(Jump, int);
        void emitJumpSlowToHot(Jump, int);

        void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
        void compileOpCallVarargs(Instruction* instruction);
        void compileOpCallInitializeCallFrame();
        void compileOpCallSetupArgs(Instruction*);
        void compileOpCallVarargsSetupArgs(Instruction*);
        void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
        void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);
        void compileOpConstructSetupArgs(Instruction*);

        enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
        void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
        bool isOperandConstantImmediateDouble(unsigned src);

        void emitLoadDouble(unsigned index, FPRegisterID value);
        void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);

        Address addressFor(unsigned index, RegisterID base = callFrameRegister);

        void testPrototype(Structure*, JumpList& failureCases);

#if USE(JSVALUE32_64)
        Address tagFor(unsigned index, RegisterID base = callFrameRegister);
        Address payloadFor(unsigned index, RegisterID base = callFrameRegister);

        bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);

        void emitLoadTag(unsigned index, RegisterID tag);
        void emitLoadPayload(unsigned index, RegisterID payload);

        void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
        void emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
        void emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2);

        void emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
        void emitStore(unsigned index, const JSValue constant, RegisterID base = callFrameRegister);
        void emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32 = false);
        void emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32 = false);
        void emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell = false);
        void emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool = false);
        void emitStoreDouble(unsigned index, FPRegisterID value);

        bool isLabeled(unsigned bytecodeIndex);
        void map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload);
        void unmap(RegisterID);
        void unmap();
        bool isMapped(unsigned virtualRegisterIndex);
        bool getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload);
        bool getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag);

        void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex);
        void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag);
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, unsigned virtualRegisterIndex);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
        void compileGetByIdHotPath();
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
#endif
        void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
        void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset);

        // Arithmetic opcode helpers
        void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
        void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
        void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);

#if PLATFORM(X86)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 7;
        static const int patchOffsetPutByIdExternalLoad = 13;
        static const int patchLengthPutByIdExternalLoad = 3;
        static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
        static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 7;
        static const int patchOffsetGetByIdBranchToSlowCase = 13;
        static const int patchOffsetGetByIdExternalLoad = 13;
        static const int patchLengthGetByIdExternalLoad = 3;
        static const int patchOffsetGetByIdPropertyMapOffset1 = 22;
        static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
        static const int patchOffsetGetByIdPutResult = 28;
#if ENABLE(OPCODE_SAMPLING) && USE(JIT_STUB_ARGUMENT_VA_LIST)
        static const int patchOffsetGetByIdSlowCaseCall = 35;
#elif ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 37;
#elif USE(JIT_STUB_ARGUMENT_VA_LIST)
        static const int patchOffsetGetByIdSlowCaseCall = 25;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 27;
#endif
        static const int patchOffsetOpCallCompareToJump = 6;

        static const int patchOffsetMethodCheckProtoObj = 11;
        static const int patchOffsetMethodCheckProtoStruct = 18;
        static const int patchOffsetMethodCheckPutFunction = 29;
#else
#error "JSVALUE32_64 not supported on this platform."
#endif

#else // USE(JSVALUE32_64)
        void emitGetVirtualRegister(int src, RegisterID dst);
        void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
        void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);

        int32_t getConstantOperandImmediateInt(unsigned src);

        void emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst);
        void emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index);

        void killLastResultRegister();

        Jump emitJumpIfJSCell(RegisterID);
        Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
        void emitJumpSlowCaseIfJSCell(RegisterID);
        Jump emitJumpIfNotJSCell(RegisterID);
        void emitJumpSlowCaseIfNotJSCell(RegisterID);
        void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
#if USE(JSVALUE64)
        JIT::Jump emitJumpIfImmediateNumber(RegisterID);
        JIT::Jump emitJumpIfNotImmediateNumber(RegisterID);
#else
        JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
        {
            return emitJumpIfImmediateInteger(reg);
        }

        JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
        {
            return emitJumpIfNotImmediateInteger(reg);
        }
#endif
        JIT::Jump emitJumpIfImmediateInteger(RegisterID);
        JIT::Jump emitJumpIfNotImmediateInteger(RegisterID);
        JIT::Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
        void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
        void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
        void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);

#if !USE(JSVALUE64)
        void emitFastArithDeTagImmediate(RegisterID);
        Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
#endif
        void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
        void emitFastArithImmToInt(RegisterID);
        void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);

        void emitTagAsBoolImmediate(RegisterID reg);
        void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
#if USE(JSVALUE64)
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
#else
        void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
        void compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned propertyAccessInstructionIndex);
        void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
#endif
        void compileGetDirectOffset(RegisterID base, RegisterID result, Structure* structure, size_t cachedOffset);
        void compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID result, size_t cachedOffset);
        void compilePutDirectOffset(RegisterID base, RegisterID value, Structure* structure, size_t cachedOffset);

#if PLATFORM(X86_64)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdExternalLoad = 20;
        static const int patchLengthPutByIdExternalLoad = 4;
        static const int patchOffsetPutByIdPropertyMapOffset = 31;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 20;
        static const int patchOffsetGetByIdExternalLoad = 20;
        static const int patchLengthGetByIdExternalLoad = 4;
        static const int patchOffsetGetByIdPropertyMapOffset = 31;
        static const int patchOffsetGetByIdPutResult = 31;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 64;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 41;
#endif
        static const int patchOffsetOpCallCompareToJump = 9;

        static const int patchOffsetMethodCheckProtoObj = 20;
        static const int patchOffsetMethodCheckProtoStruct = 30;
        static const int patchOffsetMethodCheckPutFunction = 50;
#elif PLATFORM(X86)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 7;
        static const int patchOffsetPutByIdExternalLoad = 13;
        static const int patchLengthPutByIdExternalLoad = 3;
        static const int patchOffsetPutByIdPropertyMapOffset = 22;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 7;
        static const int patchOffsetGetByIdBranchToSlowCase = 13;
        static const int patchOffsetGetByIdExternalLoad = 13;
        static const int patchLengthGetByIdExternalLoad = 3;
        static const int patchOffsetGetByIdPropertyMapOffset = 22;
        static const int patchOffsetGetByIdPutResult = 22;
#if ENABLE(OPCODE_SAMPLING) && USE(JIT_STUB_ARGUMENT_VA_LIST)
        static const int patchOffsetGetByIdSlowCaseCall = 31;
#elif ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 33;
#elif USE(JIT_STUB_ARGUMENT_VA_LIST)
        static const int patchOffsetGetByIdSlowCaseCall = 21;
#else
        static const int patchOffsetGetByIdSlowCaseCall = 23;
#endif
        static const int patchOffsetOpCallCompareToJump = 6;

        static const int patchOffsetMethodCheckProtoObj = 11;
        static const int patchOffsetMethodCheckProtoStruct = 18;
        static const int patchOffsetMethodCheckPutFunction = 29;
#elif PLATFORM(ARM_THUMB2)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 10;
        static const int patchOffsetPutByIdExternalLoad = 20;
        static const int patchLengthPutByIdExternalLoad = 12;
        static const int patchOffsetPutByIdPropertyMapOffset = 40;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 10;
        static const int patchOffsetGetByIdBranchToSlowCase = 20;
        static const int patchOffsetGetByIdExternalLoad = 20;
        static const int patchLengthGetByIdExternalLoad = 12;
        static const int patchOffsetGetByIdPropertyMapOffset = 40;
        static const int patchOffsetGetByIdPutResult = 44;
#if ENABLE(OPCODE_SAMPLING)
        static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
#else
        static const int patchOffsetGetByIdSlowCaseCall = 28;
#endif
        static const int patchOffsetOpCallCompareToJump = 10;

        static const int patchOffsetMethodCheckProtoObj = 18;
        static const int patchOffsetMethodCheckProtoStruct = 28;
        static const int patchOffsetMethodCheckPutFunction = 46;
#elif PLATFORM(ARM_TRADITIONAL)
        // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
        static const int patchOffsetPutByIdStructure = 4;
        static const int patchOffsetPutByIdExternalLoad = 16;
        static const int patchLengthPutByIdExternalLoad = 4;
        static const int patchOffsetPutByIdPropertyMapOffset = 20;
        // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
        static const int patchOffsetGetByIdStructure = 4;
        static const int patchOffsetGetByIdBranchToSlowCase = 16;
        static const int patchOffsetGetByIdExternalLoad = 16;
        static const int patchLengthGetByIdExternalLoad = 4;
        static const int patchOffsetGetByIdPropertyMapOffset = 20;
        static const int patchOffsetGetByIdPutResult = 28;
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
        static const int patchOffsetGetByIdSlowCaseCall = 28;
#endif
        static const int patchOffsetOpCallCompareToJump = 12;

        static const int patchOffsetMethodCheckProtoObj = 12;
        static const int patchOffsetMethodCheckProtoStruct = 20;
        static const int patchOffsetMethodCheckPutFunction = 32;
#endif
#endif // USE(JSVALUE32_64)

#if PLATFORM(ARM_TRADITIONAL)
        // sequenceOpCall
        static const int sequenceOpCallInstructionSpace = 12;
        static const int sequenceOpCallConstantSpace = 2;
        // sequenceMethodCheck
        static const int sequenceMethodCheckInstructionSpace = 40;
        static const int sequenceMethodCheckConstantSpace = 6;
        // sequenceGetByIdHotPath
        static const int sequenceGetByIdHotPathInstructionSpace = 28;
        static const int sequenceGetByIdHotPathConstantSpace = 3;
        // sequenceGetByIdSlowCase
        static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
        static const int sequenceGetByIdSlowCaseConstantSpace = 2;
        // sequencePutById
        static const int sequencePutByIdInstructionSpace = 28;
        static const int sequencePutByIdConstantSpace = 3;
#endif

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
#define BEGIN_UNINTERRUPTED_SEQUENCE(name) beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)
#define END_UNINTERRUPTED_SEQUENCE(name) endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)

        void beginUninterruptedSequence(int, int);
        void endUninterruptedSequence(int, int);

#else
#define BEGIN_UNINTERRUPTED_SEQUENCE(name)
#define END_UNINTERRUPTED_SEQUENCE(name)
#endif
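
        // Illustrative use (a sketch of the pattern, not a specific call site): a patchable code
        // sequence is bracketed so the assembler's constant pool cannot be flushed into the middle
        // of it, which would break the fixed patch offsets above, e.g.
        //     BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
        //     ... emit the structure check, property load, and result store ...
        //     END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);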

        void emit_op_add(Instruction*);
        void emit_op_bitand(Instruction*);
        void emit_op_bitnot(Instruction*);
        void emit_op_bitor(Instruction*);
        void emit_op_bitxor(Instruction*);
        void emit_op_call(Instruction*);
        void emit_op_call_eval(Instruction*);
        void emit_op_call_varargs(Instruction*);
        void emit_op_catch(Instruction*);
        void emit_op_construct(Instruction*);
        void emit_op_construct_verify(Instruction*);
        void emit_op_convert_this(Instruction*);
        void emit_op_create_arguments(Instruction*);
        void emit_op_debug(Instruction*);
        void emit_op_del_by_id(Instruction*);
        void emit_op_div(Instruction*);
        void emit_op_end(Instruction*);
        void emit_op_enter(Instruction*);
        void emit_op_enter_with_activation(Instruction*);
        void emit_op_eq(Instruction*);
        void emit_op_eq_null(Instruction*);
        void emit_op_get_by_id(Instruction*);
        void emit_op_get_by_val(Instruction*);
        void emit_op_get_global_var(Instruction*);
        void emit_op_get_scoped_var(Instruction*);
        void emit_op_init_arguments(Instruction*);
        void emit_op_instanceof(Instruction*);
        void emit_op_jeq_null(Instruction*);
        void emit_op_jfalse(Instruction*);
        void emit_op_jmp(Instruction*);
        void emit_op_jmp_scopes(Instruction*);
        void emit_op_jneq_null(Instruction*);
        void emit_op_jneq_ptr(Instruction*);
        void emit_op_jnless(Instruction*);
        void emit_op_jnlesseq(Instruction*);
        void emit_op_jsr(Instruction*);
        void emit_op_jtrue(Instruction*);
        void emit_op_load_varargs(Instruction*);
        void emit_op_loop(Instruction*);
        void emit_op_loop_if_less(Instruction*);
        void emit_op_loop_if_lesseq(Instruction*);
        void emit_op_loop_if_true(Instruction*);
        void emit_op_lshift(Instruction*);
        void emit_op_method_check(Instruction*);
        void emit_op_mod(Instruction*);
        void emit_op_mov(Instruction*);
        void emit_op_mul(Instruction*);
        void emit_op_negate(Instruction*);
        void emit_op_neq(Instruction*);
        void emit_op_neq_null(Instruction*);
        void emit_op_new_array(Instruction*);
        void emit_op_new_error(Instruction*);
        void emit_op_new_func(Instruction*);
        void emit_op_new_func_exp(Instruction*);
        void emit_op_new_object(Instruction*);
        void emit_op_new_regexp(Instruction*);
        void emit_op_get_pnames(Instruction*);
        void emit_op_next_pname(Instruction*);
        void emit_op_not(Instruction*);
        void emit_op_nstricteq(Instruction*);
        void emit_op_pop_scope(Instruction*);
        void emit_op_post_dec(Instruction*);
        void emit_op_post_inc(Instruction*);
        void emit_op_pre_dec(Instruction*);
        void emit_op_pre_inc(Instruction*);
        void emit_op_profile_did_call(Instruction*);
        void emit_op_profile_will_call(Instruction*);
        void emit_op_push_new_scope(Instruction*);
        void emit_op_push_scope(Instruction*);
        void emit_op_put_by_id(Instruction*);
        void emit_op_put_by_index(Instruction*);
        void emit_op_put_by_val(Instruction*);
        void emit_op_put_getter(Instruction*);
        void emit_op_put_global_var(Instruction*);
        void emit_op_put_scoped_var(Instruction*);
        void emit_op_put_setter(Instruction*);
        void emit_op_resolve(Instruction*);
        void emit_op_resolve_base(Instruction*);
        void emit_op_resolve_global(Instruction*);
        void emit_op_resolve_skip(Instruction*);
        void emit_op_resolve_with_base(Instruction*);
        void emit_op_ret(Instruction*);
        void emit_op_rshift(Instruction*);
        void emit_op_sret(Instruction*);
        void emit_op_strcat(Instruction*);
        void emit_op_stricteq(Instruction*);
        void emit_op_sub(Instruction*);
        void emit_op_switch_char(Instruction*);
        void emit_op_switch_imm(Instruction*);
        void emit_op_switch_string(Instruction*);
        void emit_op_tear_off_activation(Instruction*);
        void emit_op_tear_off_arguments(Instruction*);
        void emit_op_throw(Instruction*);
        void emit_op_to_jsnumber(Instruction*);
        void emit_op_to_primitive(Instruction*);
        void emit_op_unexpected_load(Instruction*);

        void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct_verify(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);

        /* These functions are deprecated: Please use JITStubCall instead. */
        void emitPutJITStubArg(RegisterID src, unsigned argumentNumber);
#if USE(JSVALUE32_64)
        void emitPutJITStubArg(RegisterID tag, RegisterID payload, unsigned argumentNumber);
        void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2);
#else
        void emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch);
#endif
        void emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber);
        void emitPutJITStubArgConstant(void* value, unsigned argumentNumber);
        void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);
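        // Preferred pattern (a sketch only; see JITStubCall.h for the exact interface): a stub
        // call is set up and dispatched through the JITStubCall helper rather than by writing
        // arguments with the functions above, roughly:
        //     JITStubCall stubCall(this, cti_op_new_object);
        //     stubCall.call(currentInstruction[1].u.operand);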

        void emitInitRegister(unsigned dst);

        void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
        void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
        void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
        void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);

        JSValue getConstantOperand(unsigned src);
        bool isOperandConstantImmediateInt(unsigned src);

        Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            return iter++->from;
        }
        void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            iter->from.link(this);
            ++iter;
        }
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);

        Jump checkStructure(RegisterID reg, Structure* structure);

        void restoreArgumentReference();
        void restoreArgumentReferenceForTrampoline();

        Call emitNakedCall(CodePtr function = CodePtr());

        void preserveReturnAddressAfterCall(RegisterID);
        void restoreReturnAddressBeforeReturn(RegisterID);
        void restoreReturnAddressBeforeReturn(Address);

        void emitTimeoutCheck();
#ifndef NDEBUG
        void printBytecodeOperandTypes(unsigned src1, unsigned src2);
#endif

#if ENABLE(SAMPLING_FLAGS)
        void setSamplingFlag(int32_t);
        void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
        void emitCount(AbstractSamplingCounter&, uint32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
        void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
        void sampleCodeBlock(CodeBlock*);
#else
        void sampleCodeBlock(CodeBlock*) {}
#endif

        Interpreter* m_interpreter;
        JSGlobalData* m_globalData;
        CodeBlock* m_codeBlock;

        Vector<CallRecord> m_calls;
        Vector<Label> m_labels;
        Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
        Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
        Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
        Vector<JumpTable> m_jmpTable;

        unsigned m_bytecodeIndex;
        Vector<JSRInfo> m_jsrSites;
        Vector<SlowCaseEntry> m_slowCases;
        Vector<SwitchRecord> m_switches;

        unsigned m_propertyAccessInstructionIndex;
        unsigned m_globalResolveInfoIndex;
        unsigned m_callLinkInfoIndex;

#if USE(JSVALUE32_64)
        unsigned m_jumpTargetIndex;
        unsigned m_mappedBytecodeIndex;
        unsigned m_mappedVirtualRegisterIndex;
        RegisterID m_mappedTag;
        RegisterID m_mappedPayload;
#else
        int m_lastResultBytecodeRegister;
        unsigned m_jumpTargetsPosition;
#endif

#ifndef NDEBUG
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
        Label m_uninterruptedInstructionSequenceBegin;
        int m_uninterruptedConstantSequenceBegin;
#endif
#endif
    } JIT_CLASS_ALIGNMENT;

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JIT_h