source: webkit/trunk/JavaScriptCore/jit/JITCall.cpp@48774

Last change on this file since 48774 was 47614, checked in by [email protected], 16 years ago

Numbering of arguments to emitGetJITStubArg/emitPutJITStubArg incorrect
https://bugs.webkit.org/show_bug.cgi?id=28513

Reviewed by Oliver Hunt.

The argumentNumber argument to emitGetJITStubArg/emitPutJITStubArg should match
the argument number used within the stub functions in JITStubs.cpp, but it doesn't.

Firstly, all the numbers changed when we added a void* 'reserved' as the first slot
(rather than leaving argument 0 unused), and secondly in 32_64 builds the index passed
to peek/poke needs to be multiplied by 2 (since the argument to peek/poke is a number
of machine words, and on 32_64 builds the argument slots to stub functions are two
words wide).
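
An illustrative sketch of the corrected mapping follows. This is not the actual code in
jit/JITInlineMethods.h (which operates on registers, not raw indices); it only restates the
two rules above, assuming the 'reserved' slot occupies one argument slot and that each
argument slot is two machine words wide on 32_64 builds:

    // Hypothetical helper, for illustration only.
    // peek()/poke() indices are counted in machine words.
    inline unsigned stubArgWordIndex(unsigned argumentNumber)
    {
    #if USE(JSVALUE32_64)
        return (argumentNumber + 1) * 2; // skip 'reserved'; two words per argument slot
    #else
        return argumentNumber + 1;       // skip 'reserved'; one word per argument slot
    #endif
    }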

  • jit/JIT.h:
  • jit/JITCall.cpp:

(JSC::JIT::compileOpCallSetupArgs):
(JSC::JIT::compileOpConstructSetupArgs):
(JSC::JIT::compileOpCallVarargsSetupArgs):
(JSC::JIT::compileOpCall):

  • jit/JITInlineMethods.h:

(JSC::JIT::emitPutJITStubArg):
(JSC::JIT::emitPutJITStubArgConstant):
(JSC::JIT::emitGetJITStubArg):
(JSC::JIT::emitPutJITStubArgFromVirtualRegister):

  • jit/JITOpcodes.cpp:

(JSC::JIT::privateCompileCTIMachineTrampolines):

  • jit/JITPropertyAccess.cpp:

(JSC::JIT::privateCompilePutByIdTransition):

/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

#if USE(JSVALUE32_64)

void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // scopeChain

    emitStore(static_cast<unsigned>(RegisterFile::OptionalCalleeArguments), JSValue());
    storePtr(regT0, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register)))); // callee
    storePtr(regT1, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register)))); // scopeChain
}

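// Stage the callee (tag/payload in regT1:regT0), the register offset and the argument count
// into the JIT stub argument slots, so a slow-path stub (e.g. cti_op_call_NotJSFunction)
// can read them if the fast path bails out.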
void JIT::compileOpCallSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    emitPutJITStubArg(regT1, regT0, 0);
    emitPutJITStubArgConstant(registerOffset, 1);
    emitPutJITStubArgConstant(argCount, 2);
}

void JIT::compileOpConstructSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;
    int proto = instruction[5].u.operand;
    int thisRegister = instruction[6].u.operand;

    emitPutJITStubArg(regT1, regT0, 0);
    emitPutJITStubArgConstant(registerOffset, 1);
    emitPutJITStubArgConstant(argCount, 2);
    emitPutJITStubArgFromVirtualRegister(proto, 3, regT2, regT3);
    emitPutJITStubArgConstant(thisRegister, 4);
}

void JIT::compileOpCallVarargsSetupArgs(Instruction*)
{
    emitPutJITStubArg(regT1, regT0, 0);
    emitPutJITStubArg(regT3, 1); // registerOffset
    emitPutJITStubArg(regT2, 2); // argCount
}

void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCountRegister = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    emitLoad(callee, regT1, regT0);
    emitLoadPayload(argCountRegister, regT2); // argCount
    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset

    compileOpCallVarargsSetupArgs(instruction);

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(Imm32(sizeof(Register)), regT3, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, Address(regT3, RegisterFile::CallerFrame * static_cast<int>(sizeof(Register))));
    move(regT3, callFrameRegister);

    move(regT2, regT1); // argCount

    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    emitStore(dst, regT1, regT0);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    map(m_bytecodeIndex + OPCODE_LENGTH(op_call_varargs), dst, regT1, regT0);
    sampleCodeBlock(m_codeBlock);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_ret_scopeChain).call();

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
}

void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    emitLoad(src, regT1, regT0);
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();
    // Stores a naked int32 in the register file.
    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

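// With JIT_OPTIMIZE_CALL disabled there is no call linking: every op_call/op_construct
// checks for a JSFunction and then goes through the generic ctiVirtualCall trampoline.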
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    Jump wasEval1;
    Jump wasEval2;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval1 = branchTest32(NonZero, regT0);
        wasEval2 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    }

    emitLoad(callee, regT1, regT2);

    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsFunctionVPtr)));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitLoad(callee, regT1, regT2);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    if (opcodeID == op_call_eval) {
        wasEval1.link(this);
        wasEval2.link(this);
    }

    emitStore(dst, regT1, regT0);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}

#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

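// With call optimization enabled the hot path embeds a patchable compare against a cached
// JSFunction (recorded as hotPathBegin); once the call is linked, the compare and the
// emitNakedCall() target below are patched so the call jumps directly to the callee's code.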
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    Jump wasEval1;
    Jump wasEval2;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval1 = branchTest32(NonZero, regT0);
        wasEval2 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    }

    emitLoad(callee, regT1, regT0);

    DataLabelPtr addressOfLinkedFunctionCheck;
    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, ImmPtr(0));
    addSlowCase(jumpToSlow);
    ASSERT(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow) == patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    // The following is the fast case, only used when a callee can be linked.

    // In the case of OpConstruct, call out to a cti_ function to create the new object.
    if (opcodeID == op_construct) {
        int proto = instruction[5].u.operand;
        int thisRegister = instruction[6].u.operand;

        JITStubCall stubCall(this, cti_op_construct_JSConstruct);
        stubCall.addArgument(regT1, regT0);
        stubCall.addArgument(Imm32(0)); // FIXME: Remove this unused JITStub argument.
        stubCall.addArgument(Imm32(0)); // FIXME: Remove this unused JITStub argument.
        stubCall.addArgument(proto);
        stubCall.call(thisRegister);

        emitLoad(callee, regT1, regT0);
    }

    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this does not set up RegisterFile::CodeBlock, which is set in the callee.
    emitStore(registerOffset + RegisterFile::OptionalCalleeArguments, JSValue());
    emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain
    store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval) {
        wasEval1.link(this);
        wasEval2.link(this);
    }

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + opcodeLengths[opcodeID], dst, regT1, regT0);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    // The arguments have been set up on the hot path for op_call_eval
    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    // Fast check for JS function.
    Jump callLinkFailNotObject = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitLoad(callee, regT1, regT0);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs.ctiVirtualCallLink());

    // Put the return value in dst.
    emitStore(dst, regT1, regT0);
    sampleCodeBlock(m_codeBlock);

    // If not, we need an extra case in the if below!
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));

    // Done! - return back to the hot path.
    if (opcodeID == op_construct)
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_construct));
    else
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);
    JITStubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction).call();

    emitStore(dst, regT1, regT0);
    sampleCodeBlock(m_codeBlock);
}

/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

#else // USE(JSVALUE32_64)

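// In the non-JSVALUE32_64 value representations a JSValue fits in a single machine register,
// so the code below passes the callee in regT0 alone rather than as a regT1:regT0 tag/payload pair.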
void JIT::compileOpCallInitializeCallFrame()
{
    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain

    storePtr(ImmPtr(JSValue::encode(JSValue())), Address(callFrameRegister, RegisterFile::OptionalCalleeArguments * static_cast<int>(sizeof(Register))));
    storePtr(regT0, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register))));
}

void JIT::compileOpCallSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // ecx holds func
    emitPutJITStubArg(regT0, 0);
    emitPutJITStubArgConstant(argCount, 2);
    emitPutJITStubArgConstant(registerOffset, 1);
}

void JIT::compileOpCallVarargsSetupArgs(Instruction* instruction)
{
    int registerOffset = instruction[4].u.operand;

    // ecx holds func
    emitPutJITStubArg(regT0, 0);
    emitPutJITStubArg(regT1, 2);
    addPtr(Imm32(registerOffset), regT1, regT2);
    emitPutJITStubArg(regT2, 1);
}

void JIT::compileOpConstructSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;
    int proto = instruction[5].u.operand;
    int thisRegister = instruction[6].u.operand;

    // ecx holds func
    emitPutJITStubArg(regT0, 0);
    emitPutJITStubArgConstant(registerOffset, 1);
    emitPutJITStubArgConstant(argCount, 2);
    emitPutJITStubArgFromVirtualRegister(proto, 3, regT2);
    emitPutJITStubArgConstant(thisRegister, 4);
}

void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCountRegister = instruction[3].u.operand;

    emitGetVirtualRegister(argCountRegister, regT1);
    emitGetVirtualRegister(callee, regT0);
    compileOpCallVarargsSetupArgs(instruction);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(Imm32(sizeof(Register)), regT2, regT2);
    intptr_t offset = (intptr_t)sizeof(Register) * (intptr_t)RegisterFile::CallerFrame;
    addPtr(Imm32((int32_t)offset), regT2, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, regT3);
    addPtr(regT2, callFrameRegister);
    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = instruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}

#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // Handle eval
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    }

    emitGetVirtualRegister(callee, regT0);
    // The arguments have been set up on the hot path for op_call_eval
    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitGetVirtualRegister(callee, regT0);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}

#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

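// Same linking scheme as the JSVALUE32_64 path above; here the patchable compare is wrapped in
// an uninterrupted sequence so the assembler keeps the compare and its jump at the expected
// patchOffsetOpCallCompareToJump distance for later patching.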
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // Handle eval
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    }

    // This plants a check for a cached JSFunction value, so we can plant a fast link to the callee.
    // This deliberately leaves the callee in ecx, used when setting up the stack frame below
    emitGetVirtualRegister(callee, regT0);
    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, ImmPtr(JSValue::encode(JSValue())));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow) == patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    // The following is the fast case, only used when a callee can be linked.

    // In the case of OpConstruct, call out to a cti_ function to create the new object.
    if (opcodeID == op_construct) {
        int proto = instruction[5].u.operand;
        int thisRegister = instruction[6].u.operand;

        emitPutJITStubArg(regT0, 0);
        emitPutJITStubArgFromVirtualRegister(proto, 3, regT2);
        JITStubCall stubCall(this, cti_op_construct_JSConstruct);
        stubCall.call(thisRegister);
        emitGetVirtualRegister(callee, regT0);
    }

    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this does not set up RegisterFile::CodeBlock, which is set in the callee.
    storePtr(ImmPtr(JSValue::encode(JSValue())), Address(callFrameRegister, (registerOffset + RegisterFile::OptionalCalleeArguments) * static_cast<int>(sizeof(Register))));
    storePtr(regT0, Address(callFrameRegister, (registerOffset + RegisterFile::Callee) * static_cast<int>(sizeof(Register))));
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain
    store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    linkSlowCase(iter);

    // The arguments have been set up on the hot path for op_call_eval
    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    // Fast check for JS function.
    Jump callLinkFailNotObject = emitJumpIfNotJSCell(regT0);
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitGetVirtualRegister(callee, regT0);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    move(regT0, regT2);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs.ctiVirtualCallLink());

    // Put the return value in dst.
    emitPutVirtualRegister(dst);
    sampleCodeBlock(m_codeBlock);

    // If not, we need an extra case in the if below!
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));

    // Done! - return back to the hot path.
    if (opcodeID == op_construct)
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_construct));
    else
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);
    JITStubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction).call();

    emitPutVirtualRegister(dst);
    sampleCodeBlock(m_codeBlock);
}

/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)