source: webkit/trunk/JavaScriptCore/jit/JITOpcodes.cpp@44843

Last change on this file since 44843 was 44838, checked in by [email protected], 16 years ago

2009-06-18 Gavin Barraclough <[email protected]>

Rubber Stamped by Mark Rowe (originally reviewed by Sam Weinig).

(Reintroducing patch added in r44492, and reverted in r44796.)

Change the implementation of op_throw so the stub function always modifies its
return address - if it doesn't find a 'catch' it will switch to a trampoline
to force a return from JIT execution. This saves memory, by avoiding the need
for a unique return for every op_throw.
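
In outline (a sketch only, assuming the STUB_SET_RETURN_ADDRESS helper in
JITStubs.cpp; handlerFound and catchRoutine are illustrative names, not the
stub's actual variables):

    if (handlerFound)
        STUB_SET_RETURN_ADDRESS(catchRoutine);        // resume at the catch handler
    else
        STUB_SET_RETURN_ADDRESS(ctiOpThrowNotCaught); // force a return from JIT execution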

  • jit/JITOpcodes.cpp: (JSC::JIT::emit_op_throw):

JITStubs::cti_op_throw now always changes its return address; remove the
return code generated after the stub call (this is now handled by
ctiOpThrowNotCaught).

  • jit/JITStubs.cpp: (JSC::):

Add ctiOpThrowNotCaught definitions.

(JSC::JITStubs::DEFINE_STUB_FUNCTION):

Change cti_op_throw to always change its return address.

  • jit/JITStubs.h:

Add ctiOpThrowNotCaught declaration.

/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"

namespace JSC {

#define RECORD_JUMP_TARGET(targetOffset) \
    do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register go through
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}

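// op_end terminates JIT execution: load the final result into returnValueRegister,
// restore the return address saved in the call frame header, and return.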
void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, JITStubs::cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
    RECORD_JUMP_TARGET(target + 1);
}

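// op_loop is an unconditional jump like op_jmp, plus a timeout check so that
// long-running loops can be interrupted; the loop_if_* variants below fuse the
// timeout check with a conditional branch, with fast paths for constant
// immediate-integer operands.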
void JIT::emit_op_loop(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target + 1);
}

void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(ALTERNATE_JSIMMEDIATE)
        int32_t op2imm = getConstantOperandImmediateInt(op2);
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        addJump(branch32(LessThan, regT0, Imm32(op2imm)), target + 3);
    } else if (isOperandConstantImmediateInt(op1)) {
        emitGetVirtualRegister(op2, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(ALTERNATE_JSIMMEDIATE)
        int32_t op1imm = getConstantOperandImmediateInt(op1);
#else
        int32_t op1imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op1)));
#endif
        addJump(branch32(GreaterThan, regT0, Imm32(op1imm)), target + 3);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThan, regT0, regT1), target + 3);
    }
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(ALTERNATE_JSIMMEDIATE)
        int32_t op2imm = getConstantOperandImmediateInt(op2);
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target + 3);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target + 3);
    }
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, JITStubs::cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT2);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT0);
    emitJumpSlowCaseIfNotJSCell(regT1);

    // Check that baseVal is an object, that it 'ImplementsHasInstance' but that it does not 'OverridesHasInstance'.
    loadPtr(Address(regT0, FIELD_OFFSET(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    addSlowCase(branchTest32(Zero, Address(regT0, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // If value is not an Object, return false.
    Jump valueIsImmediate = emitJumpIfNotJSCell(regT2);
    loadPtr(Address(regT2, FIELD_OFFSET(JSCell, m_structure)), regT0);
    Jump valueIsNotObject = branch32(NotEqual, Address(regT0, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(ObjectType));

    // Check proto is object.
    loadPtr(Address(regT1, FIELD_OFFSET(JSCell, m_structure)), regT0);
    addSlowCase(branch32(NotEqual, Address(regT0, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, FIELD_OFFSET(JSCell, m_structure)), regT2);
    loadPtr(Address(regT2, FIELD_OFFSET(Structure, m_prototype)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchPtr(NotEqual, regT2, ImmPtr(JSValue::encode(jsNull())), loop);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    valueIsImmediate.link(this);
    valueIsNotObject.link(this);
    move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->function(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_load_varargs);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[3].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitPutVariableObjectRegister(regT1, regT0, currentInstruction[2].u.operand);
}

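// op_get_scoped_var / op_put_scoped_var walk `skip` links down the scope chain
// (one extra link if the code block needs a full scope chain), then access the
// variable in that scope node's variable object.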
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    while (skip--)
        loadPtr(Address(regT0, FIELD_OFFSET(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, FIELD_OFFSET(ScopeChainNode, object)), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    while (skip--)
        loadPtr(Address(regT1, FIELD_OFFSET(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, FIELD_OFFSET(ScopeChainNode, object)), regT1);
    emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, JITStubs::cti_op_tear_off_arguments).call();
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, JITStubs::cti_op_ret_scopeChain).call();

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, FIELD_OFFSET(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_resolve_func);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target + 2);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // Fast case
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    Jump noMatch = branchPtr(NotEqual, regT1, Address(regT0, FIELD_OFFSET(JSCell, m_structure))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, FIELD_OFFSET(JSGlobalObject, m_externalStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    Jump end = jump();

    // Slow case
    noMatch.link(this);
    JITStubCall stubCall(this, JITStubs::cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(currentInstruction[1].u.operand);
    end.link(this);
}

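// op_not: xoring out FullTagTypeBool leaves only the payload bit set for a
// boolean immediate; if any other bit remains set the operand was not a
// boolean and we take the slow case. The second xor re-tags the value with
// the payload bit flipped, turning true into false and vice versa.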
void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))), target + 2);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, FIELD_OFFSET(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target + 2);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, FIELD_OFFSET(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target + 2);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target + 3);

    RECORD_JUMP_TARGET(target + 3);
}

void JIT::emit_op_unexpected_load(Instruction* currentInstruction)
{
    JSValue v = m_codeBlock->unexpectedConstant(currentInstruction[2].u.operand);
    move(ImmPtr(JSValue::encode(v)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

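// op_jsr/op_sret implement entry to and exit from 'finally' blocks: jsr stores
// the address of the instruction following the jump (patched in later via the
// recorded JSRInfo), and sret jumps back through that stored address.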
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target + 2);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}

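// op_eq fast path: if both operands are immediate integers a plain 32-bit
// compare suffices; anything else falls through to the cti_op_eq slow case.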
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(ALTERNATE_JSIMMEDIATE)
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
#else
    xorPtr(Imm32(~JSImmediate::TagTypeNumber), regT0);
#endif
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpression(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target + 2);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target + 2);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target + 2);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

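// op_throw: cti_op_throw always rewrites its own return address - either to
// the matching catch handler or to the ctiOpThrowNotCaught trampoline - so
// execution never continues past the stub call (see the change log above and
// the NDEBUG breakpoint below).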
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_next_pname);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.call();
    Jump endOfIter = branchTestPtr(Zero, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    addJump(jump(), currentInstruction[3].u.operand + 3);
    endOfIter.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, JITStubs::cti_op_pop_scope).call();
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, FIELD_OFFSET(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

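// op_catch: on entry regT0 holds the exception (cti_op_throw's return value);
// the call frame register must be re-loaded from the stub stack frame because
// the throw may have unwound through one or more call frames.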
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    peek(callFrameRegister, offsetof(struct JITStackFrame, callFrame) / sizeof (void*));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand + 2);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand + 2);
}

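// The three switch stubs below look the scrutinee up in the relevant jump
// table and return the native code address of the matching case (or the
// default) in regT0; jumping through regT0 then dispatches directly to it.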
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, JITStubs::cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, JITStubs::cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, JITStubs::cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_new_error);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->unexpectedConstant(currentInstruction[3].u.operand))));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, JITStubs::cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, FIELD_OFFSET(JSCell, m_structure)), regT2);
    setTest32(NonZero, Address(regT2, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, FIELD_OFFSET(JSCell, m_structure)), regT2);
    setTest32(Zero, Address(regT2, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

    JITStubCall(this, JITStubs::cti_op_push_activation).call(currentInstruction[1].u.operand);
}

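// op_create_arguments lazily creates the arguments object: the ArgumentsRegister
// slot is non-zero once the object exists, so creation is skipped on repeat visits.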
void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, JITStubs::cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, JITStubs::cti_op_create_arguments).call();
    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, FIELD_OFFSET(JSCell, m_structure)), regT1);
    addSlowCase(branchTest32(NonZero, Address(regT1, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
}

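// The two profiler hooks below only call out to a stub when a profiler is
// enabled, i.e. when the enabledProfilerReference slot in the stub stack
// frame points at a non-null profiler.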
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, FIELD_OFFSET(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, JITStubs::cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, FIELD_OFFSET(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, JITStubs::cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

// Slow cases

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, JITStubs::cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // The slow case that handles accesses to arrays (below) may jump back up to here.
    Label beginGetByValSlow(this);

    Jump notImm = getSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitFastArithIntToImmNoCheck(regT1, regT1);

    notImm.link(this);
    JITStubCall stubCall(this, JITStubs::cti_op_get_by_val);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));

    // This is the slow case that handles accesses to arrays above the fast cut-off.
    // First, check if this is an access to the vector.
    linkSlowCase(iter);
    branch32(AboveOrEqual, regT1, Address(regT2, FIELD_OFFSET(ArrayStorage, m_vectorLength)), beginGetByValSlow);

    // okay, missed the fast region, but it is still in the vector. Get the value.
    loadPtr(BaseIndex(regT2, regT1, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), regT2);
    // Check whether the value loaded is zero; if so we need to return undefined.
    branchTestPtr(Zero, regT2, beginGetByValSlow);
    move(regT2, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, JITStubs::cti_op_loop_if_less);
        stubCall.addArgument(regT0);
        stubCall.addArgument(op2, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else if (isOperandConstantImmediateInt(op1)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, JITStubs::cti_op_loop_if_less);
        stubCall.addArgument(op1, regT2);
        stubCall.addArgument(regT0);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, JITStubs::cti_op_loop_if_less);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    }
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, JITStubs::cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, JITStubs::cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target + 3);
    }
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Normal slow cases - either the subscript is not an immediate integer, or the base is not an array.
    Jump notImm = getSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitFastArithIntToImmNoCheck(regT1, regT1);

    notImm.link(this); {
        JITStubCall stubCall(this, JITStubs::cti_op_put_by_val);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.addArgument(currentInstruction[3].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_put_by_val));
    }

    // slow cases for immediate int accesses to arrays
    linkSlowCase(iter);
    linkSlowCase(iter); {
        JITStubCall stubCall(this, JITStubs::cti_op_put_by_val_array);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.addArgument(currentInstruction[3].u.operand, regT2);
        stubCall.call();
    }
}

void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand + 2);
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    JITStubCall stubCall(this, JITStubs::cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand + 2); // inverted!
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand + 2);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_neq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

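// All five slow cases registered by emit_op_instanceof (the baseVal and proto
// cell checks, the two baseVal type-info checks, and the proto object-type
// check) funnel into the generic cti_op_instanceof stub.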
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, JITStubs::cti_op_instanceof);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.addArgument(currentInstruction[4].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, JITStubs::cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)