Ignore:
Timestamp:
May 8, 2010, 3:59:03 PM (15 years ago)
Author:
[email protected]
Message:

2010-05-08 Oliver Hunt <[email protected]>

Reviewed by Gavin Barraclough.

Split JSVALUE32_64 code out of JITOpcodes.cpp and into JITOpcodes32_64.cpp
https://bugs.webkit.org/show_bug.cgi?id=38808

  • GNUmakefile.am:
  • JavaScriptCore.gypi:
  • JavaScriptCore.pro:
  • JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.vcproj:
  • JavaScriptCore.xcodeproj/project.pbxproj:
  • jit/JITOpcodes.cpp:
  • jit/JITOpcodes32_64.cpp: Added. (JSC::JIT::privateCompileCTIMachineTrampolines): (JSC::JIT::emit_op_mov): (JSC::JIT::emit_op_end): (JSC::JIT::emit_op_jmp): (JSC::JIT::emit_op_loop_if_lesseq): (JSC::JIT::emitSlow_op_loop_if_lesseq): (JSC::JIT::emit_op_new_object): (JSC::JIT::emit_op_instanceof): (JSC::JIT::emitSlow_op_instanceof): (JSC::JIT::emit_op_new_func): (JSC::JIT::emit_op_get_global_var): (JSC::JIT::emit_op_put_global_var): (JSC::JIT::emit_op_get_scoped_var): (JSC::JIT::emit_op_put_scoped_var): (JSC::JIT::emit_op_tear_off_activation): (JSC::JIT::emit_op_tear_off_arguments): (JSC::JIT::emit_op_new_array): (JSC::JIT::emit_op_resolve): (JSC::JIT::emit_op_to_primitive): (JSC::JIT::emitSlow_op_to_primitive): (JSC::JIT::emit_op_strcat): (JSC::JIT::emit_op_resolve_base): (JSC::JIT::emit_op_resolve_skip): (JSC::JIT::emit_op_resolve_global): (JSC::JIT::emitSlow_op_resolve_global): (JSC::JIT::emit_op_not): (JSC::JIT::emitSlow_op_not): (JSC::JIT::emit_op_jfalse): (JSC::JIT::emitSlow_op_jfalse): (JSC::JIT::emit_op_jtrue): (JSC::JIT::emitSlow_op_jtrue): (JSC::JIT::emit_op_jeq_null): (JSC::JIT::emit_op_jneq_null): (JSC::JIT::emit_op_jneq_ptr): (JSC::JIT::emit_op_jsr): (JSC::JIT::emit_op_sret): (JSC::JIT::emit_op_eq): (JSC::JIT::emitSlow_op_eq): (JSC::JIT::emit_op_neq): (JSC::JIT::emitSlow_op_neq): (JSC::JIT::compileOpStrictEq): (JSC::JIT::emit_op_stricteq): (JSC::JIT::emitSlow_op_stricteq): (JSC::JIT::emit_op_nstricteq): (JSC::JIT::emitSlow_op_nstricteq): (JSC::JIT::emit_op_eq_null): (JSC::JIT::emit_op_neq_null): (JSC::JIT::emit_op_resolve_with_base): (JSC::JIT::emit_op_new_func_exp): (JSC::JIT::emit_op_throw): (JSC::JIT::emit_op_get_pnames): (JSC::JIT::emit_op_next_pname): (JSC::JIT::emit_op_push_scope): (JSC::JIT::emit_op_pop_scope): (JSC::JIT::emit_op_to_jsnumber): (JSC::JIT::emitSlow_op_to_jsnumber): (JSC::JIT::emit_op_push_new_scope): (JSC::JIT::emit_op_catch): (JSC::JIT::emit_op_jmp_scopes): (JSC::JIT::emit_op_switch_imm): (JSC::JIT::emit_op_switch_char): 
(JSC::JIT::emit_op_switch_string): (JSC::JIT::emit_op_new_error): (JSC::JIT::emit_op_debug): (JSC::JIT::emit_op_enter): (JSC::JIT::emit_op_enter_with_activation): (JSC::JIT::emit_op_create_arguments): (JSC::JIT::emit_op_init_arguments): (JSC::JIT::emit_op_convert_this): (JSC::JIT::emitSlow_op_convert_this): (JSC::JIT::emit_op_profile_will_call): (JSC::JIT::emit_op_profile_did_call):
File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/JavaScriptCore/jit/JITOpcodes.cpp

    r59005 r59040  
    4040namespace JSC {
    4141
    42 #if USE(JSVALUE32_64)
    43 
    44 void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
    45 {
    46 #if ENABLE(JIT_OPTIMIZE_MOD)
    47     Label softModBegin = align();
    48     softModulo();
    49 #endif
    50 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    51     // (1) This function provides fast property access for string length
    52     Label stringLengthBegin = align();
    53    
    54     // regT0 holds payload, regT1 holds tag
    55    
    56     Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    57     Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
    58 
    59     // Checks out okay! - get the length from the Ustring.
    60     load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);
    61 
    62     Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    63     move(regT2, regT0);
    64     move(Imm32(JSValue::Int32Tag), regT1);
    65 
    66     ret();
    67 #endif
    68 
    69     // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    70 
    71 #if ENABLE(JIT_OPTIMIZE_CALL)
    72     // VirtualCallLink Trampoline
    73     // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    74     Label virtualCallLinkBegin = align();
    75     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    76 
    77     Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    78 
    79     Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    80     preserveReturnAddressAfterCall(regT3);
    81     restoreArgumentReference();
    82     Call callJSFunction2 = call();
    83     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    84     emitGetJITStubArg(2, regT1); // argCount
    85     restoreReturnAddressBeforeReturn(regT3);
    86     hasCodeBlock2.link(this);
    87 
    88     // Check argCount matches callee arity.
    89     Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    90     preserveReturnAddressAfterCall(regT3);
    91     emitPutJITStubArg(regT3, 1); // return address
    92     restoreArgumentReference();
    93     Call callArityCheck2 = call();
    94     move(regT1, callFrameRegister);
    95     emitGetJITStubArg(2, regT1); // argCount
    96     restoreReturnAddressBeforeReturn(regT3);
    97     arityCheckOkay2.link(this);
    98 
    99     isNativeFunc2.link(this);
    100 
    101     compileOpCallInitializeCallFrame();
    102 
    103     preserveReturnAddressAfterCall(regT3);
    104     emitPutJITStubArg(regT3, 1); // return address
    105     restoreArgumentReference();
    106     Call callLazyLinkCall = call();
    107     restoreReturnAddressBeforeReturn(regT3);
    108     jump(regT0);
    109 #endif // ENABLE(JIT_OPTIMIZE_CALL)
    110 
    111     // VirtualCall Trampoline
    112     // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    113     Label virtualCallBegin = align();
    114     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    115 
    116     Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    117 
    118     Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    119     preserveReturnAddressAfterCall(regT3);
    120     restoreArgumentReference();
    121     Call callJSFunction1 = call();
    122     emitGetJITStubArg(2, regT1); // argCount
    123     restoreReturnAddressBeforeReturn(regT3);
    124     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    125     hasCodeBlock3.link(this);
    126    
    127     // Check argCount matches callee arity.
    128     Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    129     preserveReturnAddressAfterCall(regT3);
    130     emitPutJITStubArg(regT3, 1); // return address
    131     restoreArgumentReference();
    132     Call callArityCheck1 = call();
    133     move(regT1, callFrameRegister);
    134     emitGetJITStubArg(2, regT1); // argCount
    135     restoreReturnAddressBeforeReturn(regT3);
    136     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    137     arityCheckOkay3.link(this);
    138 
    139     isNativeFunc3.link(this);
    140 
    141     compileOpCallInitializeCallFrame();
    142     loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    143     jump(regT0);
    144 
    145 #if CPU(X86) || CPU(ARM_TRADITIONAL)
    146     Label nativeCallThunk = align();
    147     preserveReturnAddressAfterCall(regT0);
    148     emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
    149 
    150     // Load caller frame's scope chain into this callframe so that whatever we call can
    151     // get to its global data.
    152     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    153     emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    154     emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
    155    
    156 #if CPU(X86)
    157     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    158 
    159     /* We have two structs that we use to describe the stackframe we set up for our
    160      * call to native code.  NativeCallFrameStructure describes the how we set up the stack
    161      * in advance of the call.  NativeFunctionCalleeSignature describes the callframe
    162      * as the native code expects it.  We do this as we are using the fastcall calling
    163      * convention which results in the callee popping its arguments off the stack, but
    164      * not the rest of the callframe so we need a nice way to ensure we increment the
    165      * stack pointer by the right amount after the call.
    166      */
    167 
    168 #if COMPILER(MSVC) || OS(LINUX)
    169 #if COMPILER(MSVC)
    170 #pragma pack(push)
    171 #pragma pack(4)
    172 #endif // COMPILER(MSVC)
    173     struct NativeCallFrameStructure {
    174       //  CallFrame* callFrame; // passed in EDX
    175         JSObject* callee;
    176         JSValue thisValue;
    177         ArgList* argPointer;
    178         ArgList args;
    179         JSValue result;
    180     };
    181     struct NativeFunctionCalleeSignature {
    182         JSObject* callee;
    183         JSValue thisValue;
    184         ArgList* argPointer;
    185     };
    186 #if COMPILER(MSVC)
    187 #pragma pack(pop)
    188 #endif // COMPILER(MSVC)
    189 #else
    190     struct NativeCallFrameStructure {
    191       //  CallFrame* callFrame; // passed in ECX
    192       //  JSObject* callee; // passed in EDX
    193         JSValue thisValue;
    194         ArgList* argPointer;
    195         ArgList args;
    196     };
    197     struct NativeFunctionCalleeSignature {
    198         JSValue thisValue;
    199         ArgList* argPointer;
    200     };
    201 #endif
    202    
    203     const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    204     // Allocate system stack frame
    205     subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
    206 
    207     // Set up arguments
    208     subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
    209 
    210     // push argcount
    211     storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
    212    
    213     // Calculate the start of the callframe header, and store in regT1
    214     addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
    215    
    216     // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    217     mul32(Imm32(sizeof(Register)), regT0, regT0);
    218     subPtr(regT0, regT1);
    219     storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
    220 
    221     // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    222     addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    223     storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
    224 
    225     // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    226     loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    227     loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
    228     storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    229     storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
    230 
    231 #if COMPILER(MSVC) || OS(LINUX)
    232     // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    233     addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);
    234 
    235     // Plant callee
    236     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    237     storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
    238 
    239     // Plant callframe
    240     move(callFrameRegister, X86Registers::edx);
    241 
    242     call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
    243 
    244     // JSValue is a non-POD type, so eax points to it
    245     emitLoad(0, regT1, regT0, X86Registers::eax);
    246 #else
    247     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
    248     move(callFrameRegister, X86Registers::ecx); // callFrame
    249     call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
    250 #endif
    251 
    252     // We've put a few temporaries on the stack in addition to the actual arguments
    253     // so pull them off now
    254     addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
    255 
    256 #elif CPU(ARM_TRADITIONAL)
    257     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
    258 
    259     // Allocate stack space for our arglist
    260     COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0 && sizeof(JSValue) == 8 && sizeof(Register) == 8, ArgList_should_by_8byte_aligned);
    261     subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
    262 
    263     // Set up arguments
    264     subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
    265 
    266     // Push argcount
    267     storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
    268 
    269     // Calculate the start of the callframe header, and store in regT1
    270     move(callFrameRegister, regT1);
    271     sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);
    272 
    273     // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    274     mul32(Imm32(sizeof(Register)), regT0, regT0);
    275     subPtr(regT0, regT1);
    276 
    277     // push pointer to arguments
    278     storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
    279 
    280     // Argument passing method:
    281     // r0 - points to return value
    282     // r1 - callFrame
    283     // r2 - callee
    284     // stack: this(JSValue) and a pointer to ArgList
    285 
    286 #if OS(WINCE)
    287     // Setup arg4:
    288     push(stackPointerRegister);
    289 
    290     // Setup arg3:
    291     // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    292     load32(Address(regT1, -(int32_t)sizeof(void*) * 2), ARMRegisters::r3);
    293     push(ARMRegisters::r3);
    294     load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    295     storePtr(regT3, Address(stackPointerRegister));
    296 
    297     // Setup arg2:
    298     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);
    299 
    300     // Setup arg1:
    301     move(callFrameRegister, regT1);
    302 
    303     // Setup arg0:
    304     move(stackPointerRegister, regT0);
    305 
    306     call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));
    307 
    308     load32(Address(stackPointerRegister, 0), regT0);
    309     load32(Address(stackPointerRegister, 4), regT1);
    310 
    311     addPtr(Imm32(sizeof(ArgList) + 8), stackPointerRegister);
    312 #else // OS(WINCE)
    313     move(stackPointerRegister, regT3);
    314     subPtr(Imm32(8), stackPointerRegister);
    315     move(stackPointerRegister, regT0);
    316     subPtr(Imm32(8 + 4 + 4 /* padding */), stackPointerRegister);
    317 
    318     // Setup arg4:
    319     storePtr(regT3, Address(stackPointerRegister, 8));
    320 
    321     // Setup arg3:
    322     // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
    323     load32(Address(regT1, -(int32_t)sizeof(void*) * 2), regT3);
    324     storePtr(regT3, Address(stackPointerRegister, 0));
    325     load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    326     storePtr(regT3, Address(stackPointerRegister, 4));
    327 
    328     // Setup arg2:
    329     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);
    330 
    331     // Setup arg1:
    332     move(callFrameRegister, regT1);
    333 
    334     call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));
    335 
    336     // Load return value
    337     load32(Address(stackPointerRegister, 16), regT0);
    338     load32(Address(stackPointerRegister, 20), regT1);
    339 
    340     addPtr(Imm32(sizeof(ArgList) + 16 + 8), stackPointerRegister);
    341 #endif // OS(WINCE)
    342 
    343 #endif
    344 
    345     // Check for an exception
    346     move(ImmPtr(&globalData->exception), regT2);
    347     Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));
    348 
    349     // Grab the return address.
    350     emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);
    351    
    352     // Restore our caller's "r".
    353     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    354    
    355     // Return.
    356     restoreReturnAddressBeforeReturn(regT3);
    357     ret();
    358 
    359     // Handle an exception
    360     sawException.link(this);
    361     // Grab the return address.
    362     emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    363     move(ImmPtr(&globalData->exceptionLocation), regT2);
    364     storePtr(regT1, regT2);
    365     move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
    366     emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    367     poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    368     restoreReturnAddressBeforeReturn(regT2);
    369     ret();
    370 
    371 #elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
    372 #error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
    373 #else
    374     breakpoint();
    375 #endif
    376    
    377 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    378     Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    379     Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    380     Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
    381 #endif
    382 
    383     // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    384     LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
    385 
    386 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    387     patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    388     patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    389     patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
    390 #endif
    391     patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    392     patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
    393 #if ENABLE(JIT_OPTIMIZE_CALL)
    394     patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    395     patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    396     patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    397 #endif
    398 
    399     CodeRef finalCode = patchBuffer.finalizeCode();
    400     *executablePool = finalCode.m_executablePool;
    401 
    402     trampolines->ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    403     trampolines->ctiNativeCallThunk = adoptRef(new NativeExecutable(JITCode(JITCode::HostFunction(trampolineAt(finalCode, nativeCallThunk)))));
    404 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    405     trampolines->ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
    406 #else
    407     UNUSED_PARAM(ctiStringLengthTrampoline);
    408 #endif
    409 #if ENABLE(JIT_OPTIMIZE_CALL)
    410     trampolines->ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
    411 #else
    412     UNUSED_PARAM(ctiVirtualCallLink);
    413 #endif
    414 #if ENABLE(JIT_OPTIMIZE_MOD)
    415     trampolines->ctiSoftModulo = trampolineAt(finalCode, softModBegin);
    416 #endif
    417 }
    418 
    419 void JIT::emit_op_mov(Instruction* currentInstruction)
    420 {
    421     unsigned dst = currentInstruction[1].u.operand;
    422     unsigned src = currentInstruction[2].u.operand;
    423 
    424     if (m_codeBlock->isConstantRegisterIndex(src))
    425         emitStore(dst, getConstantOperand(src));
    426     else {
    427         emitLoad(src, regT1, regT0);
    428         emitStore(dst, regT1, regT0);
    429         map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    430     }
    431 }
    432 
    433 void JIT::emit_op_end(Instruction* currentInstruction)
    434 {
    435     if (m_codeBlock->needsFullScopeChain())
    436         JITStubCall(this, cti_op_end).call();
    437     ASSERT(returnValueRegister != callFrameRegister);
    438     emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    439     restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    440     ret();
    441 }
    442 
    443 void JIT::emit_op_jmp(Instruction* currentInstruction)
    444 {
    445     unsigned target = currentInstruction[1].u.operand;
    446     addJump(jump(), target);
    447 }
    448 
    449 void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
    450 {
    451     unsigned op1 = currentInstruction[1].u.operand;
    452     unsigned op2 = currentInstruction[2].u.operand;
    453     unsigned target = currentInstruction[3].u.operand;
    454 
    455     emitTimeoutCheck();
    456 
    457     if (isOperandConstantImmediateInt(op1)) {
    458         emitLoad(op2, regT1, regT0);
    459         addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    460         addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
    461         return;
    462     }
    463 
    464     if (isOperandConstantImmediateInt(op2)) {
    465         emitLoad(op1, regT1, regT0);
    466         addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    467         addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
    468         return;
    469     }
    470 
    471     emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    472     addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    473     addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    474     addJump(branch32(LessThanOrEqual, regT0, regT2), target);
    475 }
    476 
    477 void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    478 {
    479     unsigned op1 = currentInstruction[1].u.operand;
    480     unsigned op2 = currentInstruction[2].u.operand;
    481     unsigned target = currentInstruction[3].u.operand;
    482 
    483     if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
    484         linkSlowCase(iter); // int32 check
    485     linkSlowCase(iter); // int32 check
    486 
    487     JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    488     stubCall.addArgument(op1);
    489     stubCall.addArgument(op2);
    490     stubCall.call();
    491     emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    492 }
    493 
    494 void JIT::emit_op_new_object(Instruction* currentInstruction)
    495 {
    496     JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
    497 }
    498 
    499 void JIT::emit_op_instanceof(Instruction* currentInstruction)
    500 {
    501     unsigned dst = currentInstruction[1].u.operand;
    502     unsigned value = currentInstruction[2].u.operand;
    503     unsigned baseVal = currentInstruction[3].u.operand;
    504     unsigned proto = currentInstruction[4].u.operand;
    505 
    506     // Load the operands into registers.
    507     // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    508     emitLoadPayload(value, regT2);
    509     emitLoadPayload(baseVal, regT0);
    510     emitLoadPayload(proto, regT1);
    511 
    512     // Check that value, baseVal, and proto are cells.
    513     emitJumpSlowCaseIfNotJSCell(value);
    514     emitJumpSlowCaseIfNotJSCell(baseVal);
    515     emitJumpSlowCaseIfNotJSCell(proto);
    516 
    517     // Check that baseVal 'ImplementsDefaultHasInstance'.
    518     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    519     addSlowCase(branchTest8(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));
    520 
    521     // Optimistically load the result true, and start looping.
    522     // Initially, regT1 still contains proto and regT2 still contains value.
    523     // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    524     move(Imm32(JSValue::TrueTag), regT0);
    525     Label loop(this);
    526 
    527     // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    528     // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    529     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    530     load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    531     Jump isInstance = branchPtr(Equal, regT2, regT1);
    532     branchTest32(NonZero, regT2).linkTo(loop, this);
    533 
    534     // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    535     move(Imm32(JSValue::FalseTag), regT0);
    536 
    537     // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    538     isInstance.link(this);
    539     emitStoreBool(dst, regT0);
    540 }
    541 
    542 void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    543 {
    544     unsigned dst = currentInstruction[1].u.operand;
    545     unsigned value = currentInstruction[2].u.operand;
    546     unsigned baseVal = currentInstruction[3].u.operand;
    547     unsigned proto = currentInstruction[4].u.operand;
    548 
    549     linkSlowCaseIfNotJSCell(iter, value);
    550     linkSlowCaseIfNotJSCell(iter, baseVal);
    551     linkSlowCaseIfNotJSCell(iter, proto);
    552     linkSlowCase(iter);
    553 
    554     JITStubCall stubCall(this, cti_op_instanceof);
    555     stubCall.addArgument(value);
    556     stubCall.addArgument(baseVal);
    557     stubCall.addArgument(proto);
    558     stubCall.call(dst);
    559 }
    560 
    561 void JIT::emit_op_new_func(Instruction* currentInstruction)
    562 {
    563     JITStubCall stubCall(this, cti_op_new_func);
    564     stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    565     stubCall.call(currentInstruction[1].u.operand);
    566 }
    567 
    568 void JIT::emit_op_get_global_var(Instruction* currentInstruction)
    569 {
    570     int dst = currentInstruction[1].u.operand;
    571     JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    572     ASSERT(globalObject->isGlobalObject());
    573     int index = currentInstruction[3].u.operand;
    574 
    575     loadPtr(&globalObject->d()->registers, regT2);
    576 
    577     emitLoad(index, regT1, regT0, regT2);
    578     emitStore(dst, regT1, regT0);
    579     map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
    580 }
    581 
    582 void JIT::emit_op_put_global_var(Instruction* currentInstruction)
    583 {
    584     JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    585     ASSERT(globalObject->isGlobalObject());
    586     int index = currentInstruction[2].u.operand;
    587     int value = currentInstruction[3].u.operand;
    588 
    589     emitLoad(value, regT1, regT0);
    590 
    591     loadPtr(&globalObject->d()->registers, regT2);
    592     emitStore(index, regT1, regT0, regT2);
    593     map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
    594 }
    595 
    596 void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
    597 {
    598     int dst = currentInstruction[1].u.operand;
    599     int index = currentInstruction[2].u.operand;
    600     int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
    601 
    602     emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    603     while (skip--)
    604         loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
    605 
    606     loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    607     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    608     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);
    609 
    610     emitLoad(index, regT1, regT0, regT2);
    611     emitStore(dst, regT1, regT0);
    612     map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
    613 }
    614 
    615 void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
    616 {
    617     int index = currentInstruction[1].u.operand;
    618     int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    619     int value = currentInstruction[3].u.operand;
    620 
    621     emitLoad(value, regT1, regT0);
    622 
    623     emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    624     while (skip--)
    625         loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
    626 
    627     loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    628     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    629     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);
    630 
    631     emitStore(index, regT1, regT0, regT2);
    632     map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
    633 }
    634 
    635 void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
    636 {
    637     JITStubCall stubCall(this, cti_op_tear_off_activation);
    638     stubCall.addArgument(currentInstruction[1].u.operand);
    639     stubCall.call();
    640 }
    641 
    642 void JIT::emit_op_tear_off_arguments(Instruction*)
    643 {
    644     JITStubCall(this, cti_op_tear_off_arguments).call();
    645 }
    646 
    647 void JIT::emit_op_new_array(Instruction* currentInstruction)
    648 {
    649     JITStubCall stubCall(this, cti_op_new_array);
    650     stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    651     stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    652     stubCall.call(currentInstruction[1].u.operand);
    653 }
    654 
    655 void JIT::emit_op_resolve(Instruction* currentInstruction)
    656 {
    657     JITStubCall stubCall(this, cti_op_resolve);
    658     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    659     stubCall.call(currentInstruction[1].u.operand);
    660 }
    661 
// op_to_primitive: non-cells and strings are already primitive and pass
// through; any other cell falls to the slow case.
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    // Cells must be strings (checked by vptr); otherwise take the slow case.
    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}
    677 
    678 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    679 {
    680     int dst = currentInstruction[1].u.operand;
    681 
    682     linkSlowCase(iter);
    683 
    684     JITStubCall stubCall(this, cti_op_to_primitive);
    685     stubCall.addArgument(regT1, regT0);
    686     stubCall.call(dst);
    687 }
    688 
    689 void JIT::emit_op_strcat(Instruction* currentInstruction)
    690 {
    691     JITStubCall stubCall(this, cti_op_strcat);
    692     stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    693     stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    694     stubCall.call(currentInstruction[1].u.operand);
    695 }
    696 
    697 void JIT::emit_op_resolve_base(Instruction* currentInstruction)
    698 {
    699     JITStubCall stubCall(this, cti_op_resolve_base);
    700     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    701     stubCall.call(currentInstruction[1].u.operand);
    702 }
    703 
    704 void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
    705 {
    706     JITStubCall stubCall(this, cti_op_resolve_skip);
    707     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    708     stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    709     stubCall.call(currentInstruction[1].u.operand);
    710 }
    711 
    712 void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
    713 {
    714     // FIXME: Optimize to use patching instead of so many memory accesses.
    715 
    716     unsigned dst = currentInstruction[1].u.operand;
    717     void* globalObject = currentInstruction[2].u.jsCell;
    718    
    719     unsigned currentIndex = m_globalResolveInfoIndex++;
    720     void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    721     void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
    722 
    723     // Verify structure.
    724     move(ImmPtr(globalObject), regT0);
    725     loadPtr(structureAddress, regT1);
    726     addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));
    727 
    728     // Load property.
    729     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    730     load32(offsetAddr, regT3);
    731     load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    732     load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    733     emitStore(dst, regT1, regT0);
    734     map(m_bytecodeIndex + dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
    735 }
    736 
    737 void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    738 {
    739     unsigned dst = currentInstruction[1].u.operand;
    740     void* globalObject = currentInstruction[2].u.jsCell;
    741     Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
    742 
    743     unsigned currentIndex = m_globalResolveInfoIndex++;
    744 
    745     linkSlowCase(iter);
    746     JITStubCall stubCall(this, cti_op_resolve_global);
    747     stubCall.addArgument(ImmPtr(globalObject));
    748     stubCall.addArgument(ImmPtr(ident));
    749     stubCall.addArgument(Imm32(currentIndex));
    750     stubCall.call(dst);
    751 }
    752 
// op_not: logical negation, fast path for booleans only.
void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    // After xor'ing with FalseTag, any bit other than bit 0 being set means
    // the tag was not a boolean tag -> slow case.
    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    // Second xor flips the boolean: FalseTag <-> TrueTag.
    xor32(Imm32(JSValue::TrueTag), regT0);

    // Store only needs to rewrite the payload when dst aliases src.
    emitStoreBool(dst, regT0, (dst == src));
}
    766 
    767 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    768 {
    769     unsigned dst = currentInstruction[1].u.operand;
    770     unsigned src = currentInstruction[2].u.operand;
    771 
    772     linkSlowCase(iter);
    773 
    774     JITStubCall stubCall(this, cti_op_not);
    775     stubCall.addArgument(src);
    776     stubCall.call(dst);
    777 }
    778 
// op_jfalse: jump to 'target' when the condition is falsy.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // Boolean cases: true falls through, false jumps.
    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target);

    // Int32 case: zero payload jumps, non-zero falls through.
    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        // Tags above LowestTag do not encode doubles -> slow case.
        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        // Double case: jump if the value is +/-0.0 or NaN (unordered).
        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleEqualOrUnordered, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isTrue.link(this);
    isTrue2.link(this);
}
    807 
    808 void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    809 {
    810     unsigned cond = currentInstruction[1].u.operand;
    811     unsigned target = currentInstruction[2].u.operand;
    812 
    813     linkSlowCase(iter);
    814     JITStubCall stubCall(this, cti_op_jtrue);
    815     stubCall.addArgument(cond);
    816     stubCall.call();
    817     emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
    818 }
    819 
// op_jtrue: jump to 'target' when the condition is truthy.
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // Boolean cases: false falls through, true jumps.
    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);

    // Int32 case: non-zero payload jumps, zero falls through.
    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        // Tags above LowestTag do not encode doubles -> slow case.
        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        // Double case: jump if the value compares not-equal to 0.0.
        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}
    848 
    849 void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    850 {
    851     unsigned cond = currentInstruction[1].u.operand;
    852     unsigned target = currentInstruction[2].u.operand;
    853 
    854     linkSlowCase(iter);
    855     JITStubCall stubCall(this, cti_op_jtrue);
    856     stubCall.addArgument(cond);
    857     stubCall.call();
    858     emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    859 }
    860 
// op_jeq_null: jump to 'target' if src is null, undefined, or a cell that
// masquerades as undefined.
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    // regT1 ends up non-zero iff the tag is NullTag or UndefinedTag.
    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target);

    wasNotImmediate.link(this);
}
    887 
// op_jneq_null: jump to 'target' unless src is null, undefined, or a cell
// that masquerades as undefined (inverse of op_jeq_null).
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    // regT1 ends up non-zero iff the tag is NullTag or UndefinedTag;
    // jump when it is zero (i.e. not null-like).
    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target);

    wasNotImmediate.link(this);
}
    914 
// op_jneq_ptr: jump to 'target' unless src is exactly the given cell.
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    // Jump if src is not a cell at all, or is a different cell pointer.
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
}
    925 
// op_jsr: store a return address into a virtual register and jump to the
// subroutine at 'target'.
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    // The real return address is unknown until code is linked; store a
    // placeholder now and record the patch site alongside the label that
    // follows the jump (see m_jsrSites).
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}
    934 
    935 void JIT::emit_op_sret(Instruction* currentInstruction)
    936 {
    937     jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    938 }
    939 
// op_eq: loose equality, fast path for two same-tagged immediates.
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    // Slow cases: differing tags, both cells, or double-encoded values.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    // Same-tag immediates: equal iff the payloads are equal.
    set8(Equal, regT0, regT2, regT0);
    // Box the 0/1 result as a JS boolean.
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
    956 
// Slow path for op_eq: handles string==string via cti_op_eq_strings and
// everything else via the fully generic cti_op_eq. The three slow-case
// entries must be consumed in the order the fast path registered them.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    // Only string cells (identified by vptr) stay on this path.
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    // Box the 0/1 stub result as a JS boolean.
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}
    991 
// op_neq: loose inequality, fast path for two same-tagged immediates
// (mirror image of op_eq).
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    // Slow cases: differing tags, both cells, or double-encoded values.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    // Same-tag immediates: not-equal iff the payloads differ.
    set8(NotEqual, regT0, regT2, regT0);
    // Box the 0/1 result as a JS boolean.
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
    1008 
// Slow path for op_neq: computes equality (strings via cti_op_eq_strings,
// generic via cti_op_eq) and then inverts the result. Slow-case entries are
// consumed in the order the fast path registered them.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    // Only string cells (identified by vptr) stay on this path.
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    // Invert the equality result, then box it as a JS boolean.
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}
    1042 
// Shared codegen for op_stricteq / op_nstricteq. The fast path only loads
// and compares the operand tags; any combination involving doubles or
// ambiguous cell/Int32 pairs is deferred to the slow case.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));

    // On the fast path the result is simply tag equality/inequality.
    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    // Box the 0/1 result as a JS boolean.
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}
    1068 
// op_stricteq: dst = (src1 === src2); shares codegen with op_nstricteq.
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
    1073 
    1074 void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    1075 {
    1076     unsigned dst = currentInstruction[1].u.operand;
    1077     unsigned src1 = currentInstruction[2].u.operand;
    1078     unsigned src2 = currentInstruction[3].u.operand;
    1079 
    1080     linkSlowCase(iter);
    1081     linkSlowCase(iter);
    1082 
    1083     JITStubCall stubCall(this, cti_op_stricteq);
    1084     stubCall.addArgument(src1);
    1085     stubCall.addArgument(src2);
    1086     stubCall.call(dst);
    1087 }
    1088 
// op_nstricteq: dst = (src1 !== src2); shares codegen with op_stricteq.
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
    1093 
    1094 void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    1095 {
    1096     unsigned dst = currentInstruction[1].u.operand;
    1097     unsigned src1 = currentInstruction[2].u.operand;
    1098     unsigned src2 = currentInstruction[3].u.operand;
    1099 
    1100     linkSlowCase(iter);
    1101     linkSlowCase(iter);
    1102 
    1103     JITStubCall stubCall(this, cti_op_nstricteq);
    1104     stubCall.addArgument(src1);
    1105     stubCall.addArgument(src2);
    1106     stubCall.call(dst);
    1107 }
    1108 
// op_eq_null: dst = (src == null) — true for null, undefined, and cells
// that masquerade as undefined.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // Cell case: result is the MasqueradesAsUndefined flag on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate case: true iff the tag is NullTag or UndefinedTag.
    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    // Box the 0/1 result as a JS boolean.
    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}
    1134 
// op_neq_null: dst = (src != null) — inverse of op_eq_null.
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // Cell case: result is true when MasqueradesAsUndefined is NOT set.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate case: true iff the tag is neither NullTag nor UndefinedTag.
    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    // Box the 0/1 result as a JS boolean.
    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}
    1160 
    1161 void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
    1162 {
    1163     JITStubCall stubCall(this, cti_op_resolve_with_base);
    1164     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    1165     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    1166     stubCall.call(currentInstruction[2].u.operand);
    1167 }
    1168 
    1169 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
    1170 {
    1171     JITStubCall stubCall(this, cti_op_new_func_exp);
    1172     stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    1173     stubCall.call(currentInstruction[1].u.operand);
    1174 }
    1175 
    1176 void JIT::emit_op_throw(Instruction* currentInstruction)
    1177 {
    1178     unsigned exception = currentInstruction[1].u.operand;
    1179     JITStubCall stubCall(this, cti_op_throw);
    1180     stubCall.addArgument(exception);
    1181     stubCall.call();
    1182 
    1183 #ifndef NDEBUG
    1184     // cti_op_throw always changes it's return address,
    1185     // this point in the code should never be reached.
    1186     breakpoint();
    1187 #endif
    1188 }
    1189 
// op_get_pnames: obtain a JSPropertyNameIterator for 'base' (into dst),
// initialize the loop counter 'i' to 0 and 'size' to the name count, or
// branch to 'breakTarget' when the base is null/undefined.
void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    // Skip checks that are statically known to pass for this base register.
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    // Not an object: null/undefined exit the loop; anything else is
    // converted with cti_to_object and re-enters the object path above.
    isNotObject.link(this);
    addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}
    1229 
// op_next_pname: fetch the next property name from the iterator 'it' into
// 'dst' and jump to 'target' to continue the loop; fall through when the
// iteration index reaches 'size'. Re-validates the iterator's cached
// structure and prototype chain, deferring to cti_has_property when the
// object may have changed since the names were gathered.
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(Imm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    loadPtr(addressFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    // Empty chain vector: nothing further to validate — key is valid.
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    // Walk the prototype chain, checking each prototype's structure against
    // the cached vector; a null prototype tag or mismatch goes slow.
    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}
    1294 
    1295 void JIT::emit_op_push_scope(Instruction* currentInstruction)
    1296 {
    1297     JITStubCall stubCall(this, cti_op_push_scope);
    1298     stubCall.addArgument(currentInstruction[1].u.operand);
    1299     stubCall.call(currentInstruction[1].u.operand);
    1300 }
    1301 
    1302 void JIT::emit_op_pop_scope(Instruction*)
    1303 {
    1304     JITStubCall(this, cti_op_pop_scope).call();
    1305 }
    1306 
// op_to_jsnumber: values that are already numeric pass through; everything
// else is converted on the slow path.
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    // Int32s pass directly; any tag at or above EmptyValueTag takes the
    // slow case. Remaining tags fall through.
    Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}
    1322 
    1323 void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
    1324 {
    1325     int dst = currentInstruction[1].u.operand;
    1326 
    1327     linkSlowCase(iter);
    1328 
    1329     JITStubCall stubCall(this, cti_op_to_jsnumber);
    1330     stubCall.addArgument(regT1, regT0);
    1331     stubCall.call(dst);
    1332 }
    1333 
    1334 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
    1335 {
    1336     JITStubCall stubCall(this, cti_op_push_new_scope);
    1337     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    1338     stubCall.addArgument(currentInstruction[3].u.operand);
    1339     stubCall.call(currentInstruction[1].u.operand);
    1340 }
    1341 
// op_catch: entry point of an exception handler. The exception value
// arrives in regT1/regT0 as the return value of cti_op_throw.
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;

    // This opcode only executes after a return from cti_op_throw.

    // cti_op_throw may have taken us to a call frame further up the stack; reload
    // the call frame pointer to adjust.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));

    // Now store the exception returned by cti_op_throw.
    emitStore(exception, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}
    1356 
    1357 void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
    1358 {
    1359     JITStubCall stubCall(this, cti_op_jmp_scopes);
    1360     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    1361     stubCall.call();
    1362     addJump(jump(), currentInstruction[2].u.operand);
    1363 }
    1364 
    1365 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
    1366 {
    1367     unsigned tableIndex = currentInstruction[1].u.operand;
    1368     unsigned defaultOffset = currentInstruction[2].u.operand;
    1369     unsigned scrutinee = currentInstruction[3].u.operand;
    1370 
    1371     // create jump table for switch destinations, track this switch statement.
    1372     SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    1373     m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    1374     jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
    1375 
    1376     JITStubCall stubCall(this, cti_op_switch_imm);
    1377     stubCall.addArgument(scrutinee);
    1378     stubCall.addArgument(Imm32(tableIndex));
    1379     stubCall.call();
    1380     jump(regT0);
    1381 }
    1382 
    1383 void JIT::emit_op_switch_char(Instruction* currentInstruction)
    1384 {
    1385     unsigned tableIndex = currentInstruction[1].u.operand;
    1386     unsigned defaultOffset = currentInstruction[2].u.operand;
    1387     unsigned scrutinee = currentInstruction[3].u.operand;
    1388 
    1389     // create jump table for switch destinations, track this switch statement.
    1390     SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    1391     m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    1392     jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
    1393 
    1394     JITStubCall stubCall(this, cti_op_switch_char);
    1395     stubCall.addArgument(scrutinee);
    1396     stubCall.addArgument(Imm32(tableIndex));
    1397     stubCall.call();
    1398     jump(regT0);
    1399 }
    1400 
    1401 void JIT::emit_op_switch_string(Instruction* currentInstruction)
    1402 {
    1403     unsigned tableIndex = currentInstruction[1].u.operand;
    1404     unsigned defaultOffset = currentInstruction[2].u.operand;
    1405     unsigned scrutinee = currentInstruction[3].u.operand;
    1406 
    1407     // create jump table for switch destinations, track this switch statement.
    1408     StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    1409     m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
    1410 
    1411     JITStubCall stubCall(this, cti_op_switch_string);
    1412     stubCall.addArgument(scrutinee);
    1413     stubCall.addArgument(Imm32(tableIndex));
    1414     stubCall.call();
    1415     jump(regT0);
    1416 }
    1417 
    1418 void JIT::emit_op_new_error(Instruction* currentInstruction)
    1419 {
    1420     unsigned dst = currentInstruction[1].u.operand;
    1421     unsigned type = currentInstruction[2].u.operand;
    1422     unsigned message = currentInstruction[3].u.operand;
    1423 
    1424     JITStubCall stubCall(this, cti_op_new_error);
    1425     stubCall.addArgument(Imm32(type));
    1426     stubCall.addArgument(m_codeBlock->getConstant(message));
    1427     stubCall.addArgument(Imm32(m_bytecodeIndex));
    1428     stubCall.call(dst);
    1429 }
    1430 
// Emits op_debug: a hook executed at debuggable points in the bytecode.
// Operands 1-3 are forwarded verbatim to the stub (presumably a debug-hook id
// plus source line bounds — confirm against the op_debug bytecode definition).
void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    // Trap directly into the attached native debugger instead of calling out.
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    // Hand the operands to cti_op_debug, which notifies the JS Debugger.
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}
    1444 
    1445 
    1446 void JIT::emit_op_enter(Instruction*)
    1447 {
    1448     // Even though JIT code doesn't use them, we initialize our constant
    1449     // registers to zap stale pointers, to avoid unnecessarily prolonging
    1450     // object lifetime and increasing GC pressure.
    1451     for (int i = 0; i < m_codeBlock->m_numVars; ++i)
    1452         emitStore(i, jsUndefined());
    1453 }
    1454 
    1455 void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
    1456 {
    1457     emit_op_enter(currentInstruction);
    1458 
    1459     JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    1460 }
    1461 
// Emits op_create_arguments: lazily materializes the 'arguments' object.
void JIT::emit_op_create_arguments(Instruction*)
{
    // Fast path: if the arguments register's tag is no longer EmptyValueTag,
    // the object already exists — skip the stub call entirely.
    Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));

    // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
    // m_numParameters == 1 means the function declares no formal parameters
    // (the implicit 'this' counts as one), so the cheaper no-params stub works.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    argsCreated.link(this);
}
    1474    
// Emits op_init_arguments: marks the arguments register as "not yet created"
// by storing the empty JSValue, so op_create_arguments' tag check fires later.
void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}
    1479 
// Emits op_convert_this: fast path that accepts 'this' unchanged when it is
// an object needing no conversion; everything else falls to the slow case
// (handled by emitSlow_op_convert_this, which must link both slow cases below
// in the same order).
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;
   
    // Load tag into regT1, payload into regT0.
    emitLoad(thisRegister, regT1, regT0);

    // Slow case 1: value is not a cell (e.g. a primitive).
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    // Slow case 2: the cell's Structure says it needs 'this' conversion.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    // Record that (regT1, regT0) still hold thisRegister for the next opcode.
    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}
    1493 
// Slow path for op_convert_this: performs the full conversion via the
// cti_op_convert_this stub and writes the result back to the this-register.
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    // Link both slow cases registered by the fast path (not-a-cell, and
    // needs-this-conversion), in the same order they were added.
    linkSlowCase(iter);
    linkSlowCase(iter);

    // regT1/regT0 still hold the tag/payload of 'this' from the fast path.
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}
    1505 
    1506 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
    1507 {
    1508     peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    1509     Jump noProfiler = branchTestPtr(Zero, Address(regT2));
    1510 
    1511     JITStubCall stubCall(this, cti_op_profile_will_call);
    1512     stubCall.addArgument(currentInstruction[1].u.operand);
    1513     stubCall.call();
    1514     noProfiler.link(this);
    1515 }
    1516 
    1517 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
    1518 {
    1519     peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    1520     Jump noProfiler = branchTestPtr(Zero, Address(regT2));
    1521 
    1522     JITStubCall stubCall(this, cti_op_profile_did_call);
    1523     stubCall.addArgument(currentInstruction[1].u.operand);
    1524     stubCall.call();
    1525     noProfiler.link(this);
    1526 }
    1527 
    1528 #else // USE(JSVALUE32_64)
     42#if !USE(JSVALUE32_64)
    152943
    153044#define RECORD_JUMP_TARGET(targetOffset) \
     
    30961610}
    30971611
    3098 #endif // USE(JSVALUE32_64)
     1612#endif // !USE(JSVALUE32_64)
    30991613
    31001614void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
Note: See TracChangeset for help on using the changeset viewer.