Changeset 38984 in webkit for trunk/JavaScriptCore/jit/JIT.cpp


Timestamp:
Dec 3, 2008, 9:43:14 PM
Author:
[email protected]
Message:

2008-12-03 Gavin Barraclough <[email protected]>

Reviewed by Cameron Zwarich.

Allow the JIT to operate without the call-repatching optimization.
This is controlled by ENABLE(JIT_OPTIMIZE_CALL) and defaults to on,
since disabling it leads to a significant performance regression.
(See the feature-flag sketch after the file list below.)

https://bugs.webkit.org/show_bug.cgi?id=22639

  • JavaScriptCore.vcproj/JavaScriptCore/JavaScriptCore.vcproj:
  • JavaScriptCore.xcodeproj/project.pbxproj:
  • jit/JIT.cpp: (JSC::JIT::privateCompileSlowCases):
  • jit/JIT.h:
  • jit/JITCall.cpp: Copied from jit/JIT.cpp. (JSC::JIT::compileOpCallInitializeCallFrame): (JSC::JIT::compileOpCallSetupArgs): (JSC::JIT::compileOpCallEvalSetupArgs): (JSC::JIT::compileOpConstructSetupArgs): (JSC::JIT::compileOpCall): (JSC::JIT::compileOpCallSlowCase): (JSC::unreachable):
  • jit/JITInlineMethods.h: Copied from jit/JIT.cpp. (JSC::JIT::checkStructure): (JSC::JIT::emitFastArithPotentiallyReTagImmediate): (JSC::JIT::emitTagAsBoolImmediate):
  • wtf/Platform.h:
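
Since wtf/Platform.h appears in the file list but its hunk is not shown in the diff below, here is a minimal sketch of how such a flag is typically wired up, assuming the usual WTF ENABLE() pattern (an ENABLE_* define defaulting to 1, tested through the ENABLE() macro); apart from ENABLE(JIT_OPTIMIZE_CALL) itself, the names and placement are illustrative only.

    // Sketch only -- assumes the standard WTF feature-flag pattern; the actual
    // wtf/Platform.h hunk is not part of the diff shown on this page.
    #define ENABLE(WTF_FEATURE) (defined ENABLE_##WTF_FEATURE && ENABLE_##WTF_FEATURE)

    // Default the new flag to on; turning it off compiles out call repatching
    // at the cost of a significant performance regression.
    #if !defined(ENABLE_JIT_OPTIMIZE_CALL)
    #define ENABLE_JIT_OPTIMIZE_CALL 1
    #endif

    // Call-compilation code can then pick a path at compile time:
    #if ENABLE(JIT_OPTIMIZE_CALL)
        // emit the inline callee check plus a repatchable (linkable) call
    #else
        // always go through the generic virtual-call trampoline
    #endif
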
File:
1 edited

  • trunk/JavaScriptCore/jit/JIT.cpp

    r38975 r38984  
    3030
    3131#include "CodeBlock.h"
     32#include "JITInlineMethods.h"
    3233#include "JSArray.h"
    3334#include "JSFunction.h"
     
    4041#endif
    4142
    42 #define __ m_assembler.
    43 
    44 #if PLATFORM(WIN)
    45 #undef FIELD_OFFSET // Fix conflict with winnt.h.
    46 #endif
    47 
    48 // FIELD_OFFSET: Like the C++ offsetof macro, but you can use it with classes.
    49 // The magic number 0x4000 is insignificant. We use it to avoid using NULL, since
    50 // NULL can cause compiler problems, especially in cases of multiple inheritance.
    51 #define FIELD_OFFSET(class, field) (reinterpret_cast<ptrdiff_t>(&(reinterpret_cast<class*>(0x4000)->field)) - 0x4000)
    52 
    5343using namespace std;
    5444
    5545namespace JSC {
    56 
    57 typedef X86Assembler::JmpSrc JmpSrc;
    5846
    5947#if PLATFORM(MAC)
     
    180168
    181169#endif
    182 
    183 static ALWAYS_INLINE uintptr_t asInteger(JSValue* value)
    184 {
    185     return reinterpret_cast<uintptr_t>(value);
    186 }
    187 
    188 ALWAYS_INLINE void JIT::killLastResultRegister()
    189 {
    190     m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
    191 }
    192 
    193 // get arg puts an arg from the SF register array into a h/w register
    194 ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst, unsigned currentInstructionIndex)
    195 {
    196     // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    197     if (m_codeBlock->isConstantRegisterIndex(src)) {
    198         JSValue* value = m_codeBlock->getConstant(src);
    199         __ movl_i32r(asInteger(value), dst);
    200         killLastResultRegister();
    201         return;
    202     }
    203 
    204     if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src)) {
    205         bool atJumpTarget = false;
    206         while (m_jumpTargetsPosition < m_codeBlock->jumpTargets.size() && m_codeBlock->jumpTargets[m_jumpTargetsPosition] <= currentInstructionIndex) {
    207             if (m_codeBlock->jumpTargets[m_jumpTargetsPosition] == currentInstructionIndex)
    208                 atJumpTarget = true;
    209             ++m_jumpTargetsPosition;
    210         }
    211 
    212         if (!atJumpTarget) {
    213             // The argument we want is already stored in eax
    214             if (dst != X86::eax)
    215                 __ movl_rr(X86::eax, dst);
    216             killLastResultRegister();
    217             return;
    218         }
    219     }
    220 
    221     __ movl_mr(src * sizeof(Register), X86::edi, dst);
    222     killLastResultRegister();
    223 }
    224 
    225 ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2, unsigned i)
    226 {
    227     if (src2 == m_lastResultBytecodeRegister) {
    228         emitGetVirtualRegister(src2, dst2, i);
    229         emitGetVirtualRegister(src1, dst1, i);
    230     } else {
    231         emitGetVirtualRegister(src1, dst1, i);
    232         emitGetVirtualRegister(src2, dst2, i);
    233     }
    234 }
    235 
    236 // get arg puts an arg from the SF register array onto the stack, as an arg to a context threaded function.
    237 ALWAYS_INLINE void JIT::emitPutCTIArgFromVirtualRegister(unsigned src, unsigned offset, RegisterID scratch)
    238 {
    239     if (m_codeBlock->isConstantRegisterIndex(src)) {
    240         JSValue* value = m_codeBlock->getConstant(src);
    241         __ movl_i32m(asInteger(value), offset + sizeof(void*), X86::esp);
    242     } else {
    243         __ movl_mr(src * sizeof(Register), X86::edi, scratch);
    244         __ movl_rm(scratch, offset + sizeof(void*), X86::esp);
    245     }
    246 
    247     killLastResultRegister();
    248 }
    249 
    250 // puts an arg onto the stack, as an arg to a context threaded function.
    251 ALWAYS_INLINE void JIT::emitPutCTIArg(RegisterID src, unsigned offset)
    252 {
    253     __ movl_rm(src, offset + sizeof(void*), X86::esp);
    254 }
    255 
    256 ALWAYS_INLINE void JIT::emitGetCTIArg(unsigned offset, RegisterID dst)
    257 {
    258     __ movl_mr(offset + sizeof(void*), X86::esp, dst);
    259 }
    260 
    261 
    262 ALWAYS_INLINE void JIT::emitPutCTIArgConstant(unsigned value, unsigned offset)
    263 {
    264     __ movl_i32m(value, offset + sizeof(void*), X86::esp);
    265 }
    266 
    267 ALWAYS_INLINE JSValue* JIT::getConstantImmediateNumericArg(unsigned src)
    268 {
    269     if (m_codeBlock->isConstantRegisterIndex(src)) {
    270         JSValue* value = m_codeBlock->getConstant(src);
    271         return JSImmediate::isNumber(value) ? value : noValue();
    272     }
    273     return noValue();
    274 }
    275 
    276 ALWAYS_INLINE void JIT::emitPutCTIParam(void* value, unsigned name)
    277 {
    278     __ movl_i32m(reinterpret_cast<intptr_t>(value), name * sizeof(void*), X86::esp);
    279 }
    280 
    281 ALWAYS_INLINE void JIT::emitPutCTIParam(RegisterID from, unsigned name)
    282 {
    283     __ movl_rm(from, name * sizeof(void*), X86::esp);
    284 }
    285 
    286 ALWAYS_INLINE void JIT::emitGetCTIParam(unsigned name, RegisterID to)
    287 {
    288     __ movl_mr(name * sizeof(void*), X86::esp, to);
    289     killLastResultRegister();
    290 }
    291 
    292 ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
    293 {
    294     __ movl_rm(from, entry * sizeof(Register), X86::edi);
    295 }
    296 
    297 ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader(RegisterFile::CallFrameHeaderEntry entry, RegisterID to)
    298 {
    299     __ movl_mr(entry * sizeof(Register), X86::edi, to);
    300     killLastResultRegister();
    301 }
    302 
    303 ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
    304 {
    305     __ movl_rm(from, dst * sizeof(Register), X86::edi);
    306     m_lastResultBytecodeRegister = (from == X86::eax) ? dst : std::numeric_limits<int>::max();
    307     // FIXME: #ifndef NDEBUG, Write the correct m_type to the register.
    308 }
    309 
    310 ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
    311 {
    312     __ movl_i32m(asInteger(jsUndefined()), dst * sizeof(Register), X86::edi);
    313     // FIXME: #ifndef NDEBUG, Write the correct m_type to the register.
    314 }
    315170
    316171void ctiSetReturnAddress(void** where, void* what)
     
    362217#endif
    363218
    364 ALWAYS_INLINE JmpSrc JIT::emitNakedCall(unsigned bytecodeIndex, X86::RegisterID r)
    365 {
    366     JmpSrc call = __ call(r);
    367     m_calls.append(CallRecord(call, bytecodeIndex));
    368 
    369     return call;
    370 }
    371 
    372 ALWAYS_INLINE  JmpSrc JIT::emitNakedCall(unsigned bytecodeIndex, void* function)
    373 {
    374     JmpSrc call = __ call();
    375     m_calls.append(CallRecord(call, reinterpret_cast<CTIHelper_v>(function), bytecodeIndex));
    376     return call;
    377 }
    378 
    379 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_j helper)
    380 {
    381 #if ENABLE(OPCODE_SAMPLING)
    382     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, true), m_interpreter->sampler()->sampleSlot());
    383 #endif
    384     __ restoreArgumentReference();
    385     emitPutCTIParam(X86::edi, CTI_ARGS_callFrame);
    386     JmpSrc call = __ call();
    387     m_calls.append(CallRecord(call, helper, bytecodeIndex));
    388 #if ENABLE(OPCODE_SAMPLING)
    389     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, false), m_interpreter->sampler()->sampleSlot());
    390 #endif
    391     killLastResultRegister();
    392 
    393     return call;
    394 }
    395 
    396 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_o helper)
    397 {
    398 #if ENABLE(OPCODE_SAMPLING)
    399     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, true), m_interpreter->sampler()->sampleSlot());
    400 #endif
    401     __ restoreArgumentReference();
    402     emitPutCTIParam(X86::edi, CTI_ARGS_callFrame);
    403     JmpSrc call = __ call();
    404     m_calls.append(CallRecord(call, helper, bytecodeIndex));
    405 #if ENABLE(OPCODE_SAMPLING)
    406     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, false), m_interpreter->sampler()->sampleSlot());
    407 #endif
    408     killLastResultRegister();
    409 
    410     return call;
    411 }
    412 
    413 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_p helper)
    414 {
    415 #if ENABLE(OPCODE_SAMPLING)
    416     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, true), m_interpreter->sampler()->sampleSlot());
    417 #endif
    418     __ restoreArgumentReference();
    419     emitPutCTIParam(X86::edi, CTI_ARGS_callFrame);
    420     JmpSrc call = __ call();
    421     m_calls.append(CallRecord(call, helper, bytecodeIndex));
    422 #if ENABLE(OPCODE_SAMPLING)
    423     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, false), m_interpreter->sampler()->sampleSlot());
    424 #endif
    425     killLastResultRegister();
    426 
    427     return call;
    428 }
    429 
    430 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_b helper)
    431 {
    432 #if ENABLE(OPCODE_SAMPLING)
    433     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, true), m_interpreter->sampler()->sampleSlot());
    434 #endif
    435     __ restoreArgumentReference();
    436     emitPutCTIParam(X86::edi, CTI_ARGS_callFrame);
    437     JmpSrc call = __ call();
    438     m_calls.append(CallRecord(call, helper, bytecodeIndex));
    439 #if ENABLE(OPCODE_SAMPLING)
    440     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, false), m_interpreter->sampler()->sampleSlot());
    441 #endif
    442     killLastResultRegister();
    443 
    444     return call;
    445 }
    446 
    447 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_v helper)
    448 {
    449 #if ENABLE(OPCODE_SAMPLING)
    450     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, true), m_interpreter->sampler()->sampleSlot());
    451 #endif
    452     __ restoreArgumentReference();
    453     emitPutCTIParam(X86::edi, CTI_ARGS_callFrame);
    454     JmpSrc call = __ call();
    455     m_calls.append(CallRecord(call, helper, bytecodeIndex));
    456 #if ENABLE(OPCODE_SAMPLING)
    457     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, false), m_interpreter->sampler()->sampleSlot());
    458 #endif
    459     killLastResultRegister();
    460 
    461     return call;
    462 }
    463 
    464 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_s helper)
    465 {
    466 #if ENABLE(OPCODE_SAMPLING)
    467     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, true), m_interpreter->sampler()->sampleSlot());
    468 #endif
    469     __ restoreArgumentReference();
    470     emitPutCTIParam(X86::edi, CTI_ARGS_callFrame);
    471     JmpSrc call = __ call();
    472     m_calls.append(CallRecord(call, helper, bytecodeIndex));
    473 #if ENABLE(OPCODE_SAMPLING)
    474     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, false), m_interpreter->sampler()->sampleSlot());
    475 #endif
    476     killLastResultRegister();
    477 
    478     return call;
    479 }
    480 
    481 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_2 helper)
    482 {
    483 #if ENABLE(OPCODE_SAMPLING)
    484     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, true), m_interpreter->sampler()->sampleSlot());
    485 #endif
    486     __ restoreArgumentReference();
    487     emitPutCTIParam(X86::edi, CTI_ARGS_callFrame);
    488     JmpSrc call = __ call();
    489     m_calls.append(CallRecord(call, helper, bytecodeIndex));
    490 #if ENABLE(OPCODE_SAMPLING)
    491     __ movl_i32m(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions.begin() + bytecodeIndex, false), m_interpreter->sampler()->sampleSlot());
    492 #endif
    493     killLastResultRegister();
    494 
    495     return call;
    496 }
    497 
    498 JmpSrc JIT::checkStructure(RegisterID reg, Structure* structure)
    499 {
    500     __ cmpl_i32m(reinterpret_cast<uint32_t>(structure), FIELD_OFFSET(JSCell, m_structure), reg);
    501     return __ jne();
    502 }
    503 
    504 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, unsigned bytecodeIndex)
    505 {
    506     __ testl_i32r(JSImmediate::TagMask, reg);
    507     m_slowCases.append(SlowCaseEntry(__ jne(), bytecodeIndex));
    508 }
    509 
    510 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, unsigned bytecodeIndex, int vReg)
    511 {
    512     if (m_codeBlock->isKnownNotImmediate(vReg))
    513         return;
    514 
    515     emitJumpSlowCaseIfNotJSCell(reg, bytecodeIndex);
    516 }
    517 
    518 ALWAYS_INLINE bool JIT::linkSlowCaseIfNotJSCell(const Vector<SlowCaseEntry>::iterator& iter, int vReg)
    519 {
    520     if (m_codeBlock->isKnownNotImmediate(vReg))
    521         return false;
    522    
    523     __ link(iter->from, __ label());
    524     return true;
    525 }
    526 
    527 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmNum(RegisterID reg, unsigned bytecodeIndex)
    528 {
    529     __ testl_i32r(JSImmediate::TagBitTypeInteger, reg);
    530     m_slowCases.append(SlowCaseEntry(__ je(), bytecodeIndex));
    531 }
    532 
    533 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmNums(RegisterID reg1, RegisterID reg2, unsigned bytecodeIndex)
    534 {
    535     __ movl_rr(reg1, X86::ecx);
    536     __ andl_rr(reg2, X86::ecx);
    537     emitJumpSlowCaseIfNotImmNum(X86::ecx, bytecodeIndex);
    538 }
    539 
    540 ALWAYS_INLINE unsigned JIT::getDeTaggedConstantImmediate(JSValue* imm)
    541 {
    542     ASSERT(JSImmediate::isNumber(imm));
    543     return asInteger(imm) & ~JSImmediate::TagBitTypeInteger;
    544 }
    545 
    546 ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
    547 {
    548     __ subl_i8r(JSImmediate::TagBitTypeInteger, reg);
    549 }
    550 
    551 ALWAYS_INLINE JmpSrc JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
    552 {
    553     __ subl_i8r(JSImmediate::TagBitTypeInteger, reg);
    554     return __ je();
    555 }
    556 
    557 ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID reg)
    558 {
    559     __ addl_i8r(JSImmediate::TagBitTypeInteger, reg);
    560 }
    561 
    562 ALWAYS_INLINE void JIT::emitFastArithPotentiallyReTagImmediate(RegisterID reg)
    563 {
    564     __ orl_i8r(JSImmediate::TagBitTypeInteger, reg);
    565 }
    566 
    567 ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg)
    568 {
    569     __ sarl_i8r(1, reg);
    570 }
    571 
    572 ALWAYS_INLINE void JIT::emitFastArithIntToImmOrSlowCase(RegisterID reg, unsigned bytecodeIndex)
    573 {
    574     __ addl_rr(reg, reg);
    575     m_slowCases.append(SlowCaseEntry(__ jo(), bytecodeIndex));
    576     emitFastArithReTagImmediate(reg);
    577 }
    578 
    579 ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID reg)
    580 {
    581     __ addl_rr(reg, reg);
    582     emitFastArithReTagImmediate(reg);
    583 }
    584 
    585 ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
    586 {
    587     __ shl_i8r(JSImmediate::ExtendedPayloadShift, reg);
    588     __ orl_i8r(JSImmediate::FullTagTypeBool, reg);
    589 }
    590 
    591219JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
    592220    : m_interpreter(globalData->interpreter)
     
    619247        break; \
    620248    }
    621 
    622 static void unreachable()
    623 {
    624     ASSERT_NOT_REACHED();
    625     exit(1);
    626 }
    627 
    628 void JIT::compileOpCallInitializeCallFrame()
    629 {
    630     __ movl_rm(X86::edx, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register)), X86::edi);
    631 
    632     __ movl_mr(FIELD_OFFSET(JSFunction, m_scopeChain) + FIELD_OFFSET(ScopeChain, m_node), X86::ecx, X86::edx); // newScopeChain
    633 
    634     __ movl_i32m(asInteger(noValue()), RegisterFile::OptionalCalleeArguments * static_cast<int>(sizeof(Register)), X86::edi);
    635     __ movl_rm(X86::ecx, RegisterFile::Callee * static_cast<int>(sizeof(Register)), X86::edi);
    636     __ movl_rm(X86::edx, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register)), X86::edi);
    637 }
    638 
    639 void JIT::compileOpCallSetupArgs(Instruction* instruction)
    640 {
    641     int argCount = instruction[3].u.operand;
    642     int registerOffset = instruction[4].u.operand;
    643 
    644     // ecx holds func
    645     emitPutCTIArg(X86::ecx, 0);
    646     emitPutCTIArgConstant(registerOffset, 4);
    647     emitPutCTIArgConstant(argCount, 8);
    648     emitPutCTIArgConstant(reinterpret_cast<unsigned>(instruction), 12);
    649 }
    650 
    651 void JIT::compileOpCallEvalSetupArgs(Instruction* instruction)
    652 {
    653     int argCount = instruction[3].u.operand;
    654     int registerOffset = instruction[4].u.operand;
    655 
    656     // ecx holds func
    657     emitPutCTIArg(X86::ecx, 0);
    658     emitPutCTIArgConstant(registerOffset, 4);
    659     emitPutCTIArgConstant(argCount, 8);
    660     emitPutCTIArgConstant(reinterpret_cast<unsigned>(instruction), 12);
    661 }
    662 
    663 void JIT::compileOpConstructSetupArgs(Instruction* instruction)
    664 {
    665     int argCount = instruction[3].u.operand;
    666     int registerOffset = instruction[4].u.operand;
    667     int proto = instruction[5].u.operand;
    668     int thisRegister = instruction[6].u.operand;
    669 
    670     // ecx holds func
    671     emitPutCTIArg(X86::ecx, 0);
    672     emitPutCTIArgConstant(registerOffset, 4);
    673     emitPutCTIArgConstant(argCount, 8);
    674     emitPutCTIArgFromVirtualRegister(proto, 12, X86::eax);
    675     emitPutCTIArgConstant(thisRegister, 16);
    676     emitPutCTIArgConstant(reinterpret_cast<unsigned>(instruction), 20);
    677 }
    678 
    679 void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned i, unsigned callLinkInfoIndex)
    680 {
    681     int dst = instruction[1].u.operand;
    682     int callee = instruction[2].u.operand;
    683     int argCount = instruction[3].u.operand;
    684     int registerOffset = instruction[4].u.operand;
    685 
    686     // Handle eval
    687     JmpSrc wasEval;
    688     if (opcodeID == op_call_eval) {
    689         emitGetVirtualRegister(callee, X86::ecx, i);
    690         compileOpCallEvalSetupArgs(instruction);
    691 
    692         emitCTICall(i, Interpreter::cti_op_call_eval);
    693         __ cmpl_i32r(asInteger(JSImmediate::impossibleValue()), X86::eax);
    694         wasEval = __ jne();
    695     }
    696 
    697     // This plants a check for a cached JSFunction value, so we can plant a fast link to the callee.
    698     // This deliberately leaves the callee in ecx, used when setting up the stack frame below
    699     emitGetVirtualRegister(callee, X86::ecx, i);
    700     __ cmpl_i32r(asInteger(JSImmediate::impossibleValue()), X86::ecx);
    701     JmpDst addressOfLinkedFunctionCheck = __ label();
    702     m_slowCases.append(SlowCaseEntry(__ jne(), i));
    703     ASSERT(X86Assembler::getDifferenceBetweenLabels(addressOfLinkedFunctionCheck, __ label()) == repatchOffsetOpCallCall);
    704     m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    705 
    706     // The following is the fast case, only used when a callee can be linked.
    707 
    708     // In the case of OpConstruct, call out to a cti_ function to create the new object.
    709     if (opcodeID == op_construct) {
    710         int proto = instruction[5].u.operand;
    711         int thisRegister = instruction[6].u.operand;
    712 
    713         emitPutCTIArg(X86::ecx, 0);
    714         emitPutCTIArgFromVirtualRegister(proto, 12, X86::eax);
    715         emitCTICall(i, Interpreter::cti_op_construct_JSConstruct);
    716         emitPutVirtualRegister(thisRegister);
    717         emitGetVirtualRegister(callee, X86::ecx, i);
    718     }
    719 
    720     // Fast version of stack frame initialization, directly relative to edi.
    721     // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee
    722     __ movl_i32m(asInteger(noValue()), (registerOffset + RegisterFile::OptionalCalleeArguments) * static_cast<int>(sizeof(Register)), X86::edi);
    723     __ movl_rm(X86::ecx, (registerOffset + RegisterFile::Callee) * static_cast<int>(sizeof(Register)), X86::edi);
    724     __ movl_mr(FIELD_OFFSET(JSFunction, m_scopeChain) + FIELD_OFFSET(ScopeChain, m_node), X86::ecx, X86::edx); // newScopeChain
    725     __ movl_i32m(argCount, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register)), X86::edi);
    726     __ movl_rm(X86::edi, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register)), X86::edi);
    727     __ movl_rm(X86::edx, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register)), X86::edi);
    728     __ addl_i32r(registerOffset * sizeof(Register), X86::edi);
    729 
    730     // Call to the callee
    731     m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall(i, reinterpret_cast<void*>(unreachable));
    732    
    733     if (opcodeID == op_call_eval)
    734         __ link(wasEval, __ label());
    735 
    736     // Put the return value in dst. In the interpreter, op_ret does this.
    737     emitPutVirtualRegister(dst);
    738 
    739 #if ENABLE(CODEBLOCK_SAMPLING)
    740         __ movl_i32m(reinterpret_cast<unsigned>(m_codeBlock), m_interpreter->sampler()->codeBlockSlot());
    741 #endif
    742 }
    743249
    744250void JIT::compileOpStrictEq(Instruction* instruction, unsigned i, CompileOpStrictEqType type)
     
    28142320        case op_call_eval:
    28152321        case op_construct: {
    2816             int dst = instruction[i + 1].u.operand;
    2817             int callee = instruction[i + 2].u.operand;
    2818             int argCount = instruction[i + 3].u.operand;
    2819             int registerOffset = instruction[i + 4].u.operand;
    2820 
    2821             __ link(iter->from, __ label());
    2822 
    2823             // The arguments have been set up on the hot path for op_call_eval
    2824             if (opcodeID == op_call)
    2825                 compileOpCallSetupArgs(instruction + i);
    2826             else if (opcodeID == op_construct)
    2827                 compileOpConstructSetupArgs(instruction + i);
    2828 
    2829             // Fast check for JS function.
    2830             __ testl_i32r(JSImmediate::TagMask, X86::ecx);
    2831             JmpSrc callLinkFailNotObject = __ jne();
    2832             __ cmpl_i32m(reinterpret_cast<unsigned>(m_interpreter->m_jsFunctionVptr), X86::ecx);
    2833             JmpSrc callLinkFailNotJSFunction = __ jne();
    2834 
    2835             // First, in the case of a construct, allocate the new object.
    2836             if (opcodeID == op_construct) {
    2837                 emitCTICall(i, Interpreter::cti_op_construct_JSConstruct);
    2838                 emitPutVirtualRegister(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
    2839                 emitGetVirtualRegister(callee, X86::ecx, i);
    2840             }
    2841 
    2842             __ movl_i32r(argCount, X86::edx);
    2843 
    2844             // Speculatively roll the callframe, assuming argCount will match the arity.
    2845             __ movl_rm(X86::edi, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register)), X86::edi);
    2846             __ addl_i32r(registerOffset * static_cast<int>(sizeof(Register)), X86::edi);
    2847 
    2848             m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation =
    2849                 emitNakedCall(i, m_interpreter->m_ctiVirtualCallPreLink);
    2850 
    2851             JmpSrc storeResultForFirstRun = __ jmp();
    2852 
    2853             // This is the address for the cold path *after* the first run (which tries to link the call).
    2854             m_callStructureStubCompilationInfo[callLinkInfoIndex].coldPathOther = __ label();
    2855 
    2856             // The arguments have been set up on the hot path for op_call_eval
    2857             if (opcodeID == op_call)
    2858                 compileOpCallSetupArgs(instruction + i);
    2859             else if (opcodeID == op_construct)
    2860                 compileOpConstructSetupArgs(instruction + i);
    2861 
    2862             // Check for JSFunctions.
    2863             __ testl_i32r(JSImmediate::TagMask, X86::ecx);
    2864             JmpSrc isNotObject = __ jne();
    2865             __ cmpl_i32m(reinterpret_cast<unsigned>(m_interpreter->m_jsFunctionVptr), X86::ecx);
    2866             JmpSrc isJSFunction = __ je();
    2867 
    2868             // This handles host functions
    2869             JmpDst notJSFunctionlabel = __ label();
    2870             __ link(isNotObject, notJSFunctionlabel);
    2871             __ link(callLinkFailNotObject, notJSFunctionlabel);
    2872             __ link(callLinkFailNotJSFunction, notJSFunctionlabel);
    2873             emitCTICall(i, ((opcodeID == op_construct) ? Interpreter::cti_op_construct_NotJSConstruct : Interpreter::cti_op_call_NotJSFunction));
    2874             JmpSrc wasNotJSFunction = __ jmp();
    2875 
    2876             // Next, handle JSFunctions...
    2877             __ link(isJSFunction, __ label());
    2878 
    2879             // First, in the case of a construct, allocate the new object.
    2880             if (opcodeID == op_construct) {
    2881                 emitCTICall(i, Interpreter::cti_op_construct_JSConstruct);
    2882                 emitPutVirtualRegister(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
    2883                 emitGetVirtualRegister(callee, X86::ecx, i);
    2884             }
    2885 
    2886             // Speculatively roll the callframe, assuming argCount will match the arity.
    2887             __ movl_rm(X86::edi, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register)), X86::edi);
    2888             __ addl_i32r(registerOffset * static_cast<int>(sizeof(Register)), X86::edi);
    2889             __ movl_i32r(argCount, X86::edx);
    2890 
    2891             emitNakedCall(i, m_interpreter->m_ctiVirtualCall);
    2892 
    2893             // Put the return value in dst. In the interpreter, op_ret does this.
    2894             JmpDst storeResult = __ label();
    2895             __ link(wasNotJSFunction, storeResult);
    2896             __ link(storeResultForFirstRun, storeResult);
    2897             emitPutVirtualRegister(dst);
    2898 
    2899 #if ENABLE(CODEBLOCK_SAMPLING)
    2900             __ movl_i32m(reinterpret_cast<unsigned>(m_codeBlock), m_interpreter->sampler()->codeBlockSlot());
    2901 #endif
    2902             ++callLinkInfoIndex;
    2903 
     2322            compileOpCallSlowCase(instruction + i, i, iter, callLinkInfoIndex++, opcodeID);
    29042323            i += (opcodeID == op_construct ? 7 : 5);
    29052324            break;
     
    34632882   
    34642883    ctiRepatchCallByReturnAddress(returnAddress, code);
    3465 }
    3466 
    3467 void JIT::unlinkCall(CallLinkInfo* callLinkInfo)
    3468 {
    3469     // When the JSFunction is deleted the pointer embedded in the instruction stream will no longer be valid
    3470     // (and, if a new JSFunction happened to be constructed at the same location, we could get a false positive
    3471     // match).  Reset the check so it no longer matches.
    3472     reinterpret_cast<void**>(callLinkInfo->hotPathBegin)[-1] = asPointer(JSImmediate::impossibleValue());
    3473 }
    3474 
    3475 void JIT::linkCall(JSFunction* callee, CodeBlock* calleeCodeBlock, void* ctiCode, CallLinkInfo* callLinkInfo, int callerArgCount)
    3476 {
    3477     // Currently we only link calls with the exact number of arguments.
    3478     if (callerArgCount == calleeCodeBlock->numParameters) {
    3479         ASSERT(!callLinkInfo->isLinked());
    3480    
    3481         calleeCodeBlock->addCaller(callLinkInfo);
    3482    
    3483         reinterpret_cast<void**>(callLinkInfo->hotPathBegin)[-1] = callee;
    3484         ctiRepatchCallByReturnAddress(callLinkInfo->hotPathOther, ctiCode);
    3485     }
    3486 
    3487     // repatch the instruction that jumps out to the cold path, so that we only try to link once.
    3488     void* repatchCheck = reinterpret_cast<void*>(reinterpret_cast<ptrdiff_t>(callLinkInfo->hotPathBegin) + repatchOffsetOpCallCall);
    3489     ctiRepatchCallByReturnAddress(repatchCheck, callLinkInfo->coldPathOther);
    34902884}
    34912885
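
For reference, a minimal sketch of the link/unlink idea visible in the removed linkCall/unlinkCall code above: the word immediately before hotPathBegin is the immediate that the inline callee check compares against, so writing through index [-1] changes what the fast path will match. The struct and function names below are illustrative, not the implementation that was moved to JITCall.cpp.

    // Illustrative only: mirrors the removed code's use of hotPathBegin[-1].
    struct CallLinkInfoSketch {
        void* hotPathBegin;   // label just past the embedded callee-pointer check
        void* hotPathOther;   // return address of the repatchable near call
    };

    static void linkCallSketch(CallLinkInfoSketch* info, void* callee, void* calleeCtiCode)
    {
        // Make the inline check match this callee, then point the call at its code.
        reinterpret_cast<void**>(info->hotPathBegin)[-1] = callee;
        // In the real code: ctiRepatchCallByReturnAddress(info->hotPathOther, calleeCtiCode);
        (void)calleeCtiCode;
    }

    static void unlinkCallSketch(CallLinkInfoSketch* info, void* impossibleValue)
    {
        // Reset the check so a deleted (or recycled) JSFunction can never match again.
        reinterpret_cast<void**>(info->hotPathBegin)[-1] = impossibleValue;
    }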