Ignore:
Timestamp:
Dec 12, 2008, 7:18:10 PM (16 years ago)
Author:
[email protected]
Message:

2008-12-12 Gavin Barraclough <[email protected]>

Reviewed by Geoff Garen.

Remove the loop counter 'i' from the JIT generation passes, replacing it with a member variable, m_bytecodeIndex.

No impact on performance.

  • jit/JIT.cpp: (JSC::JIT::compileOpStrictEq): (JSC::JIT::emitSlowScriptCheck): (JSC::JIT::privateCompileMainPass): (JSC::JIT::privateCompileSlowCases): (JSC::JIT::privateCompile):
  • jit/JIT.h: (JSC::CallRecord::CallRecord): (JSC::JmpTable::JmpTable): (JSC::JIT::emitCTICall):
  • jit/JITArithmetic.cpp: (JSC::JIT::compileBinaryArithOp): (JSC::JIT::compileBinaryArithOpSlowCase):
  • jit/JITCall.cpp: (JSC::JIT::compileOpCall): (JSC::JIT::compileOpCallSlowCase):
  • jit/JITInlineMethods.h: (JSC::JIT::emitGetVirtualRegister): (JSC::JIT::emitGetVirtualRegisters): (JSC::JIT::emitNakedCall): (JSC::JIT::emitCTICall_internal): (JSC::JIT::emitJumpSlowCaseIfJSCell): (JSC::JIT::emitJumpSlowCaseIfNotJSCell): (JSC::JIT::emitJumpSlowCaseIfNotImmNum): (JSC::JIT::emitJumpSlowCaseIfNotImmNums): (JSC::JIT::emitFastArithIntToImmOrSlowCase): (JSC::JIT::addSlowCase): (JSC::JIT::addJump): (JSC::JIT::emitJumpSlowToHot):
  • jit/JITPropertyAccess.cpp: (JSC::JIT::compileGetByIdHotPath): (JSC::JIT::compileGetByIdSlowCase): (JSC::JIT::compilePutByIdHotPath): (JSC::JIT::compilePutByIdSlowCase):
File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/JavaScriptCore/jit/JITInlineMethods.h

    r39261 r39266  
    5757
    5858// get arg puts an arg from the SF register array into a h/w register
    59 ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst, unsigned currentInstructionIndex)
    60 {
     59ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
     60{
     61    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     62
    6163    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    6264    if (m_codeBlock->isConstantRegisterIndex(src)) {
     
    6971    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src)) {
    7072        bool atJumpTarget = false;
    71         while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= currentInstructionIndex) {
    72             if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == currentInstructionIndex)
     73        while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeIndex) {
     74            if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeIndex)
    7375                atJumpTarget = true;
    7476            ++m_jumpTargetsPosition;
     
    8890}
    8991
    90 ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2, unsigned i)
     92ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
    9193{
    9294    if (src2 == m_lastResultBytecodeRegister) {
    93         emitGetVirtualRegister(src2, dst2, i);
    94         emitGetVirtualRegister(src1, dst1, i);
     95        emitGetVirtualRegister(src2, dst2);
     96        emitGetVirtualRegister(src1, dst1);
    9597    } else {
    96         emitGetVirtualRegister(src1, dst1, i);
    97         emitGetVirtualRegister(src2, dst2, i);
     98        emitGetVirtualRegister(src1, dst1);
     99        emitGetVirtualRegister(src2, dst2);
    98100    }
    99101}
     
    188190}
    189191
    190 ALWAYS_INLINE JmpSrc JIT::emitNakedCall(unsigned bytecodeIndex, X86::RegisterID r)
    191 {
     192ALWAYS_INLINE JmpSrc JIT::emitNakedCall(X86::RegisterID r)
     193{
     194    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     195
    192196    JmpSrc nakedCall = call(r);
    193     m_calls.append(CallRecord(nakedCall, bytecodeIndex));
     197    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex));
    194198    return nakedCall;
    195199}
    196200
    197 ALWAYS_INLINE JmpSrc JIT::emitNakedCall(unsigned bytecodeIndex, void* function)
    198 {
     201ALWAYS_INLINE JmpSrc JIT::emitNakedCall(void* function)
     202{
     203    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     204
    199205    JmpSrc nakedCall = call();
    200     m_calls.append(CallRecord(nakedCall, reinterpret_cast<CTIHelper_v>(function), bytecodeIndex));
     206    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function));
    201207    return nakedCall;
    202208}
    203209
    204 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_j helper)
    205 {
     210ALWAYS_INLINE JmpSrc JIT::emitCTICall_internal(void* helper)
     211{
     212    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     213
    206214#if ENABLE(OPCODE_SAMPLING)
    207     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, true)), m_interpreter->sampler()->sampleSlot());
     215    store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + m_bytecodeIndex, true)), m_interpreter->sampler()->sampleSlot());
    208216#endif
    209217    emitPutCTIParam(callFrameRegister, CTI_ARGS_callFrame);
    210218    JmpSrc ctiCall = call();
    211     m_calls.append(CallRecord(ctiCall, helper, bytecodeIndex));
     219    m_calls.append(CallRecord(ctiCall, m_bytecodeIndex, helper));
    212220#if ENABLE(OPCODE_SAMPLING)
    213     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, false)), m_interpreter->sampler()->sampleSlot());
     221    store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + m_bytecodeIndex, false)), m_interpreter->sampler()->sampleSlot());
    214222#endif
    215223    killLastResultRegister();
     
    218226}
    219227
    220 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_o helper)
    221 {
    222 #if ENABLE(OPCODE_SAMPLING)
    223     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, true)), m_interpreter->sampler()->sampleSlot());
    224 #endif
    225     emitPutCTIParam(callFrameRegister, CTI_ARGS_callFrame);
    226     JmpSrc ctiCall = call();
    227     m_calls.append(CallRecord(ctiCall, helper, bytecodeIndex));
    228 #if ENABLE(OPCODE_SAMPLING)
    229     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, false)), m_interpreter->sampler()->sampleSlot());
    230 #endif
    231     killLastResultRegister();
    232 
    233     return ctiCall;
    234 }
    235 
    236 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_p helper)
    237 {
    238 #if ENABLE(OPCODE_SAMPLING)
    239     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, true)), m_interpreter->sampler()->sampleSlot());
    240 #endif
    241     emitPutCTIParam(callFrameRegister, CTI_ARGS_callFrame);
    242     JmpSrc ctiCall = call();
    243     m_calls.append(CallRecord(ctiCall, helper, bytecodeIndex));
    244 #if ENABLE(OPCODE_SAMPLING)
    245     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, false)), m_interpreter->sampler()->sampleSlot());
    246 #endif
    247     killLastResultRegister();
    248 
    249     return ctiCall;
    250 }
    251 
    252 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_b helper)
    253 {
    254 #if ENABLE(OPCODE_SAMPLING)
    255     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, true)), m_interpreter->sampler()->sampleSlot());
    256 #endif
    257     emitPutCTIParam(callFrameRegister, CTI_ARGS_callFrame);
    258     JmpSrc ctiCall = call();
    259     m_calls.append(CallRecord(ctiCall, helper, bytecodeIndex));
    260 #if ENABLE(OPCODE_SAMPLING)
    261     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, false)), m_interpreter->sampler()->sampleSlot());
    262 #endif
    263     killLastResultRegister();
    264 
    265     return ctiCall;
    266 }
    267 
    268 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_v helper)
    269 {
    270 #if ENABLE(OPCODE_SAMPLING)
    271     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, true)), m_interpreter->sampler()->sampleSlot());
    272 #endif
    273     emitPutCTIParam(callFrameRegister, CTI_ARGS_callFrame);
    274     JmpSrc ctiCall = call();
    275     m_calls.append(CallRecord(ctiCall, helper, bytecodeIndex));
    276 #if ENABLE(OPCODE_SAMPLING)
    277     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, false)), m_interpreter->sampler()->sampleSlot());
    278 #endif
    279     killLastResultRegister();
    280 
    281     return ctiCall;
    282 }
    283 
    284 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_s helper)
    285 {
    286 #if ENABLE(OPCODE_SAMPLING)
    287     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, true)), m_interpreter->sampler()->sampleSlot());
    288 #endif
    289     emitPutCTIParam(callFrameRegister, CTI_ARGS_callFrame);
    290     JmpSrc ctiCall = call();
    291     m_calls.append(CallRecord(ctiCall, helper, bytecodeIndex));
    292 #if ENABLE(OPCODE_SAMPLING)
    293     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, false)), m_interpreter->sampler()->sampleSlot());
    294 #endif
    295     killLastResultRegister();
    296 
    297     return ctiCall;
    298 }
    299 
    300 ALWAYS_INLINE JmpSrc JIT::emitCTICall(unsigned bytecodeIndex, CTIHelper_2 helper)
    301 {
    302 #if ENABLE(OPCODE_SAMPLING)
    303     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, true)), m_interpreter->sampler()->sampleSlot());
    304 #endif
    305     emitPutCTIParam(callFrameRegister, CTI_ARGS_callFrame);
    306     JmpSrc ctiCall = call();
    307     m_calls.append(CallRecord(ctiCall, helper, bytecodeIndex));
    308 #if ENABLE(OPCODE_SAMPLING)
    309     store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin() + bytecodeIndex, false)), m_interpreter->sampler()->sampleSlot());
    310 #endif
    311     killLastResultRegister();
    312 
    313     return ctiCall;
    314 }
    315 
    316228ALWAYS_INLINE JmpSrc JIT::checkStructure(RegisterID reg, Structure* structure)
    317229{
     
    324236}
    325237
    326 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg, unsigned bytecodeIndex)
    327 {
    328     m_slowCases.append(SlowCaseEntry(emitJumpIfJSCell(reg), bytecodeIndex));
     238ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
     239{
     240    addSlowCase(emitJumpIfJSCell(reg));
    329241}
    330242
     
    334246}
    335247
    336 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, unsigned bytecodeIndex)
    337 {
    338     m_slowCases.append(SlowCaseEntry(emitJumpIfNotJSCell(reg), bytecodeIndex));
    339 }
    340 
    341 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, unsigned bytecodeIndex, int vReg)
     248ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
     249{
     250    addSlowCase(emitJumpIfNotJSCell(reg));
     251}
     252
     253ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
    342254{
    343255    if (!m_codeBlock->isKnownNotImmediate(vReg))
    344         emitJumpSlowCaseIfNotJSCell(reg, bytecodeIndex);
     256        emitJumpSlowCaseIfNotJSCell(reg);
    345257}
    346258
     
    351263}
    352264
    353 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmNum(RegisterID reg, unsigned bytecodeIndex)
    354 {
    355     m_slowCases.append(SlowCaseEntry(jz32(reg, Imm32(JSImmediate::TagBitTypeInteger)), bytecodeIndex));
    356 }
    357 
    358 ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmNums(RegisterID reg1, RegisterID reg2, RegisterID scratch, unsigned bytecodeIndex)
     265ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmNum(RegisterID reg)
     266{
     267    addSlowCase(jz32(reg, Imm32(JSImmediate::TagBitTypeInteger)));
     268}
     269
     270ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmNums(RegisterID reg1, RegisterID reg2, RegisterID scratch)
    359271{
    360272    move(reg1, scratch);
    361273    and32(reg2, scratch);
    362     emitJumpSlowCaseIfNotImmNum(scratch, bytecodeIndex);
     274    emitJumpSlowCaseIfNotImmNum(scratch);
    363275}
    364276
     
    394306}
    395307
    396 ALWAYS_INLINE void JIT::emitFastArithIntToImmOrSlowCase(RegisterID reg, unsigned bytecodeIndex)
    397 {
    398     m_slowCases.append(SlowCaseEntry(joAdd32(reg, reg), bytecodeIndex));
     308ALWAYS_INLINE void JIT::emitFastArithIntToImmOrSlowCase(RegisterID reg)
     309{
     310    addSlowCase(joAdd32(reg, reg));
    399311    emitFastArithReTagImmediate(reg);
    400312}
     
    412324}
    413325
     326ALWAYS_INLINE void JIT::addSlowCase(JmpSrc jump)
     327{
     328    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     329
     330    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
     331}
     332
     333ALWAYS_INLINE void JIT::addJump(JmpSrc jump, int relativeOffset)
     334{
     335    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     336
     337    m_jmpTable.append(JmpTable(jump, m_bytecodeIndex + relativeOffset));
     338}
     339
     340ALWAYS_INLINE void JIT::emitJumpSlowToHot(JmpSrc jump, int relativeOffset)
     341{
     342    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.
     343
     344    __ link(jump, m_labels[m_bytecodeIndex + relativeOffset]);
     345}
     346
    414347}
    415348
Note: See TracChangeset for help on using the changeset viewer.