Ignore:
Timestamp:
Sep 26, 2021, 2:20:52 PM (4 years ago)
Author:
[email protected]
Message:

Unreviewed, reverting r283083 and r283088.
https://bugs.webkit.org/show_bug.cgi?id=230806

Windows ports are crashing

Reverted changesets:

"Build an unlinked baseline JIT"
https://bugs.webkit.org/show_bug.cgi?id=229223
https://commits.webkit.org/r283083

"Make byte codes with arithmetic profiles switch to using an
index instead of a pointer in metadata"
https://bugs.webkit.org/show_bug.cgi?id=230798
https://commits.webkit.org/r283088

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/Source/JavaScriptCore/jit/JIT.cpp

    r283083 r283089  
    7070
    7171JIT::JIT(VM& vm, CodeBlock* codeBlock, BytecodeIndex loopOSREntryBytecodeIndex)
    72     : JSInterfaceJIT(&vm, nullptr)
     72    : JSInterfaceJIT(&vm, codeBlock)
    7373    , m_interpreter(vm.interpreter)
    7474    , m_labels(codeBlock ? codeBlock->instructions().size() : 0)
     
    7878    , m_loopOSREntryBytecodeIndex(loopOSREntryBytecodeIndex)
    7979{
    80     m_globalObjectConstant = m_constantPool.add(JITConstantPool::Type::GlobalObject);
    81     m_profiledCodeBlock = codeBlock;
    82     m_unlinkedCodeBlock = codeBlock->unlinkedCodeBlock();
    8380}
    8481
     
    9491
    9592    JumpList skipOptimize;
    96     loadPtr(addressFor(CallFrameSlot::codeBlock), regT0);
    97     skipOptimize.append(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), Address(regT0, CodeBlock::offsetOfJITExecuteCounter())));
     93   
     94    skipOptimize.append(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    9895    ASSERT(!m_bytecodeIndex.offset());
    9996
     
    117114}
    118115
    119 void JIT::emitNotifyWriteWatchpoint(GPRReg pointerToSet)
    120 {
    121     auto ok = branchTestPtr(Zero, pointerToSet);
     116void JIT::emitNotifyWrite(GPRReg pointerToSet)
     117{
    122118    addSlowCase(branch8(NotEqual, Address(pointerToSet, WatchpointSet::offsetOfState()), TrustedImm32(IsInvalidated)));
    123     ok.link(this);
    124 }
    125 
    126 void JIT::emitVarReadOnlyCheck(ResolveType resolveType, GPRReg scratchGPR)
    127 {
    128     if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks) {
    129         loadGlobalObject(scratchGPR);
    130         loadPtr(Address(scratchGPR, OBJECT_OFFSETOF(JSGlobalObject, m_varReadOnlyWatchpoint)), scratchGPR);
    131         addSlowCase(branch8(Equal, Address(scratchGPR, WatchpointSet::offsetOfState()), TrustedImm32(IsInvalidated)));
    132     }
     119}
     120
     121void JIT::emitVarReadOnlyCheck(ResolveType resolveType)
     122{
     123    if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks)
     124        addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varReadOnlyWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
    133125}
    134126
     
    138130        return;
    139131   
    140     addPtr(TrustedImm32(stackPointerOffsetFor(m_unlinkedCodeBlock) * sizeof(Register)), callFrameRegister, regT0);
     132    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, regT0);
    141133    Jump ok = branchPtr(Equal, regT0, stackPointerRegister);
    142134    breakpoint();
    143135    ok.link(this);
    144 }
    145 
    146 void JIT::resetSP()
    147 {
    148     addPtr(TrustedImm32(stackPointerOffsetFor(m_unlinkedCodeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    149     checkStackPointerAlignment();
    150136}
    151137
     
    196182}
    197183
    198 void JIT::emitPutCodeBlockToFrameInPrologue(GPRReg result)
    199 {
    200     RELEASE_ASSERT(m_unlinkedCodeBlock->codeType() == FunctionCode);
    201     emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, result);
    202     loadPtr(Address(result, JSFunction::offsetOfExecutableOrRareData()), result);
    203     auto hasExecutable = branchTestPtr(Zero, result, CCallHelpers::TrustedImm32(JSFunction::rareDataTag));
    204     loadPtr(Address(result, FunctionRareData::offsetOfExecutable() - JSFunction::rareDataTag), result);
    205     hasExecutable.link(this);
    206     if (m_unlinkedCodeBlock->isConstructor())
    207         loadPtr(Address(result, FunctionExecutable::offsetOfCodeBlockForConstruct()), result);
    208     else
    209         loadPtr(Address(result, FunctionExecutable::offsetOfCodeBlockForCall()), result);
    210 
    211     loadPtr(Address(result, ExecutableToCodeBlockEdge::offsetOfCodeBlock()), result);
    212     emitPutToCallFrameHeader(result, CallFrameSlot::codeBlock);
    213 
    214 #if ASSERT_ENABLED
    215     probeDebug([=] (Probe::Context& ctx) {
    216         CodeBlock* codeBlock = ctx.fp<CallFrame*>()->codeBlock();
    217         RELEASE_ASSERT(codeBlock->jitType() == JITType::BaselineJIT);
    218     });
    219 #endif
    220 }
    221 
    222184void JIT::privateCompileMainPass()
    223185{
    224186    if (JITInternal::verbose)
    225         dataLog("Compiling ", *m_profiledCodeBlock, "\n");
     187        dataLog("Compiling ", *m_codeBlock, "\n");
    226188   
    227189    jitAssertTagsInPlace();
    228190    jitAssertArgumentCountSane();
    229191   
    230     auto& instructions = m_unlinkedCodeBlock->instructions();
    231     unsigned instructionCount = m_unlinkedCodeBlock->instructions().size();
     192    auto& instructions = m_codeBlock->instructions();
     193    unsigned instructionCount = m_codeBlock->instructions().size();
    232194
    233195    m_callLinkInfoIndex = 0;
    234196
     197    VM& vm = m_codeBlock->vm();
    235198    BytecodeIndex startBytecodeIndex(0);
     199    if (m_loopOSREntryBytecodeIndex && (m_codeBlock->inherits<ProgramCodeBlock>(vm) || m_codeBlock->inherits<ModuleProgramCodeBlock>(vm))) {
     200        // We can only do this optimization because we execute ProgramCodeBlock's exactly once.
     201        // This optimization would be invalid otherwise. When the LLInt determines it wants to
     202        // do OSR entry into the baseline JIT in a loop, it will pass in the bytecode offset it
     203        // was executing at when it kicked off our compilation. We only need to compile code for
     204        // anything reachable from that bytecode offset.
     205
     206        // We only bother building the bytecode graph if it could save time and executable
     207        // memory. We pick an arbitrary offset where we deem this is profitable.
     208        if (m_loopOSREntryBytecodeIndex.offset() >= 200) {
     209            // As a simplification, we don't find all bytecode ranges that are unreachable.
     210            // Instead, we just find the minimum bytecode offset that is reachable, and
     211            // compile code from that bytecode offset onwards.
     212
     213            BytecodeGraph graph(m_codeBlock, m_codeBlock->instructions());
     214            BytecodeBasicBlock* block = graph.findBasicBlockForBytecodeOffset(m_loopOSREntryBytecodeIndex.offset());
     215            RELEASE_ASSERT(block);
     216
     217            GraphNodeWorklist<BytecodeBasicBlock*> worklist;
     218            startBytecodeIndex = BytecodeIndex();
     219            worklist.push(block);
     220
     221            while (BytecodeBasicBlock* block = worklist.pop()) {
     222                startBytecodeIndex = BytecodeIndex(std::min(startBytecodeIndex.offset(), block->leaderOffset()));
     223                for (unsigned successorIndex : block->successors())
     224                    worklist.push(&graph[successorIndex]);
     225
     226                // Also add catch blocks for bytecodes that throw.
     227                if (m_codeBlock->numberOfExceptionHandlers()) {
     228                    for (unsigned bytecodeOffset = block->leaderOffset(); bytecodeOffset < block->leaderOffset() + block->totalLength();) {
     229                        auto instruction = instructions.at(bytecodeOffset);
     230                        if (auto* handler = m_codeBlock->handlerForBytecodeIndex(BytecodeIndex(bytecodeOffset)))
     231                            worklist.push(graph.findBasicBlockWithLeaderOffset(handler->target));
     232
     233                        bytecodeOffset += instruction->size();
     234                    }
     235                }
     236            }
     237        }
     238    }
    236239
    237240    m_bytecodeCountHavingSlowCase = 0;
     
    276279        unsigned bytecodeOffset = m_bytecodeIndex.offset();
    277280        if (UNLIKELY(Options::traceBaselineJITExecution())) {
     281            CodeBlock* codeBlock = m_codeBlock;
    278282            probeDebug([=] (Probe::Context& ctx) {
    279                 CodeBlock* codeBlock = ctx.fp<CallFrame*>()->codeBlock();
    280283                dataLogLn("JIT [", bytecodeOffset, "] ", opcodeNames[opcodeID], " cfr ", RawPointer(ctx.fp()), " @ ", codeBlock);
    281284            });
    282285        }
    283 
    284         if (opcodeID != op_catch)
    285             assertStackPointerOffset();
    286286
    287287        switch (opcodeID) {
     
    528528        BytecodeIndex firstTo = m_bytecodeIndex;
    529529
    530         const Instruction* currentInstruction = m_unlinkedCodeBlock->instructions().at(m_bytecodeIndex).ptr();
     530        const Instruction* currentInstruction = m_codeBlock->instructions().at(m_bytecodeIndex).ptr();
    531531       
    532532        if (JITInternal::verbose)
     
    546546        if (UNLIKELY(Options::traceBaselineJITExecution())) {
    547547            unsigned bytecodeOffset = m_bytecodeIndex.offset();
     548            CodeBlock* codeBlock = m_codeBlock;
    548549            probeDebug([=] (Probe::Context& ctx) {
    549                 CodeBlock* codeBlock = ctx.fp<CallFrame*>()->codeBlock();
    550550                dataLogLn("JIT [", bytecodeOffset, "] SLOW ", opcodeNames[opcodeID], " cfr ", RawPointer(ctx.fp()), " @ ", codeBlock);
    551551            });
     
    675675}
    676676
    677 void JIT::emitMaterializeMetadataAndConstantPoolRegisters()
    678 {
    679     loadPtr(addressFor(CallFrameSlot::codeBlock), regT0);
    680     loadPtr(Address(regT0, CodeBlock::offsetOfMetadataTable()), s_metadataGPR);
    681     loadPtr(Address(regT0, CodeBlock::offsetOfJITData()), regT0);
    682     loadPtr(Address(regT0, CodeBlock::JITData::offsetOfJITConstantPool()), s_constantsGPR);
    683 }
    684 
    685 void JIT::emitRestoreCalleeSaves()
    686 {
    687     Base::emitRestoreCalleeSavesFor(&RegisterAtOffsetList::llintBaselineCalleeSaveRegisters());
    688 }
    689 
    690677void JIT::compileAndLinkWithoutFinalizing(JITCompilationEffort effort)
    691678{
    692     DFG::CapabilityLevel level = m_profiledCodeBlock->capabilityLevel();
     679    DFG::CapabilityLevel level = m_codeBlock->capabilityLevel();
    693680    switch (level) {
    694681    case DFG::CannotCompile:
    695682        m_canBeOptimized = false;
     683        m_canBeOptimizedOrInlined = false;
    696684        m_shouldEmitProfiling = false;
    697685        break;
     
    699687    case DFG::CanCompileAndInline:
    700688        m_canBeOptimized = true;
     689        m_canBeOptimizedOrInlined = true;
    701690        m_shouldEmitProfiling = true;
    702691        break;
     
    705694        break;
    706695    }
    707 
    708     if (m_unlinkedCodeBlock->numberOfUnlinkedSwitchJumpTables() || m_unlinkedCodeBlock->numberOfUnlinkedStringSwitchJumpTables()) {
    709         if (m_unlinkedCodeBlock->numberOfUnlinkedSwitchJumpTables())
    710             m_switchJumpTables = FixedVector<SimpleJumpTable>(m_unlinkedCodeBlock->numberOfUnlinkedSwitchJumpTables());
    711         if (m_unlinkedCodeBlock->numberOfUnlinkedStringSwitchJumpTables())
    712             m_stringSwitchJumpTables = FixedVector<StringJumpTable>(m_unlinkedCodeBlock->numberOfUnlinkedStringSwitchJumpTables());
    713     }
    714 
    715     if (UNLIKELY(Options::dumpDisassembly() || (m_vm->m_perBytecodeProfiler && Options::disassembleBaselineForProfiler()))) {
    716         // FIXME: build a disassembler off of UnlinkedCodeBlock.
    717         m_disassembler = makeUnique<JITDisassembler>(m_profiledCodeBlock);
    718     }
     696   
     697    switch (m_codeBlock->codeType()) {
     698    case GlobalCode:
     699    case ModuleCode:
     700    case EvalCode:
     701        m_codeBlock->m_shouldAlwaysBeInlined = false;
     702        break;
     703    case FunctionCode:
     704        // We could have already set it to false because we detected an uninlineable call.
     705        // Don't override that observation.
     706        m_codeBlock->m_shouldAlwaysBeInlined &= canInline(level) && DFG::mightInlineFunction(m_codeBlock);
     707        break;
     708    }
     709
     710    if (m_codeBlock->numberOfUnlinkedSwitchJumpTables() || m_codeBlock->numberOfUnlinkedStringSwitchJumpTables()) {
     711        ConcurrentJSLocker locker(m_codeBlock->m_lock);
     712        if (m_codeBlock->numberOfUnlinkedSwitchJumpTables())
     713            m_codeBlock->ensureJITData(locker).m_switchJumpTables = FixedVector<SimpleJumpTable>(m_codeBlock->numberOfUnlinkedSwitchJumpTables());
     714        if (m_codeBlock->numberOfUnlinkedStringSwitchJumpTables())
     715            m_codeBlock->ensureJITData(locker).m_stringSwitchJumpTables = FixedVector<StringJumpTable>(m_codeBlock->numberOfUnlinkedStringSwitchJumpTables());
     716    }
     717
     718    if (UNLIKELY(Options::dumpDisassembly() || (m_vm->m_perBytecodeProfiler && Options::disassembleBaselineForProfiler())))
     719        m_disassembler = makeUnique<JITDisassembler>(m_codeBlock);
    719720    if (UNLIKELY(m_vm->m_perBytecodeProfiler)) {
    720         // FIXME: build profiler disassembler off UnlinkedCodeBlock.
    721721        m_compilation = adoptRef(
    722722            new Profiler::Compilation(
    723                 m_vm->m_perBytecodeProfiler->ensureBytecodesFor(m_profiledCodeBlock),
     723                m_vm->m_perBytecodeProfiler->ensureBytecodesFor(m_codeBlock),
    724724                Profiler::Baseline));
    725         m_compilation->addProfiledBytecodes(*m_vm->m_perBytecodeProfiler, m_profiledCodeBlock);
     725        m_compilation->addProfiledBytecodes(*m_vm->m_perBytecodeProfiler, m_codeBlock);
    726726    }
    727727   
     
    743743
    744744    emitFunctionPrologue();
    745     if (m_unlinkedCodeBlock->codeType() == FunctionCode)
    746         emitPutCodeBlockToFrameInPrologue();
     745    emitPutToCallFrameHeader(m_codeBlock, CallFrameSlot::codeBlock);
    747746
    748747    Label beginLabel(this);
    749748
    750     int frameTopOffset = stackPointerOffsetFor(m_unlinkedCodeBlock) * sizeof(Register);
     749    int frameTopOffset = stackPointerOffsetFor(m_codeBlock) * sizeof(Register);
    751750    unsigned maxFrameSize = -frameTopOffset;
    752751    addPtr(TrustedImm32(frameTopOffset), callFrameRegister, regT1);
     
    759758    checkStackPointerAlignment();
    760759
    761     emitSaveCalleeSavesFor(&RegisterAtOffsetList::llintBaselineCalleeSaveRegisters());
     760    emitSaveCalleeSaves();
    762761    emitMaterializeTagCheckRegisters();
    763     emitMaterializeMetadataAndConstantPoolRegisters();
    764 
    765     if (m_unlinkedCodeBlock->codeType() == FunctionCode) {
     762
     763    if (m_codeBlock->codeType() == FunctionCode) {
    766764        ASSERT(!m_bytecodeIndex);
    767         if (shouldEmitProfiling() && (!m_unlinkedCodeBlock->isConstructor() || m_unlinkedCodeBlock->numParameters() > 1)) {
    768             emitGetFromCallFrameHeaderPtr(CallFrameSlot::codeBlock, regT2);
    769             loadPtr(Address(regT2, CodeBlock::offsetOfArgumentValueProfiles() + FixedVector<ValueProfile>::offsetOfStorage()), regT2);
    770 
    771             for (unsigned argument = 0; argument < m_unlinkedCodeBlock->numParameters(); ++argument) {
     765        if (shouldEmitProfiling()) {
     766            for (unsigned argument = 0; argument < m_codeBlock->numParameters(); ++argument) {
    772767                // If this is a constructor, then we want to put in a dummy profiling site (to
    773768                // keep things consistent) but we don't actually want to record the dummy value.
    774                 if (m_unlinkedCodeBlock->isConstructor() && !argument)
     769                if (m_codeBlock->isConstructor() && !argument)
    775770                    continue;
    776771                int offset = CallFrame::argumentOffsetIncludingThis(argument) * static_cast<int>(sizeof(Register));
     
    783778                load32(Address(callFrameRegister, offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultRegs.tagGPR());
    784779#endif
    785                 storeValue(resultRegs, Address(regT2, argument * sizeof(ValueProfile) + ValueProfile::offsetOfFirstBucket()));
     780                emitValueProfilingSite(m_codeBlock->valueProfileForArgument(argument), resultRegs);
    786781            }
    787782        }
    788783    }
    789784   
    790     RELEASE_ASSERT(!JITCode::isJIT(m_profiledCodeBlock->jitType()));
     785    RELEASE_ASSERT(!JITCode::isJIT(m_codeBlock->jitType()));
    791786
    792787    if (UNLIKELY(sizeMarker))
     
    805800    if (maxFrameExtentForSlowPathCall)
    806801        addPtr(TrustedImm32(-static_cast<int32_t>(maxFrameExtentForSlowPathCall)), stackPointerRegister);
    807     emitGetFromCallFrameHeaderPtr(CallFrameSlot::codeBlock, regT0);
    808     callOperationWithCallFrameRollbackOnException(operationThrowStackOverflowError, regT0);
     802    callOperationWithCallFrameRollbackOnException(operationThrowStackOverflowError, m_codeBlock);
    809803
    810804    // If the number of parameters is 1, we never require arity fixup.
    811     bool requiresArityFixup = m_unlinkedCodeBlock->numParameters() != 1;
    812     if (m_unlinkedCodeBlock->codeType() == FunctionCode && requiresArityFixup) {
     805    bool requiresArityFixup = m_codeBlock->m_numParameters != 1;
     806    if (m_codeBlock->codeType() == FunctionCode && requiresArityFixup) {
    813807        m_arityCheck = label();
    814 
     808        store8(TrustedImm32(0), &m_codeBlock->m_shouldAlwaysBeInlined);
    815809        emitFunctionPrologue();
    816         emitPutCodeBlockToFrameInPrologue(regT0);
    817         store8(TrustedImm32(0), Address(regT0, CodeBlock::offsetOfShouldAlwaysBeInlined()));
     810        emitPutToCallFrameHeader(m_codeBlock, CallFrameSlot::codeBlock);
    818811
    819812        load32(payloadFor(CallFrameSlot::argumentCountIncludingThis), regT1);
    820         branch32(AboveOrEqual, regT1, TrustedImm32(m_unlinkedCodeBlock->numParameters())).linkTo(beginLabel, this);
     813        branch32(AboveOrEqual, regT1, TrustedImm32(m_codeBlock->m_numParameters)).linkTo(beginLabel, this);
    821814
    822815        m_bytecodeIndex = BytecodeIndex(0);
     
    824817        if (maxFrameExtentForSlowPathCall)
    825818            addPtr(TrustedImm32(-static_cast<int32_t>(maxFrameExtentForSlowPathCall)), stackPointerRegister);
    826         loadPtr(Address(regT0, CodeBlock::offsetOfGlobalObject()), argumentGPR0);
    827         callOperationWithCallFrameRollbackOnException(m_unlinkedCodeBlock->isConstructor() ? operationConstructArityCheck : operationCallArityCheck, argumentGPR0);
     819        callOperationWithCallFrameRollbackOnException(m_codeBlock->isConstructor() ? operationConstructArityCheck : operationCallArityCheck, m_codeBlock->globalObject());
    828820        if (maxFrameExtentForSlowPathCall)
    829821            addPtr(TrustedImm32(maxFrameExtentForSlowPathCall), stackPointerRegister);
     
    848840    m_pcToCodeOriginMapBuilder.appendItem(label(), PCToCodeOriginMapBuilder::defaultCodeOrigin());
    849841
    850     m_linkBuffer = std::unique_ptr<LinkBuffer>(new LinkBuffer(*this, m_unlinkedCodeBlock, LinkBuffer::Profile::BaselineJIT, effort));
     842    m_linkBuffer = std::unique_ptr<LinkBuffer>(new LinkBuffer(*this, m_codeBlock, LinkBuffer::Profile::BaselineJIT, effort));
    851843    link();
    852844}
     
    867859        case SwitchRecord::Immediate:
    868860        case SwitchRecord::Character: {
    869             const UnlinkedSimpleJumpTable& unlinkedTable = m_unlinkedCodeBlock->unlinkedSwitchJumpTable(tableIndex);
    870             SimpleJumpTable& linkedTable = m_switchJumpTables[tableIndex];
     861            const UnlinkedSimpleJumpTable& unlinkedTable = m_codeBlock->unlinkedSwitchJumpTable(tableIndex);
     862            SimpleJumpTable& linkedTable = m_codeBlock->switchJumpTable(tableIndex);
    871863            linkedTable.m_ctiDefault = patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + record.defaultOffset]);
    872864            for (unsigned j = 0; j < unlinkedTable.m_branchOffsets.size(); ++j) {
     
    880872
    881873        case SwitchRecord::String: {
    882             const UnlinkedStringJumpTable& unlinkedTable = m_unlinkedCodeBlock->unlinkedStringSwitchJumpTable(tableIndex);
    883             StringJumpTable& linkedTable = m_stringSwitchJumpTables[tableIndex];
     874            const UnlinkedStringJumpTable& unlinkedTable = m_codeBlock->unlinkedStringSwitchJumpTable(tableIndex);
     875            StringJumpTable& linkedTable = m_codeBlock->stringSwitchJumpTable(tableIndex);
    884876            auto ctiDefault = patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + record.defaultOffset]);
    885877            for (auto& location : unlinkedTable.m_offsetTable.values()) {
     
    915907    }
    916908
    917 #if USE(JSVALUE64)
    918     auto finalizeICs = [&] (auto& generators) {
    919         for (auto& gen : generators) {
    920             gen.m_unlinkedStubInfo->start = patchBuffer.locationOf<JITStubRoutinePtrTag>(gen.m_start);
    921             gen.m_unlinkedStubInfo->doneLocation = patchBuffer.locationOf<JSInternalPtrTag>(gen.m_done);
    922             gen.m_unlinkedStubInfo->slowPathStartLocation = patchBuffer.locationOf<JITStubRoutinePtrTag>(gen.m_slowPathBegin);
    923         }
    924     };
    925 
    926     finalizeICs(m_getByIds);
    927     finalizeICs(m_getByVals);
    928     finalizeICs(m_getByIdsWithThis);
    929     finalizeICs(m_putByIds);
    930     finalizeICs(m_putByVals);
    931     finalizeICs(m_delByIds);
    932     finalizeICs(m_delByVals);
    933     finalizeICs(m_inByIds);
    934     finalizeICs(m_inByVals);
    935     finalizeICs(m_instanceOfs);
    936     finalizeICs(m_privateBrandAccesses);
    937 #else
    938909    finalizeInlineCaches(m_getByIds, patchBuffer);
    939910    finalizeInlineCaches(m_getByVals, patchBuffer);
     
    947918    finalizeInlineCaches(m_instanceOfs, patchBuffer);
    948919    finalizeInlineCaches(m_privateBrandAccesses, patchBuffer);
    949 #endif
    950920
    951921    for (auto& compilationInfo : m_callCompilationInfo) {
    952 #if USE(JSVALUE64)
    953         UnlinkedCallLinkInfo& info = *compilationInfo.unlinkedCallLinkInfo;
    954         info.doneLocation = patchBuffer.locationOf<JSInternalPtrTag>(compilationInfo.doneLocation);
    955 #else
    956922        CallLinkInfo& info = *compilationInfo.callLinkInfo;
    957923        info.setCodeLocations(
    958924            patchBuffer.locationOf<JSInternalPtrTag>(compilationInfo.slowPathStart),
    959925            patchBuffer.locationOf<JSInternalPtrTag>(compilationInfo.doneLocation));
    960 #endif
    961 
    962     }
    963 
    964     JITCodeMapBuilder jitCodeMapBuilder;
    965     for (unsigned bytecodeOffset = 0; bytecodeOffset < m_labels.size(); ++bytecodeOffset) {
    966         if (m_labels[bytecodeOffset].isSet())
    967             jitCodeMapBuilder.append(BytecodeIndex(bytecodeOffset), patchBuffer.locationOf<JSEntryPtrTag>(m_labels[bytecodeOffset]));
     926    }
     927
     928    {
     929        JITCodeMapBuilder jitCodeMapBuilder;
     930        for (unsigned bytecodeOffset = 0; bytecodeOffset < m_labels.size(); ++bytecodeOffset) {
     931            if (m_labels[bytecodeOffset].isSet())
     932                jitCodeMapBuilder.append(BytecodeIndex(bytecodeOffset), patchBuffer.locationOf<JSEntryPtrTag>(m_labels[bytecodeOffset]));
     933        }
     934        m_codeBlock->setJITCodeMap(jitCodeMapBuilder.finalize());
    968935    }
    969936
     
    974941
    975942    if (UNLIKELY(m_compilation)) {
    976         // FIXME: should we make the bytecode profiler know about UnlinkedCodeBlock?
    977943        if (Options::disassembleBaselineForProfiler())
    978944            m_disassembler->reportToProfiler(m_compilation.get(), patchBuffer);
    979         m_vm->m_perBytecodeProfiler->addCompilation(m_profiledCodeBlock, *m_compilation);
     945        m_vm->m_perBytecodeProfiler->addCompilation(m_codeBlock, *m_compilation);
    980946    }
    981947
     
    983949        m_pcToCodeOriginMap = makeUnique<PCToCodeOriginMap>(WTFMove(m_pcToCodeOriginMapBuilder), patchBuffer);
    984950   
    985     // FIXME: Make a version of CodeBlockWithJITType that knows about UnlinkedCodeBlock.
    986951    CodeRef<JSEntryPtrTag> result = FINALIZE_CODE(
    987952        patchBuffer, JSEntryPtrTag,
    988         "Baseline JIT code for %s", toCString(CodeBlockWithJITType(m_profiledCodeBlock, JITType::BaselineJIT)).data());
     953        "Baseline JIT code for %s", toCString(CodeBlockWithJITType(m_codeBlock, JITType::BaselineJIT)).data());
    989954   
    990955    MacroAssemblerCodePtr<JSEntryPtrTag> withArityCheck = patchBuffer.locationOf<JSEntryPtrTag>(m_arityCheck);
    991     m_jitCode = adoptRef(*new BaselineJITCode(result, withArityCheck));
    992 
    993     m_jitCode->m_unlinkedCalls = WTFMove(m_unlinkedCalls);
    994     m_jitCode->m_evalCallLinkInfos = WTFMove(m_evalCallLinkInfos);
    995     m_jitCode->m_unlinkedStubInfos = WTFMove(m_unlinkedStubInfos);
    996     m_jitCode->m_switchJumpTables = WTFMove(m_switchJumpTables);
    997     m_jitCode->m_stringSwitchJumpTables = WTFMove(m_stringSwitchJumpTables);
    998     m_jitCode->m_jitCodeMap = jitCodeMapBuilder.finalize();
    999     m_jitCode->adoptMathICs(m_mathICs);
    1000     m_jitCode->m_constantPool = WTFMove(m_constantPool);
    1001 #if USE(JSVALUE64)
    1002     m_jitCode->m_isShareable = m_isShareable;
    1003 #else
    1004     m_jitCode->m_isShareable = false;
    1005 #endif
     956    m_jitCode = adoptRef(*new DirectJITCode(result, withArityCheck, JITType::BaselineJIT));
    1006957
    1007958    if (JITInternal::verbose)
    1008         dataLogF("JIT generated code for %p at [%p, %p).\n", m_unlinkedCodeBlock, result.executableMemory()->start().untaggedPtr(), result.executableMemory()->end().untaggedPtr());
    1009 }
    1010 
    1011 CompilationResult JIT::finalizeOnMainThread(CodeBlock* codeBlock)
     959        dataLogF("JIT generated code for %p at [%p, %p).\n", m_codeBlock, result.executableMemory()->start().untaggedPtr(), result.executableMemory()->end().untaggedPtr());
     960}
     961
     962CompilationResult JIT::finalizeOnMainThread()
    1012963{
    1013964    RELEASE_ASSERT(!isCompilationThread());
     
    1018969    m_linkBuffer->runMainThreadFinalizationTasks();
    1019970
     971    {
     972        ConcurrentJSLocker locker(m_codeBlock->m_lock);
     973        m_codeBlock->shrinkToFit(locker, CodeBlock::ShrinkMode::LateShrink);
     974    }
     975
     976    for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
     977        HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
     978        // FIXME: <rdar://problem/39433318>.
     979        handler.nativeCode = m_codeBlock->jitCodeMap().find(BytecodeIndex(handler.target)).retagged<ExceptionHandlerPtrTag>();
     980    }
     981
    1020982    if (m_pcToCodeOriginMap)
    1021         m_jitCode->m_pcToCodeOriginMap = WTFMove(m_pcToCodeOriginMap);
     983        m_codeBlock->setPCToCodeOriginMap(WTFMove(m_pcToCodeOriginMap));
    1022984
    1023985    m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT->add(
    1024986        static_cast<double>(m_jitCode->size()) /
    1025         static_cast<double>(m_unlinkedCodeBlock->instructionsSize()));
    1026 
    1027     codeBlock->setupWithUnlinkedBaselineCode(m_jitCode.releaseNonNull());
     987        static_cast<double>(m_codeBlock->instructionsSize()));
     988
     989    m_codeBlock->setJITCode(m_jitCode.releaseNonNull());
    1028990
    1029991    return CompilationSuccessful;
     
    1037999}
    10381000
    1039 CompilationResult JIT::privateCompile(CodeBlock* codeBlock, JITCompilationEffort effort)
     1001CompilationResult JIT::privateCompile(JITCompilationEffort effort)
    10401002{
    10411003    doMainThreadPreparationBeforeCompile();
    10421004    compileAndLinkWithoutFinalizing(effort);
    1043     return finalizeOnMainThread(codeBlock);
     1005    return finalizeOnMainThread();
    10441006}
    10451007
     
    10811043}
    10821044
    1083 unsigned JIT::frameRegisterCountFor(UnlinkedCodeBlock* codeBlock)
     1045unsigned JIT::frameRegisterCountFor(CodeBlock* codeBlock)
    10841046{
    10851047    ASSERT(static_cast<unsigned>(codeBlock->numCalleeLocals()) == WTF::roundUpToMultipleOf(stackAlignmentRegisters(), static_cast<unsigned>(codeBlock->numCalleeLocals())));
     
    10881050}
    10891051
    1090 unsigned JIT::frameRegisterCountFor(CodeBlock* codeBlock)
    1091 {
    1092     return frameRegisterCountFor(codeBlock->unlinkedCodeBlock());
    1093 }
    1094 
    1095 int JIT::stackPointerOffsetFor(UnlinkedCodeBlock* codeBlock)
     1052int JIT::stackPointerOffsetFor(CodeBlock* codeBlock)
    10961053{
    10971054    return virtualRegisterForLocal(frameRegisterCountFor(codeBlock) - 1).offset();
    1098 }
    1099 
    1100 int JIT::stackPointerOffsetFor(CodeBlock* codeBlock)
    1101 {
    1102     return stackPointerOffsetFor(codeBlock->unlinkedCodeBlock());
    11031055}
    11041056
Note: See TracChangeset for help on using the changeset viewer.