Timestamp: Jan 8, 2018, 1:05:17 PM (8 years ago)
Author: [email protected]
Message:

Apply poisoning to more pointers in JSC.
https://bugs.webkit.org/show_bug.cgi?id=181096
<rdar://problem/36182970>

Reviewed by JF Bastien.
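
For readers unfamiliar with JSC pointer poisoning: a poisoned pointer is stored XOR'ed with a poison constant, so a stray or type-confused read of the field does not yield a usable heap pointer, and every legitimate access unpoisons it first. The following is a minimal sketch of that idea only; ExamplePoisonedPtr, ExampleVM, and examplePoisonKey are made-up names, not the WTF::Poisoned implementation.

#include <cstdint>
#include <cstdio>

// Illustrative constant; real keys are per pointer category.
static constexpr uintptr_t examplePoisonKey = 0xbf00d1e50b0c5afeULL;

template<typename T>
class ExamplePoisonedPtr {
public:
    explicit ExamplePoisonedPtr(T* ptr)
        : m_bits(reinterpret_cast<uintptr_t>(ptr) ^ examplePoisonKey) { }

    // Unpoisoning costs one XOR per access.
    T* unpoisoned() const { return reinterpret_cast<T*>(m_bits ^ examplePoisonKey); }
    T* operator->() const { return unpoisoned(); }

private:
    uintptr_t m_bits; // never stored as a raw pointer
};

struct ExampleVM { int id = 42; };

int main()
{
    ExampleVM vm;
    ExamplePoisonedPtr<ExampleVM> poisoned(&vm);
    std::printf("id = %d\n", poisoned->id);
    return 0;
}

Because each access pays that XOR, the CodeBlock.cpp changes in the diff below cache the unpoisoned value once per function (VM& vm = *m_vm;) instead of re-reading the poisoned m_vm repeatedly.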

Source/JavaScriptCore:

  • assembler/MacroAssembler.h:

(JSC::MacroAssembler::xorPtr):

  • assembler/MacroAssemblerARM64.h:

(JSC::MacroAssemblerARM64::xor64):

  • assembler/MacroAssemblerX86_64.h:

(JSC::MacroAssemblerX86_64::xor64):

  • The three assembler entries above add the xorPtr implementation (see the sketch after this file list).
  • bytecode/CodeBlock.cpp:

(JSC::CodeBlock::inferredName const):
(JSC::CodeBlock::CodeBlock):
(JSC::CodeBlock::finishCreation):
(JSC::CodeBlock::~CodeBlock):
(JSC::CodeBlock::setConstantRegisters):
(JSC::CodeBlock::visitWeakly):
(JSC::CodeBlock::visitChildren):
(JSC::CodeBlock::propagateTransitions):
(JSC::CodeBlock::WeakReferenceHarvester::visitWeakReferences):
(JSC::CodeBlock::finalizeLLIntInlineCaches):
(JSC::CodeBlock::finalizeBaselineJITInlineCaches):
(JSC::CodeBlock::UnconditionalFinalizer::finalizeUnconditionally):
(JSC::CodeBlock::jettison):
(JSC::CodeBlock::predictedMachineCodeSize):
(JSC::CodeBlock::findPC):

  • bytecode/CodeBlock.h:

(JSC::CodeBlock::UnconditionalFinalizer::UnconditionalFinalizer):
(JSC::CodeBlock::WeakReferenceHarvester::WeakReferenceHarvester):
(JSC::CodeBlock::stubInfoBegin):
(JSC::CodeBlock::stubInfoEnd):
(JSC::CodeBlock::callLinkInfosBegin):
(JSC::CodeBlock::callLinkInfosEnd):
(JSC::CodeBlock::instructions):
(JSC::CodeBlock::instructions const):
(JSC::CodeBlock::vm const):

  • dfg/DFGOSRExitCompilerCommon.h:

(JSC::DFG::adjustFrameAndStackInOSRExitCompilerThunk):

  • jit/JIT.h:
  • llint/LLIntOfflineAsmConfig.h:
  • llint/LowLevelInterpreter.asm:
  • llint/LowLevelInterpreter64.asm:
  • parser/UnlinkedSourceCode.h:
  • runtime/JSCPoison.h:
  • runtime/JSGlobalObject.cpp:

(JSC::JSGlobalObject::init):

  • runtime/JSGlobalObject.h:
  • runtime/JSScriptFetchParameters.h:
  • runtime/JSScriptFetcher.h:
  • runtime/StructureTransitionTable.h:
  • wasm/js/JSWebAssemblyCodeBlock.cpp:

(JSC::JSWebAssemblyCodeBlock::JSWebAssemblyCodeBlock):
(JSC::JSWebAssemblyCodeBlock::visitChildren):
(JSC::JSWebAssemblyCodeBlock::UnconditionalFinalizer::finalizeUnconditionally):

  • wasm/js/JSWebAssemblyCodeBlock.h:
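
As noted in the MacroAssembler entries above, the new xorPtr helper lets JIT code poison and unpoison pointers with a single XOR. On a 64-bit target it presumably forwards to xor64 (xor32 on 32-bit), in the style of the other *Ptr helpers; the stand-alone sketch below only demonstrates that delegation shape, and ExampleAssembler is a made-up stand-in, not the committed code.

#include <cstdint>
#include <cstdio>

struct ExampleAssembler {
    // Stand-in for the width-specific op the real assembler would emit.
    void xor64(uint64_t imm, const char* reg)
    {
        std::printf("xor64 %#llx, %s\n", static_cast<unsigned long long>(imm), reg);
    }

    // The pointer-width convenience: with 64-bit pointers it is just xor64.
    void xorPtr(uint64_t imm, const char* reg) { xor64(imm, reg); }
};

int main()
{
    ExampleAssembler jit;
    jit.xorPtr(0xbf00d1e50b0c5afeULL, "regT0"); // e.g. unpoison a freshly loaded pointer
    return 0;
}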

Source/WTF:

Added support for PoisonedBag and PoisonedRefCountedArray.
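
The idea, sketched below with made-up names rather than the actual WTF code, is that the container's internal storage pointer is itself kept poisoned, so a corrupted PoisonedBag or PoisonedRefCountedArray does not expose raw heap pointers to whatever overreads it.

#include <cstdint>
#include <cstdio>

static constexpr uintptr_t exampleBagPoison = 0x5eed5eedbeefbeefULL;

template<typename T>
class ExamplePoisonedBag {
public:
    struct Node {
        explicit Node(T value) : value(value) { }
        T value;
        Node* next { nullptr };
    };

    ~ExamplePoisonedBag()
    {
        for (Node* node = unpoisonedHead(); node;) {
            Node* next = node->next;
            delete node;
            node = next;
        }
    }

    T* add(T value)
    {
        Node* node = new Node(value);
        node->next = unpoisonedHead();
        m_poisonedHead = reinterpret_cast<uintptr_t>(node) ^ exampleBagPoison;
        return &node->value;
    }

    Node* unpoisonedHead() const
    {
        return m_poisonedHead ? reinterpret_cast<Node*>(m_poisonedHead ^ exampleBagPoison) : nullptr;
    }

private:
    uintptr_t m_poisonedHead { 0 }; // the head pointer is never stored raw
};

int main()
{
    ExamplePoisonedBag<int> bag;
    bag.add(1);
    bag.add(2);
    for (auto* node = bag.unpoisonedHead(); node; node = node->next)
        std::printf("%d\n", node->value);
    return 0;
}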

  • wtf/Bag.h:

(WTF::Private::BagNode::BagNode):
(WTF::Bag::Bag):
(WTF::Bag::operator=):
(WTF::Bag::clear):
(WTF::Bag::add):
(WTF::Bag::begin):
(WTF::Bag::unwrappedHead):
(WTF::Bag::Node::Node): Deleted.

  • wtf/BagToHashMap.h:

(WTF::toHashMap):

  • wtf/Poisoned.h:

(WTF::constExprPoisonRandom):
(WTF::makeConstExprPoison): See the constexpr-poison sketch after this list.

  • wtf/RefCountedArray.h:

(WTF::RefCountedArray::RefCountedArray):
(WTF::RefCountedArray::clone const):
(WTF::RefCountedArray::operator=):
(WTF::RefCountedArray::~RefCountedArray):
(WTF::RefCountedArray::refCount const):
(WTF::RefCountedArray::size const):
(WTF::RefCountedArray::data):
(WTF::RefCountedArray::begin):
(WTF::RefCountedArray::end):
(WTF::RefCountedArray::data const):
(WTF::RefCountedArray::begin const):
(WTF::RefCountedArray::operator== const):
(WTF::RefCountedArray::Header::fromPayload):

  • wtf/WTFAssertions.cpp:
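
The wtf/Poisoned.h entries above add constExprPoisonRandom and makeConstExprPoison so each poison key can be a compile-time constant. The sketch below shows one way such a constant could be derived (an FNV-1a hash of a tag string); this is an assumed illustration of the idea, not WebKit's actual implementation, and all names are hypothetical.

#include <cstdint>
#include <cstdio>

constexpr uint64_t exampleConstExprHash(const char* str, uint64_t hash = 0xcbf29ce484222325ULL)
{
    return *str ? exampleConstExprHash(str + 1, (hash ^ static_cast<uint64_t>(*str)) * 0x100000001b3ULL) : hash;
}

constexpr uintptr_t exampleMakeConstExprPoison(const char* tag)
{
    // Force the low bit so the key is never zero and a poisoned pointer never looks null.
    return static_cast<uintptr_t>(exampleConstExprHash(tag) | 1);
}

// Each pointer category gets its own compile-time key.
static constexpr uintptr_t exampleCodeBlockPoison = exampleMakeConstExprPoison("CodeBlockPoison");
static constexpr uintptr_t exampleGlobalObjectPoison = exampleMakeConstExprPoison("GlobalObjectPoison");

int main()
{
    std::printf("%#llx\n%#llx\n",
        static_cast<unsigned long long>(exampleCodeBlockPoison),
        static_cast<unsigned long long>(exampleGlobalObjectPoison));
    return 0;
}
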
File (1 edited):
  • trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp

Diff of CodeBlock.cpp, r226436 → r226530. Lines prefixed with '-' were removed, lines prefixed with '+' were added; unchanged context lines keep a leading space.

@@ -1 +1 @@
 /*
- * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
  * Copyright (C) 2008 Cameron Zwarich <[email protected]>
  *

@@ -308 +308 @@
     , m_isStrictMode(other.m_isStrictMode)
     , m_codeType(other.m_codeType)
-    , m_unlinkedCode(*other.m_vm, this, other.m_unlinkedCode.get())
+    , m_unlinkedCode(*other.vm(), this, other.m_unlinkedCode.get())
     , m_numberOfArgumentsToSkip(other.m_numberOfArgumentsToSkip)
     , m_hasDebuggerStatement(false)
     , m_steppingMode(SteppingModeDisabled)
     , m_numBreakpoints(0)
-    , m_ownerExecutable(*other.m_vm, this, other.m_ownerExecutable.get())
+    , m_ownerExecutable(*other.vm(), this, other.m_ownerExecutable.get())
     , m_vm(other.m_vm)
     , m_instructions(other.m_instructions)

@@ -330 +330 @@
     , m_reoptimizationRetryCounter(0)
     , m_creationTime(MonotonicTime::now())
+    , m_unconditionalFinalizer(makePoisonedUnique<UnconditionalFinalizer>(*this))
+    , m_weakReferenceHarvester(makePoisonedUnique<WeakReferenceHarvester>(*this))
 {
     m_visitWeaklyHasBeenCalled = false;

@@ -388 +390 @@
     , m_reoptimizationRetryCounter(0)
     , m_creationTime(MonotonicTime::now())
+    , m_unconditionalFinalizer(makePoisonedUnique<UnconditionalFinalizer>(*this))
+    , m_weakReferenceHarvester(makePoisonedUnique<WeakReferenceHarvester>(*this))
 {
     m_visitWeaklyHasBeenCalled = false;

@@ -423 +427 @@
 
     if (unlinkedCodeBlock->usesGlobalObject())
-        m_constantRegisters[unlinkedCodeBlock->globalObjectRegister().toConstantIndex()].set(*m_vm, this, m_globalObject.get());
+        m_constantRegisters[unlinkedCodeBlock->globalObjectRegister().toConstantIndex()].set(vm, this, m_globalObject.get());
 
     for (unsigned i = 0; i < LinkTimeConstantCount; i++) {
         LinkTimeConstant type = static_cast<LinkTimeConstant>(i);
         if (unsigned registerIndex = unlinkedCodeBlock->registerIndexForLinkTimeConstant(type))
-            m_constantRegisters[registerIndex].set(*m_vm, this, m_globalObject->jsCellForLinkTimeConstant(type));
+            m_constantRegisters[registerIndex].set(vm, this, m_globalObject->jsCellForLinkTimeConstant(type));
     }
 

@@ -435 +439 @@
     if (UnlinkedModuleProgramCodeBlock* unlinkedModuleProgramCodeBlock = jsDynamicCast<UnlinkedModuleProgramCodeBlock*>(vm, unlinkedCodeBlock)) {
         SymbolTable* clonedSymbolTable = jsCast<ModuleProgramExecutable*>(ownerExecutable)->moduleEnvironmentSymbolTable();
-        if (m_vm->typeProfiler()) {
+        if (vm.typeProfiler()) {
             ConcurrentJSLocker locker(clonedSymbolTable->m_lock);
             clonedSymbolTable->prepareForTypeProfiling(locker);

@@ -448 +452 @@
         if (shouldUpdateFunctionHasExecutedCache)
             vm.functionHasExecutedCache()->insertUnexecutedRange(ownerExecutable->sourceID(), unlinkedExecutable->typeProfilingStartOffset(), unlinkedExecutable->typeProfilingEndOffset());
-        m_functionDecls[i].set(*m_vm, this, unlinkedExecutable->link(*m_vm, ownerExecutable->source()));
+        m_functionDecls[i].set(vm, this, unlinkedExecutable->link(vm, ownerExecutable->source()));
     }
 

@@ -456 +460 @@
         if (shouldUpdateFunctionHasExecutedCache)
             vm.functionHasExecutedCache()->insertUnexecutedRange(ownerExecutable->sourceID(), unlinkedExecutable->typeProfilingStartOffset(), unlinkedExecutable->typeProfilingEndOffset());
-        m_functionExprs[i].set(*m_vm, this, unlinkedExecutable->link(*m_vm, ownerExecutable->source()));
+        m_functionExprs[i].set(vm, this, unlinkedExecutable->link(vm, ownerExecutable->source()));
     }
 

@@ -854 +858 @@
 CodeBlock::~CodeBlock()
 {
-    if (UNLIKELY(m_vm->m_perBytecodeProfiler))
-        m_vm->m_perBytecodeProfiler->notifyDestruction(this);
+    VM& vm = *m_vm;
+    if (UNLIKELY(vm.m_perBytecodeProfiler))
+        vm.m_perBytecodeProfiler->notifyDestruction(this);
 
     if (unlinkedCodeBlock()->didOptimize() == MixedTriState)

@@ -877 +882 @@
 
 #if ENABLE(JIT)
-    for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
+    for (auto iter = m_stubInfos.begin(); !!iter; ++iter) {
         StructureStubInfo* stub = *iter;
         stub->aboutToDie();

@@ -910 +915 @@
 void CodeBlock::setConstantRegisters(const Vector<WriteBarrier<Unknown>>& constants, const Vector<SourceCodeRepresentation>& constantsSourceCodeRepresentation)
 {
-    auto scope = DECLARE_THROW_SCOPE(*m_vm);
+    VM& vm = *m_vm;
+    auto scope = DECLARE_THROW_SCOPE(vm);
     JSGlobalObject* globalObject = m_globalObject.get();
     ExecState* exec = globalObject->globalExec();

@@ -917 +923 @@
     size_t count = constants.size();
     m_constantRegisters.resizeToFit(count);
-    bool hasTypeProfiler = !!m_vm->typeProfiler();
+    bool hasTypeProfiler = !!vm.typeProfiler();
     for (size_t i = 0; i < count; i++) {
         JSValue constant = constants[i].get();
 
         if (!constant.isEmpty()) {
-            if (SymbolTable* symbolTable = jsDynamicCast<SymbolTable*>(*m_vm, constant)) {
+            if (SymbolTable* symbolTable = jsDynamicCast<SymbolTable*>(vm, constant)) {
                 if (hasTypeProfiler) {
                     ConcurrentJSLocker locker(symbolTable->m_lock);

@@ -928 +934 @@
                 }
 
-                SymbolTable* clone = symbolTable->cloneScopePart(*m_vm);
+                SymbolTable* clone = symbolTable->cloneScopePart(vm);
                 if (wasCompiledWithDebuggingOpcodes())
                     clone->setRareDataCodeBlock(this);
 
                 constant = clone;
-            } else if (isTemplateRegistryKey(*m_vm, constant)) {
+            } else if (isTemplateRegistryKey(vm, constant)) {
                 auto* templateObject = globalObject->templateRegistry().getTemplateObject(exec, jsCast<JSTemplateRegistryKey*>(constant));
                 RETURN_IF_EXCEPTION(scope, void());

@@ -940 +946 @@
         }
 
-        m_constantRegisters[i].set(*m_vm, this, constant);
+        m_constantRegisters[i].set(vm, this, constant);
     }
 

@@ -990 +996 @@
     // is probably quite close to 1. So we add one no matter what and when it runs, it
     // figures out whether it has any work to do.
-    visitor.addUnconditionalFinalizer(&m_unconditionalFinalizer);
+    visitor.addUnconditionalFinalizer(m_unconditionalFinalizer.get());
 
     if (!JITCode::isOptimizingJIT(jitType()))

@@ -1002 +1008 @@
     // jettisoning, and trying to find structures that would be live based on some
     // inline cache. So it makes sense to register them regardless.
-    visitor.addWeakReferenceHarvester(&m_weakReferenceHarvester);
+    visitor.addWeakReferenceHarvester(m_weakReferenceHarvester.get());
 
 #if ENABLE(DFG_JIT)

@@ -1046 +1052 @@
     // is probably quite close to 1. So we add one no matter what and when it runs, it
     // figures out whether it has any work to do.
-    visitor.addUnconditionalFinalizer(&m_unconditionalFinalizer);
+    visitor.addUnconditionalFinalizer(m_unconditionalFinalizer.get());
 
     if (CodeBlock* otherBlock = specialOSREntryBlockOrNull())

@@ -1162 +1168 @@
         return;
 
+    VM& vm = *m_vm;
     bool allAreMarkedSoFar = true;
 

@@ -1175 +1182 @@
                     break;
                 Structure* oldStructure =
-                    m_vm->heap.structureIDTable().get(oldStructureID);
+                    vm.heap.structureIDTable().get(oldStructureID);
                 Structure* newStructure =
-                    m_vm->heap.structureIDTable().get(newStructureID);
+                    vm.heap.structureIDTable().get(newStructureID);
                 if (Heap::isMarked(oldStructure))
                     visitor.appendUnbarriered(newStructure);

@@ -1192 +1199 @@
 #if ENABLE(JIT)
     if (JITCode::isJIT(jitType())) {
-        for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter)
+        for (auto iter = m_stubInfos.begin(); !!iter; ++iter)
             allAreMarkedSoFar &= (*iter)->propagateTransitions(visitor);
     }

@@ -1280 +1287 @@
 void CodeBlock::WeakReferenceHarvester::visitWeakReferences(SlotVisitor& visitor)
 {
-    CodeBlock* codeBlock =
-        bitwise_cast<CodeBlock*>(
-            bitwise_cast<char*>(this) - OBJECT_OFFSETOF(CodeBlock, m_weakReferenceHarvester));
-
-    codeBlock->propagateTransitions(NoLockingNecessary, visitor);
-    codeBlock->determineLiveness(NoLockingNecessary, visitor);
+    codeBlock.propagateTransitions(NoLockingNecessary, visitor);
+    codeBlock.determineLiveness(NoLockingNecessary, visitor);
 }
 

@@ -1298 +1301 @@
 void CodeBlock::finalizeLLIntInlineCaches()
 {
+    VM& vm = *m_vm;
     const Vector<unsigned>& propertyAccessInstructions = m_unlinkedCode->propertyAccessInstructions();
     for (size_t size = propertyAccessInstructions.size(), i = 0; i < size; ++i) {

@@ -1306 +1310 @@
         case op_get_by_id_unset: {
             StructureID oldStructureID = curInstruction[4].u.structureID;
-            if (!oldStructureID || Heap::isMarked(m_vm->heap.structureIDTable().get(oldStructureID)))
+            if (!oldStructureID || Heap::isMarked(vm.heap.structureIDTable().get(oldStructureID)))
                 break;
             if (Options::verboseOSR())

@@ -1317 +1321 @@
             StructureID newStructureID = curInstruction[6].u.structureID;
             StructureChain* chain = curInstruction[7].u.structureChain.get();
-            if ((!oldStructureID || Heap::isMarked(m_vm->heap.structureIDTable().get(oldStructureID))) &&
-                (!newStructureID || Heap::isMarked(m_vm->heap.structureIDTable().get(newStructureID))) &&
-                (!chain || Heap::isMarked(chain)))
+            if ((!oldStructureID || Heap::isMarked(vm.heap.structureIDTable().get(oldStructureID)))
+                && (!newStructureID || Heap::isMarked(vm.heap.structureIDTable().get(newStructureID)))
+                && (!chain || Heap::isMarked(chain)))
                 break;
             if (Options::verboseOSR())

@@ -1411 +1415 @@
         (*iter)->visitWeak(*vm());
 
-    for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
+    for (auto iter = m_stubInfos.begin(); !!iter; ++iter) {
         StructureStubInfo& stubInfo = **iter;
         stubInfo.visitWeakReferences(this);

@@ -1420 +1424 @@
 void CodeBlock::UnconditionalFinalizer::finalizeUnconditionally()
 {
-    CodeBlock* codeBlock = bitwise_cast<CodeBlock*>(
-        bitwise_cast<char*>(this) - OBJECT_OFFSETOF(CodeBlock, m_unconditionalFinalizer));
-
-    codeBlock->updateAllPredictions();
-
-    if (!Heap::isMarked(codeBlock)) {
-        if (codeBlock->shouldJettisonDueToWeakReference())
-            codeBlock->jettison(Profiler::JettisonDueToWeakReference);
+    codeBlock.updateAllPredictions();
+
+    if (!Heap::isMarked(&codeBlock)) {
+        if (codeBlock.shouldJettisonDueToWeakReference())
+            codeBlock.jettison(Profiler::JettisonDueToWeakReference);
         else
-            codeBlock->jettison(Profiler::JettisonDueToOldAge);
+            codeBlock.jettison(Profiler::JettisonDueToOldAge);
         return;
     }
 
-    if (JITCode::couldBeInterpreted(codeBlock->jitType()))
-        codeBlock->finalizeLLIntInlineCaches();
+    if (JITCode::couldBeInterpreted(codeBlock.jitType()))
+        codeBlock.finalizeLLIntInlineCaches();
 
 #if ENABLE(JIT)
-    if (!!codeBlock->jitCode())
-        codeBlock->finalizeBaselineJITInlineCaches();
+    if (!!codeBlock.jitCode())
+        codeBlock.finalizeBaselineJITInlineCaches();
 #endif
 }

@@ -1960 +1961 @@
 #endif // ENABLE(DFG_JIT)
 
+    VM& vm = *m_vm;
     DeferGCForAWhile deferGC(*heap());
 

@@ -1976 +1978 @@
         if (!jitCode()->dfgCommon()->invalidate()) {
             // We've already been invalidated.
-            RELEASE_ASSERT(this != replacement() || (m_vm->heap.isCurrentThreadBusy() && !Heap::isMarked(ownerScriptExecutable())));
+            RELEASE_ASSERT(this != replacement() || (vm.heap.isCurrentThreadBusy() && !Heap::isMarked(ownerScriptExecutable())));
             return;
         }

@@ -2008 +2010 @@
     // Jettison can happen during GC. We don't want to install code to a dead executable
     // because that would add a dead object to the remembered set.
-    if (m_vm->heap.isCurrentThreadBusy() && !Heap::isMarked(ownerScriptExecutable()))
+    if (vm.heap.isCurrentThreadBusy() && !Heap::isMarked(ownerScriptExecutable()))
         return;
 
     // This accomplishes (2).
-    ownerScriptExecutable()->installCode(
-        *m_vm, alternative(), codeType(), specializationKind());
+    ownerScriptExecutable()->installCode(vm, alternative(), codeType(), specializationKind());
 
 #if ENABLE(DFG_JIT)

@@ -2768 +2769 @@
 size_t CodeBlock::predictedMachineCodeSize()
 {
+    VM* vm = m_vm.unpoisoned();
     // This will be called from CodeBlock::CodeBlock before either m_vm or the
     // instructions have been initialized. It's OK to return 0 because what will really
     // matter is the recomputation of this value when the slow path is triggered.
-    if (!m_vm)
+    if (!vm)
         return 0;
 
-    if (!*m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT)
+    if (!*vm->machineCodeBytesPerBytecodeWordForBaselineJIT)
         return 0; // It's as good of a prediction as we'll get.
 
     // Be conservative: return a size that will be an overestimation 84% of the time.
-    double multiplier = m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT->mean() +
-        m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT->standardDeviation();
+    double multiplier = vm->machineCodeBytesPerBytecodeWordForBaselineJIT->mean() +
+        vm->machineCodeBytesPerBytecodeWordForBaselineJIT->standardDeviation();
 
     // Be paranoid: silently reject bogus multipiers. Silently doing the "wrong" thing

@@ -3067 +3069 @@
     }
 
-    for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
+    for (auto iter = m_stubInfos.begin(); !!iter; ++iter) {
         StructureStubInfo* stub = *iter;
         if (stub->containsPC(pc))