Changeset 226530 in webkit for trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
- Timestamp: Jan 8, 2018, 1:05:17 PM (8 years ago)
- File: 1 edited
Legend:
- Unmodified: lines with no prefix
- Added: lines prefixed with +
- Removed: lines prefixed with -
trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
--- trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp (r226436)
+++ trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp (r226530)
@@ -1,4 +1,4 @@
 /*
- * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
  * Copyright (C) 2008 Cameron Zwarich <[email protected]>
  *
@@ -308,10 +308,10 @@
     , m_isStrictMode(other.m_isStrictMode)
     , m_codeType(other.m_codeType)
-    , m_unlinkedCode(*other.m_vm, this, other.m_unlinkedCode.get())
+    , m_unlinkedCode(*other.vm(), this, other.m_unlinkedCode.get())
     , m_numberOfArgumentsToSkip(other.m_numberOfArgumentsToSkip)
     , m_hasDebuggerStatement(false)
     , m_steppingMode(SteppingModeDisabled)
     , m_numBreakpoints(0)
-    , m_ownerExecutable(*other.m_vm, this, other.m_ownerExecutable.get())
+    , m_ownerExecutable(*other.vm(), this, other.m_ownerExecutable.get())
     , m_vm(other.m_vm)
     , m_instructions(other.m_instructions)
@@ -330,4 +330,6 @@
     , m_reoptimizationRetryCounter(0)
     , m_creationTime(MonotonicTime::now())
+    , m_unconditionalFinalizer(makePoisonedUnique<UnconditionalFinalizer>(*this))
+    , m_weakReferenceHarvester(makePoisonedUnique<WeakReferenceHarvester>(*this))
 {
     m_visitWeaklyHasBeenCalled = false;
@@ -388,4 +390,6 @@
     , m_reoptimizationRetryCounter(0)
     , m_creationTime(MonotonicTime::now())
+    , m_unconditionalFinalizer(makePoisonedUnique<UnconditionalFinalizer>(*this))
+    , m_weakReferenceHarvester(makePoisonedUnique<WeakReferenceHarvester>(*this))
 {
     m_visitWeaklyHasBeenCalled = false;
@@ -423,9 +427,9 @@
 
     if (unlinkedCodeBlock->usesGlobalObject())
-        m_constantRegisters[unlinkedCodeBlock->globalObjectRegister().toConstantIndex()].set(*m_vm, this, m_globalObject.get());
+        m_constantRegisters[unlinkedCodeBlock->globalObjectRegister().toConstantIndex()].set(vm, this, m_globalObject.get());
 
     for (unsigned i = 0; i < LinkTimeConstantCount; i++) {
         LinkTimeConstant type = static_cast<LinkTimeConstant>(i);
         if (unsigned registerIndex = unlinkedCodeBlock->registerIndexForLinkTimeConstant(type))
-            m_constantRegisters[registerIndex].set(*m_vm, this, m_globalObject->jsCellForLinkTimeConstant(type));
+            m_constantRegisters[registerIndex].set(vm, this, m_globalObject->jsCellForLinkTimeConstant(type));
     }
@@ -435,5 +439,5 @@
     if (UnlinkedModuleProgramCodeBlock* unlinkedModuleProgramCodeBlock = jsDynamicCast<UnlinkedModuleProgramCodeBlock*>(vm, unlinkedCodeBlock)) {
         SymbolTable* clonedSymbolTable = jsCast<ModuleProgramExecutable*>(ownerExecutable)->moduleEnvironmentSymbolTable();
-        if (m_vm->typeProfiler()) {
+        if (vm.typeProfiler()) {
             ConcurrentJSLocker locker(clonedSymbolTable->m_lock);
             clonedSymbolTable->prepareForTypeProfiling(locker);
@@ -448,4 +452,4 @@
         if (shouldUpdateFunctionHasExecutedCache)
             vm.functionHasExecutedCache()->insertUnexecutedRange(ownerExecutable->sourceID(), unlinkedExecutable->typeProfilingStartOffset(), unlinkedExecutable->typeProfilingEndOffset());
-        m_functionDecls[i].set(*m_vm, this, unlinkedExecutable->link(*m_vm, ownerExecutable->source()));
+        m_functionDecls[i].set(vm, this, unlinkedExecutable->link(vm, ownerExecutable->source()));
     }
@@ -456,4 +460,4 @@
         if (shouldUpdateFunctionHasExecutedCache)
             vm.functionHasExecutedCache()->insertUnexecutedRange(ownerExecutable->sourceID(), unlinkedExecutable->typeProfilingStartOffset(), unlinkedExecutable->typeProfilingEndOffset());
-        m_functionExprs[i].set(*m_vm, this, unlinkedExecutable->link(*m_vm, ownerExecutable->source()));
+        m_functionExprs[i].set(vm, this, unlinkedExecutable->link(vm, ownerExecutable->source()));
     }
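The initializers added at new lines 332-333 and 392-393 switch the finalizer and harvester from embedded members to separately allocated objects behind poison-guarded pointers. The sketch below illustrates the general XOR-poisoning idea such pointers rely on; it is an illustration only, with hypothetical names (`PoisonedUnique`, `kCodeBlockKey`, `create`), not WTF's actual `Poisoned` or `makePoisonedUnique` implementation.

```cpp
#include <cstdint>
#include <utility>

// Hypothetical stand-in for a per-type poison key.
inline constexpr uintptr_t kCodeBlockKey = uintptr_t(0x1badbeefcafeULL);

template<typename T>
class PoisonedUnique {
public:
    PoisonedUnique() = default;

    template<typename... Args>
    static PoisonedUnique create(Args&&... args)
    {
        PoisonedUnique result;
        // Store the pointer XORed with the key: the raw field never holds
        // a directly dereferenceable address.
        result.m_bits = reinterpret_cast<uintptr_t>(
            new T(std::forward<Args>(args)...)) ^ kCodeBlockKey;
        return result;
    }

    // Unpoison on access; this is the work that each get() call pays for.
    T* get() const { return m_bits ? reinterpret_cast<T*>(m_bits ^ kCodeBlockKey) : nullptr; }
    T* operator->() const { return get(); }

    PoisonedUnique(PoisonedUnique&& other)
        : m_bits(std::exchange(other.m_bits, 0)) { }
    PoisonedUnique(const PoisonedUnique&) = delete;
    PoisonedUnique& operator=(const PoisonedUnique&) = delete;

    ~PoisonedUnique() { delete get(); }

private:
    uintptr_t m_bits { 0 };
};
```

The payoff is that an attacker who leaks or forges the stored bits gets nothing usable as a pointer without also knowing the key, which is why the registration sites later in the diff now call `.get()` instead of taking the address of an embedded member.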
@@ -854,6 +858,7 @@
 CodeBlock::~CodeBlock()
 {
-    if (UNLIKELY(m_vm->m_perBytecodeProfiler))
-        m_vm->m_perBytecodeProfiler->notifyDestruction(this);
+    VM& vm = *m_vm;
+    if (UNLIKELY(vm.m_perBytecodeProfiler))
+        vm.m_perBytecodeProfiler->notifyDestruction(this);
 
     if (unlinkedCodeBlock()->didOptimize() == MixedTriState)
@@ -877,5 +882,5 @@
 
 #if ENABLE(JIT)
-    for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
+    for (auto iter = m_stubInfos.begin(); !!iter; ++iter) {
         StructureStubInfo* stub = *iter;
         stub->aboutToDie();
@@ -910,5 +915,6 @@
 void CodeBlock::setConstantRegisters(const Vector<WriteBarrier<Unknown>>& constants, const Vector<SourceCodeRepresentation>& constantsSourceCodeRepresentation)
 {
-    auto scope = DECLARE_THROW_SCOPE(*m_vm);
+    VM& vm = *m_vm;
+    auto scope = DECLARE_THROW_SCOPE(vm);
     JSGlobalObject* globalObject = m_globalObject.get();
     ExecState* exec = globalObject->globalExec();
@@ -917,10 +923,10 @@
     size_t count = constants.size();
     m_constantRegisters.resizeToFit(count);
-    bool hasTypeProfiler = !!m_vm->typeProfiler();
+    bool hasTypeProfiler = !!vm.typeProfiler();
     for (size_t i = 0; i < count; i++) {
         JSValue constant = constants[i].get();
 
         if (!constant.isEmpty()) {
-            if (SymbolTable* symbolTable = jsDynamicCast<SymbolTable*>(*m_vm, constant)) {
+            if (SymbolTable* symbolTable = jsDynamicCast<SymbolTable*>(vm, constant)) {
                 if (hasTypeProfiler) {
                     ConcurrentJSLocker locker(symbolTable->m_lock);
@@ -928,10 +934,10 @@
                 }
 
-                SymbolTable* clone = symbolTable->cloneScopePart(*m_vm);
+                SymbolTable* clone = symbolTable->cloneScopePart(vm);
                 if (wasCompiledWithDebuggingOpcodes())
                     clone->setRareDataCodeBlock(this);
 
                 constant = clone;
-            } else if (isTemplateRegistryKey(*m_vm, constant)) {
+            } else if (isTemplateRegistryKey(vm, constant)) {
                 auto* templateObject = globalObject->templateRegistry().getTemplateObject(exec, jsCast<JSTemplateRegistryKey*>(constant));
                 RETURN_IF_EXCEPTION(scope, void());
@@ -940,4 +946,4 @@
         }
 
-        m_constantRegisters[i].set(*m_vm, this, constant);
+        m_constantRegisters[i].set(vm, this, constant);
     }
@@ -990,5 +996,5 @@
     // is probably quite close to 1. So we add one no matter what and when it runs, it
     // figures out whether it has any work to do.
-    visitor.addUnconditionalFinalizer(&m_unconditionalFinalizer);
+    visitor.addUnconditionalFinalizer(m_unconditionalFinalizer.get());
 
     if (!JITCode::isOptimizingJIT(jitType()))
@@ -1002,5 +1008,5 @@
     // jettisoning, and trying to find structures that would be live based on some
     // inline cache. So it makes sense to register them regardless.
-    visitor.addWeakReferenceHarvester(&m_weakReferenceHarvester);
+    visitor.addWeakReferenceHarvester(m_weakReferenceHarvester.get());
 
 #if ENABLE(DFG_JIT)
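A pattern repeated throughout these hunks is hoisting `VM& vm = *m_vm;` to the top of a function and then using `vm` where the code previously wrote `*m_vm` or `m_vm->`. A plausible reading, consistent with the poisoning theme of this changeset: once `m_vm` is a poisoned field, every dereference has to decode the real pointer again, so functions that touch the VM repeatedly unpoison once into a local reference. The self-contained sketch below illustrates this with hypothetical stand-ins (`VM`, `PoisonedVMPtr`, `Block`, `kVMKey`); it is not WebKit's actual code.

```cpp
#include <cstdint>
#include <cstdio>

struct VM {
    bool typeProfiler { false };
    bool heapIsBusy { false };
};

constexpr uintptr_t kVMKey = uintptr_t(0x5eed5eedULL); // hypothetical key

struct PoisonedVMPtr {
    uintptr_t bits { 0 };
    // Each dereference XORs the key back out.
    VM& operator*() const { return *reinterpret_cast<VM*>(bits ^ kVMKey); }
};

struct Block {
    PoisonedVMPtr m_vm;

    void withoutHoisting() // unpoisons on every use
    {
        if ((*m_vm).typeProfiler)
            std::puts("profiling");
        if ((*m_vm).heapIsBusy)
            std::puts("heap busy");
    }

    void withHoisting() // the changeset's pattern: unpoison once
    {
        VM& vm = *m_vm;
        if (vm.typeProfiler)
            std::puts("profiling");
        if (vm.heapIsBusy)
            std::puts("heap busy");
    }
};

int main()
{
    VM vm;
    vm.typeProfiler = true;
    Block block { { reinterpret_cast<uintptr_t>(&vm) ^ kVMKey } };
    block.withoutHoisting();
    block.withHoisting();
}
```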
@@ -1046,5 +1052,5 @@
     // is probably quite close to 1. So we add one no matter what and when it runs, it
     // figures out whether it has any work to do.
-    visitor.addUnconditionalFinalizer(&m_unconditionalFinalizer);
+    visitor.addUnconditionalFinalizer(m_unconditionalFinalizer.get());
 
     if (CodeBlock* otherBlock = specialOSREntryBlockOrNull())
@@ -1162,4 +1168,5 @@
         return;
 
+    VM& vm = *m_vm;
     bool allAreMarkedSoFar = true;
 
@@ -1175,7 +1182,7 @@
                 break;
             Structure* oldStructure =
-                m_vm->heap.structureIDTable().get(oldStructureID);
+                vm.heap.structureIDTable().get(oldStructureID);
             Structure* newStructure =
-                m_vm->heap.structureIDTable().get(newStructureID);
+                vm.heap.structureIDTable().get(newStructureID);
             if (Heap::isMarked(oldStructure))
                 visitor.appendUnbarriered(newStructure);
@@ -1192,5 +1199,5 @@
 #if ENABLE(JIT)
     if (JITCode::isJIT(jitType())) {
-        for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter)
+        for (auto iter = m_stubInfos.begin(); !!iter; ++iter)
             allAreMarkedSoFar &= (*iter)->propagateTransitions(visitor);
     }
@@ -1280,10 +1287,6 @@
 void CodeBlock::WeakReferenceHarvester::visitWeakReferences(SlotVisitor& visitor)
 {
-    CodeBlock* codeBlock =
-        bitwise_cast<CodeBlock*>(
-            bitwise_cast<char*>(this) - OBJECT_OFFSETOF(CodeBlock, m_weakReferenceHarvester));
-
-    codeBlock->propagateTransitions(NoLockingNecessary, visitor);
-    codeBlock->determineLiveness(NoLockingNecessary, visitor);
+    codeBlock.propagateTransitions(NoLockingNecessary, visitor);
+    codeBlock.determineLiveness(NoLockingNecessary, visitor);
 }
 
@@ -1298,4 +1301,5 @@
 void CodeBlock::finalizeLLIntInlineCaches()
 {
+    VM& vm = *m_vm;
     const Vector<unsigned>& propertyAccessInstructions = m_unlinkedCode->propertyAccessInstructions();
     for (size_t size = propertyAccessInstructions.size(), i = 0; i < size; ++i) {
@@ -1306,5 +1310,5 @@
         case op_get_by_id_unset: {
             StructureID oldStructureID = curInstruction[4].u.structureID;
-            if (!oldStructureID || Heap::isMarked(m_vm->heap.structureIDTable().get(oldStructureID)))
+            if (!oldStructureID || Heap::isMarked(vm.heap.structureIDTable().get(oldStructureID)))
                 break;
             if (Options::verboseOSR())
@@ -1317,7 +1321,7 @@
             StructureID newStructureID = curInstruction[6].u.structureID;
             StructureChain* chain = curInstruction[7].u.structureChain.get();
-            if ((!oldStructureID || Heap::isMarked(m_vm->heap.structureIDTable().get(oldStructureID))) &&
-                (!newStructureID || Heap::isMarked(m_vm->heap.structureIDTable().get(newStructureID))) &&
-                (!chain || Heap::isMarked(chain)))
+            if ((!oldStructureID || Heap::isMarked(vm.heap.structureIDTable().get(oldStructureID)))
+                && (!newStructureID || Heap::isMarked(vm.heap.structureIDTable().get(newStructureID)))
+                && (!chain || Heap::isMarked(chain)))
                 break;
             if (Options::verboseOSR())
@@ -1411,5 +1415,5 @@
         (*iter)->visitWeak(*vm());
 
-    for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
+    for (auto iter = m_stubInfos.begin(); !!iter; ++iter) {
         StructureStubInfo& stubInfo = **iter;
         stubInfo.visitWeakReferences(this);
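The `visitWeakReferences` hunk above also removes the `OBJECT_OFFSETOF` back-computation: while the harvester lived inside `CodeBlock`, it could recover its owner by subtracting the member's offset from `this`, but that arithmetic stops being valid once the harvester becomes its own heap allocation. The new code instead uses a `codeBlock` member, presumably the `*this` reference passed to `makePoisonedUnique` in the constructors. A self-contained contrast of the two techniques, with hypothetical `Owner`/helper types standing in for `CodeBlock` and its harvester:

```cpp
#include <cstddef>
#include <cstdio>

struct Owner;

struct EmbeddedHelper {
    void fire();
};

struct Owner {
    int id { 7 };
    EmbeddedHelper helper; // embedded member: offset arithmetic is legal
};

void EmbeddedHelper::fire()
{
    // Old pattern: recover the owner by subtracting the member's offset.
    // Valid only while the helper is physically embedded in Owner.
    Owner* owner = reinterpret_cast<Owner*>(
        reinterpret_cast<char*>(this) - offsetof(Owner, helper));
    std::printf("embedded helper, owner id %d\n", owner->id);
}

struct DetachedHelper {
    explicit DetachedHelper(Owner& owner) : owner(owner) { }
    // New pattern: a separately allocated helper keeps an explicit
    // reference to its owner, as the poisoned finalizer and harvester
    // now do with the *this passed at construction.
    void fire() { std::printf("detached helper, owner id %d\n", owner.id); }
    Owner& owner;
};

int main()
{
    Owner owner;
    owner.helper.fire();
    DetachedHelper detached(owner);
    detached.fire();
}
```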
@@ -1420,23 +1424,20 @@
 void CodeBlock::UnconditionalFinalizer::finalizeUnconditionally()
 {
-    CodeBlock* codeBlock = bitwise_cast<CodeBlock*>(
-        bitwise_cast<char*>(this) - OBJECT_OFFSETOF(CodeBlock, m_unconditionalFinalizer));
-
-    codeBlock->updateAllPredictions();
-
-    if (!Heap::isMarked(codeBlock)) {
-        if (codeBlock->shouldJettisonDueToWeakReference())
-            codeBlock->jettison(Profiler::JettisonDueToWeakReference);
+    codeBlock.updateAllPredictions();
+
+    if (!Heap::isMarked(&codeBlock)) {
+        if (codeBlock.shouldJettisonDueToWeakReference())
+            codeBlock.jettison(Profiler::JettisonDueToWeakReference);
         else
-            codeBlock->jettison(Profiler::JettisonDueToOldAge);
+            codeBlock.jettison(Profiler::JettisonDueToOldAge);
         return;
     }
 
-    if (JITCode::couldBeInterpreted(codeBlock->jitType()))
-        codeBlock->finalizeLLIntInlineCaches();
+    if (JITCode::couldBeInterpreted(codeBlock.jitType()))
+        codeBlock.finalizeLLIntInlineCaches();
 
 #if ENABLE(JIT)
-    if (!!codeBlock->jitCode())
-        codeBlock->finalizeBaselineJITInlineCaches();
+    if (!!codeBlock.jitCode())
+        codeBlock.finalizeBaselineJITInlineCaches();
 #endif
 }
@@ -1960,4 +1961,5 @@
 #endif // ENABLE(DFG_JIT)
 
+    VM& vm = *m_vm;
     DeferGCForAWhile deferGC(*heap());
 
@@ -1976,5 +1978,5 @@
         if (!jitCode()->dfgCommon()->invalidate()) {
             // We've already been invalidated.
-            RELEASE_ASSERT(this != replacement() || (m_vm->heap.isCurrentThreadBusy() && !Heap::isMarked(ownerScriptExecutable())));
+            RELEASE_ASSERT(this != replacement() || (vm.heap.isCurrentThreadBusy() && !Heap::isMarked(ownerScriptExecutable())));
             return;
         }
@@ -2008,10 +2010,9 @@
     // Jettison can happen during GC. We don't want to install code to a dead executable
     // because that would add a dead object to the remembered set.
-    if (m_vm->heap.isCurrentThreadBusy() && !Heap::isMarked(ownerScriptExecutable()))
+    if (vm.heap.isCurrentThreadBusy() && !Heap::isMarked(ownerScriptExecutable()))
         return;
 
     // This accomplishes (2).
-    ownerScriptExecutable()->installCode(
-        *m_vm, alternative(), codeType(), specializationKind());
+    ownerScriptExecutable()->installCode(vm, alternative(), codeType(), specializationKind());
 
 #if ENABLE(DFG_JIT)
@@ -2768,16 +2769,17 @@
 size_t CodeBlock::predictedMachineCodeSize()
 {
+    VM* vm = m_vm.unpoisoned();
     // This will be called from CodeBlock::CodeBlock before either m_vm or the
     // instructions have been initialized. It's OK to return 0 because what will really
     // matter is the recomputation of this value when the slow path is triggered.
-    if (!m_vm)
+    if (!vm)
        return 0;
 
-    if (!*m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT)
+    if (!*vm->machineCodeBytesPerBytecodeWordForBaselineJIT)
         return 0; // It's as good of a prediction as we'll get.
 
     // Be conservative: return a size that will be an overestimation 84% of the time.
-    double multiplier = m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT->mean() +
-        m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT->standardDeviation();
+    double multiplier = vm->machineCodeBytesPerBytecodeWordForBaselineJIT->mean() +
+        vm->machineCodeBytesPerBytecodeWordForBaselineJIT->standardDeviation();
 
     // Be paranoid: silently reject bogus multipiers. Silently doing the "wrong" thing
@@ -3067,5 +3069,5 @@
     }
 
-    for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
+    for (auto iter = m_stubInfos.begin(); !!iter; ++iter) {
         StructureStubInfo* stub = *iter;
         if (stub->containsPC(pc))
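The `predictedMachineCodeSize()` hunk reads the underlying pointer once via `m_vm.unpoisoned()` and null-checks that, since the function can run before `m_vm` is initialized. A minimal sketch of such an accessor follows, assuming (as the null check in the diff implies) that an unset pointer is stored as zero bits; `PoisonedPtr` and `kKey` are hypothetical names, not WTF's API.

```cpp
#include <cstdint>

inline constexpr uintptr_t kKey = uintptr_t(0xfeed5afeULL); // hypothetical key

template<typename T>
class PoisonedPtr {
public:
    PoisonedPtr() = default;
    explicit PoisonedPtr(T* ptr)
        : m_bits(ptr ? reinterpret_cast<uintptr_t>(ptr) ^ kKey : 0) { }

    // Decode without asserting: zero bits mean "not set yet", so a caller
    // can hoist the raw pointer and branch on it, in the spirit of the
    // diff's
    //     VM* vm = m_vm.unpoisoned();
    //     if (!vm)
    //         return 0;
    T* unpoisoned() const
    {
        return m_bits ? reinterpret_cast<T*>(m_bits ^ kKey) : nullptr;
    }

private:
    uintptr_t m_bits { 0 };
};
```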