Changeset 148696 in webkit for trunk/Source/JavaScriptCore/jit

- Timestamp: Apr 18, 2013, 12:32:17 PM
- Location: trunk/Source/JavaScriptCore/jit
- Files: 29 edited
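Every hunk below applies the same mechanical rename: the class formerly called JSGlobalData is now VM, and the matching variables and members (globalData, m_globalData) become vm and m_vm. As a minimal illustration of what a call site looks like before and after, here is a hypothetical caller, not code from this diff; exec is an ExecState* as used throughout JSC:

    // Hypothetical call site, mirroring the JITDriver.h hunks further down.
    // Before r148696:
    //     JSGlobalData& globalData = exec->globalData();
    //     JITCode code = JIT::compile(&globalData, codeBlock, JITCompilationCanFail);
    // After r148696:
    VM& vm = exec->vm();
    JITCode code = JIT::compile(&vm, codeBlock, JITCompilationCanFail);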
trunk/Source/JavaScriptCore/jit/ClosureCallStubRoutine.cpp
(diff from r139541 to r148696)

 #include "Executable.h"
 #include "Heap.h"
-#include "JSGlobalData.h"
+#include "VM.h"
 #include "Operations.h"
 #include "SlotVisitor.h"
…
 ClosureCallStubRoutine::ClosureCallStubRoutine(
-    const MacroAssemblerCodeRef& code, JSGlobalData& globalData, const JSCell* owner,
+    const MacroAssemblerCodeRef& code, VM& vm, const JSCell* owner,
     Structure* structure, ExecutableBase* executable, const CodeOrigin& codeOrigin)
-    : GCAwareJITStubRoutine(code, globalData, true)
-    , m_structure(globalData, owner, structure)
-    , m_executable(globalData, owner, executable)
+    : GCAwareJITStubRoutine(code, vm, true)
+    , m_structure(vm, owner, structure)
+    , m_executable(vm, owner, executable)
     , m_codeOrigin(codeOrigin)
 {
trunk/Source/JavaScriptCore/jit/ClosureCallStubRoutine.h
(diff from r135336 to r148696)

 public:
     ClosureCallStubRoutine(
-        const MacroAssemblerCodeRef&, JSGlobalData&, const JSCell* owner,
+        const MacroAssemblerCodeRef&, VM&, const JSCell* owner,
         Structure*, ExecutableBase*, const CodeOrigin&);
trunk/Source/JavaScriptCore/jit/ExecutableAllocator.cpp
(diff from r140584 to r148696)

 #endif

-ExecutableAllocator::ExecutableAllocator(JSGlobalData&)
+ExecutableAllocator::ExecutableAllocator(VM&)
 #if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
     : m_allocator(adoptPtr(new DemandExecutableAllocator()))
…
 }

-PassRefPtr<ExecutableMemoryHandle> ExecutableAllocator::allocate(JSGlobalData&, size_t sizeInBytes, void* ownerUID, JITCompilationEffort effort)
+PassRefPtr<ExecutableMemoryHandle> ExecutableAllocator::allocate(VM&, size_t sizeInBytes, void* ownerUID, JITCompilationEffort effort)
 {
     RefPtr<ExecutableMemoryHandle> result = allocator()->allocate(sizeInBytes, ownerUID);
trunk/Source/JavaScriptCore/jit/ExecutableAllocator.h
(diff from r140584 to r148696)

 namespace JSC {

-class JSGlobalData;
-void releaseExecutableMemory(JSGlobalData&);
+class VM;
+void releaseExecutableMemory(VM&);

 static const unsigned jitAllocationGranule = 32;
…

 public:
-    ExecutableAllocator(JSGlobalData&);
+    ExecutableAllocator(VM&);
     ~ExecutableAllocator();
…
 #endif

-    PassRefPtr<ExecutableMemoryHandle> allocate(JSGlobalData&, size_t sizeInBytes, void* ownerUID, JITCompilationEffort);
+    PassRefPtr<ExecutableMemoryHandle> allocate(VM&, size_t sizeInBytes, void* ownerUID, JITCompilationEffort);

 #if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
trunk/Source/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp
(diff from r140594 to r148696)

 }

-ExecutableAllocator::ExecutableAllocator(JSGlobalData&)
+ExecutableAllocator::ExecutableAllocator(VM&)
 {
     ASSERT(allocator);
…
 }

-PassRefPtr<ExecutableMemoryHandle> ExecutableAllocator::allocate(JSGlobalData& globalData, size_t sizeInBytes, void* ownerUID, JITCompilationEffort effort)
+PassRefPtr<ExecutableMemoryHandle> ExecutableAllocator::allocate(VM& vm, size_t sizeInBytes, void* ownerUID, JITCompilationEffort effort)
 {
     RefPtr<ExecutableMemoryHandle> result = allocator->allocate(sizeInBytes, ownerUID);
…
         if (effort == JITCompilationCanFail)
             return result;
-        releaseExecutableMemory(globalData);
+        releaseExecutableMemory(vm);
         result = allocator->allocate(sizeInBytes, ownerUID);
         RELEASE_ASSERT(result);
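Beyond the rename, the allocate() hunk above is worth reading as one unit: when the fixed pool is exhausted and the caller did not pass JITCompilationCanFail, the allocator releases existing executable memory and retries once, asserting that the second attempt succeeds. A condensed sketch of that failure path, using only names visible in the hunk (the elided context is assumed to wrap it in an if (!result) block):

    // Condensed sketch of the failure path shown above; allocator plumbing elided.
    RefPtr<ExecutableMemoryHandle> result = allocator->allocate(sizeInBytes, ownerUID);
    if (!result) {
        if (effort == JITCompilationCanFail)
            return result;               // caller opted in to compilation failure
        releaseExecutableMemory(vm);     // drop existing compiled code to make room
        result = allocator->allocate(sizeInBytes, ownerUID);
        RELEASE_ASSERT(result);          // a must-succeed allocation may not fail twice
    }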
trunk/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.cpp
(diff from r140619 to r148696)

 #include "Heap.h"
-#include "JSGlobalData.h"
+#include "VM.h"
 #include "Operations.h"
 #include "SlotVisitor.h"
…
 GCAwareJITStubRoutine::GCAwareJITStubRoutine(
-    const MacroAssemblerCodeRef& code, JSGlobalData& globalData, bool isClosureCall)
+    const MacroAssemblerCodeRef& code, VM& vm, bool isClosureCall)
     : JITStubRoutine(code)
     , m_mayBeExecuting(false)
…
     , m_isClosureCall(isClosureCall)
 {
-    globalData.heap.m_jitStubRoutines.add(this);
+    vm.heap.m_jitStubRoutines.add(this);
 }
…
 MarkingGCAwareJITStubRoutineWithOneObject::MarkingGCAwareJITStubRoutineWithOneObject(
-    const MacroAssemblerCodeRef& code, JSGlobalData& globalData, const JSCell* owner,
+    const MacroAssemblerCodeRef& code, VM& vm, const JSCell* owner,
     JSCell* object)
-    : GCAwareJITStubRoutine(code, globalData)
-    , m_object(globalData, owner, object)
+    : GCAwareJITStubRoutine(code, vm)
+    , m_object(vm, owner, object)
 {
 }
…
 PassRefPtr<JITStubRoutine> createJITStubRoutine(
     const MacroAssemblerCodeRef& code,
-    JSGlobalData& globalData,
+    VM& vm,
     const JSCell*,
     bool makesCalls)
…
     return static_pointer_cast<JITStubRoutine>(
-        adoptRef(new GCAwareJITStubRoutine(code, globalData)));
+        adoptRef(new GCAwareJITStubRoutine(code, vm)));
 }

 PassRefPtr<JITStubRoutine> createJITStubRoutine(
     const MacroAssemblerCodeRef& code,
-    JSGlobalData& globalData,
+    VM& vm,
     const JSCell* owner,
     bool makesCalls,
…
     return static_pointer_cast<JITStubRoutine>(
-        adoptRef(new MarkingGCAwareJITStubRoutineWithOneObject(code, globalData, owner, object)));
+        adoptRef(new MarkingGCAwareJITStubRoutineWithOneObject(code, vm, owner, object)));
trunk/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.h
(diff from r135336 to r148696)

 class GCAwareJITStubRoutine : public JITStubRoutine {
 public:
-    GCAwareJITStubRoutine(const MacroAssemblerCodeRef&, JSGlobalData&, bool isClosureCall = false);
+    GCAwareJITStubRoutine(const MacroAssemblerCodeRef&, VM&, bool isClosureCall = false);
     virtual ~GCAwareJITStubRoutine();
…
 public:
     MarkingGCAwareJITStubRoutineWithOneObject(
-        const MacroAssemblerCodeRef&, JSGlobalData&, const JSCell* owner, JSCell*);
+        const MacroAssemblerCodeRef&, VM&, const JSCell* owner, JSCell*);
     virtual ~MarkingGCAwareJITStubRoutineWithOneObject();
…
 // PassRefPtr<JITStubRoutine> createJITStubRoutine(
 //     const MacroAssemblerCodeRef& code,
-//     JSGlobalData& globalData,
+//     VM& vm,
 //     const JSCell* owner,
 //     bool makesCalls,
…
 PassRefPtr<JITStubRoutine> createJITStubRoutine(
-    const MacroAssemblerCodeRef&, JSGlobalData&, const JSCell* owner, bool makesCalls);
+    const MacroAssemblerCodeRef&, VM&, const JSCell* owner, bool makesCalls);
 PassRefPtr<JITStubRoutine> createJITStubRoutine(
-    const MacroAssemblerCodeRef&, JSGlobalData&, const JSCell* owner, bool makesCalls,
+    const MacroAssemblerCodeRef&, VM&, const JSCell* owner, bool makesCalls,
     JSCell*);
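The two createJITStubRoutine() overloads declared above are the factory entry points used by the repatching code: per the .cpp hunks earlier in this changeset, the four-argument form returns a GCAwareJITStubRoutine, while the overload taking an extra JSCell* returns the marking variant that keeps that cell alive while the stub may still be executing. A hypothetical caller (the surrounding repatch logic is not part of this changeset):

    // Hypothetical use of the two overloads; arguments come from the surrounding repatch logic.
    RefPtr<JITStubRoutine> plain =
        createJITStubRoutine(code, vm, owner, makesCalls);             // no extra cell to mark
    RefPtr<JITStubRoutine> marking =
        createJITStubRoutine(code, vm, owner, makesCalls, cellToMark); // also marks cellToMark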
trunk/Source/JavaScriptCore/jit/JIT.cpp
(diff from r148639 to r148696)

 }

-JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
-    : m_interpreter(globalData->interpreter)
-    , m_globalData(globalData)
+JIT::JIT(VM* vm, CodeBlock* codeBlock)
+    : m_interpreter(vm->interpreter)
+    , m_vm(vm)
     , m_codeBlock(codeBlock)
     , m_labels(codeBlock ? codeBlock->numberOfInstructions() : 0)
…
 void JIT::emitWatchdogTimerCheck()
 {
-    if (!m_globalData->watchdog.isEnabled())
+    if (!m_vm->watchdog.isEnabled())
         return;

-    Jump skipCheck = branchTest8(Zero, AbsoluteAddress(m_globalData->watchdog.timerDidFireAddress()));
+    Jump skipCheck = branchTest8(Zero, AbsoluteAddress(m_vm->watchdog.timerDidFireAddress()));
     JITStubCall stubCall(this, cti_handle_watchdog_timer);
     stubCall.call();
…
 #endif

-    if (Options::showDisassembly() || m_globalData->m_perBytecodeProfiler)
+    if (Options::showDisassembly() || m_vm->m_perBytecodeProfiler)
         m_disassembler = adoptPtr(new JITDisassembler(m_codeBlock));
-    if (m_globalData->m_perBytecodeProfiler) {
-        m_compilation = m_globalData->m_perBytecodeProfiler->newCompilation(m_codeBlock, Profiler::Baseline);
-        m_compilation->addProfiledBytecodes(*m_globalData->m_perBytecodeProfiler, m_codeBlock);
+    if (m_vm->m_perBytecodeProfiler) {
+        m_compilation = m_vm->m_perBytecodeProfiler->newCompilation(m_codeBlock, Profiler::Baseline);
+        m_compilation->addProfiledBytecodes(*m_vm->m_perBytecodeProfiler, m_codeBlock);
     }
…
         addPtr(TrustedImm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, regT1);
-        stackCheck = branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->stack().addressOfEnd()), regT1);
+        stackCheck = branchPtr(Below, AbsoluteAddress(m_vm->interpreter->stack().addressOfEnd()), regT1);
     }
…
     m_disassembler->setEndOfCode(label());

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock, effort);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock, effort);
     if (patchBuffer.didFailToAllocate())
         return JITCode();
…
     CodeRef result = patchBuffer.finalizeCodeWithoutDisassembly();

-    m_globalData->machineCodeBytesPerBytecodeWordForBaselineJIT.add(
+    m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT.add(
         static_cast<double>(result.size()) /
         static_cast<double>(m_codeBlock->instructions().size()));
…
 }

-void JIT::linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JIT::CodePtr code, CallLinkInfo* callLinkInfo, JSGlobalData* globalData, CodeSpecializationKind kind)
+void JIT::linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JIT::CodePtr code, CallLinkInfo* callLinkInfo, VM* vm, CodeSpecializationKind kind)
 {
     RepatchBuffer repatchBuffer(callerCodeBlock);

     ASSERT(!callLinkInfo->isLinked());
-    callLinkInfo->callee.set(*globalData, callLinkInfo->hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
-    callLinkInfo->lastSeenCallee.set(*globalData, callerCodeBlock->ownerExecutable(), callee);
+    callLinkInfo->callee.set(*vm, callLinkInfo->hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
+    callLinkInfo->lastSeenCallee.set(*vm, callerCodeBlock->ownerExecutable(), callee);
     repatchBuffer.relink(callLinkInfo->hotPathOther, code);
…
             || callLinkInfo->callType == CallLinkInfo::CallVarargs);
         if (callLinkInfo->callType == CallLinkInfo::Call) {
-            repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->getCTIStub(linkClosureCallGenerator).code());
+            repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(linkClosureCallGenerator).code());
             return;
         }

-        repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->getCTIStub(virtualCallGenerator).code());
+        repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(virtualCallGenerator).code());
         return;
     }

     ASSERT(kind == CodeForConstruct);
-    repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->getCTIStub(virtualConstructGenerator).code());
+    repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(virtualConstructGenerator).code());
 }
…
     RepatchBuffer repatchBuffer(callerCodeBlock);

-    repatchBuffer.relink(callLinkInfo->callReturnLocation, callerCodeBlock->globalData()->getCTIStub(virtualCallGenerator).code());
+    repatchBuffer.relink(callLinkInfo->callReturnLocation, callerCodeBlock->vm()->getCTIStub(virtualCallGenerator).code());
trunk/Source/JavaScriptCore/jit/JIT.h
(diff from r148639 to r148696)

 public:
-    static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, JITCompilationEffort effort, CodePtr* functionEntryArityCheck = 0)
-    {
-        return JIT(globalData, codeBlock).privateCompile(functionEntryArityCheck, effort);
-    }
-
-    static void compileClosureCall(JSGlobalData* globalData, CallLinkInfo* callLinkInfo, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
-    {
-        JIT jit(globalData, callerCodeBlock);
+    static JITCode compile(VM* vm, CodeBlock* codeBlock, JITCompilationEffort effort, CodePtr* functionEntryArityCheck = 0)
+    {
+        return JIT(vm, codeBlock).privateCompile(functionEntryArityCheck, effort);
+    }
+
+    static void compileClosureCall(VM* vm, CallLinkInfo* callLinkInfo, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
+    {
+        JIT jit(vm, callerCodeBlock);
         jit.m_bytecodeOffset = callLinkInfo->codeOrigin.bytecodeIndex;
         jit.privateCompileClosureCall(callLinkInfo, calleeCodeBlock, expectedStructure, expectedExecutable, codePtr);
     }

-    static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compileGetByIdProto(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
+    {
+        JIT jit(vm, codeBlock);
         jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
         jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
     }

-    static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compileGetByIdSelfList(VM* vm, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
+    {
+        JIT jit(vm, codeBlock);
         jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
         jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
     }
-    static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compileGetByIdProtoList(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
+    {
+        JIT jit(vm, codeBlock);
         jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
         jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
     }
-    static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compileGetByIdChainList(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
+    {
+        JIT jit(vm, codeBlock);
         jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
         jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
     }

-    static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compileGetByIdChain(VM* vm, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
+    {
+        JIT jit(vm, codeBlock);
         jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
         jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
     }

-    static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compilePutByIdTransition(VM* vm, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
+    {
+        JIT jit(vm, codeBlock);
         jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
         jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
     }

-    static void compileGetByVal(JSGlobalData* globalData, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compileGetByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
+    {
+        JIT jit(vm, codeBlock);
         jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
         jit.privateCompileGetByVal(byValInfo, returnAddress, arrayMode);
     }

-    static void compilePutByVal(JSGlobalData* globalData, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compilePutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
+    {
+        JIT jit(vm, codeBlock);
         jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
         jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
     }

-    static CodeRef compileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
-    {
-        if (!globalData->canUseJIT()) {
+    static CodeRef compileCTINativeCall(VM* vm, NativeFunction func)
+    {
+        if (!vm->canUseJIT()) {
 #if ENABLE(LLINT)
             return CodeRef::createLLIntCodeRef(llint_native_call_trampoline);
…
 #endif
         }
-        JIT jit(globalData, 0);
-        return jit.privateCompileCTINativeCall(globalData, func);
+        JIT jit(vm, 0);
+        return jit.privateCompileCTINativeCall(vm, func);
     }
…
     static void patchPutByIdReplace(CodeBlock*, StructureStubInfo*, Structure*, PropertyOffset cachedOffset, ReturnAddressPtr, bool direct);

-    static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
-    {
-        JIT jit(globalData, codeBlock);
+    static void compilePatchGetArrayLength(VM* vm, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
+    {
+        JIT jit(vm, codeBlock);
 #if ENABLE(DFG_JIT)
         // Force profiling to be enabled during stub generation.
…
     }

-    static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, JSGlobalData*, CodeSpecializationKind);
+    static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, VM*, CodeSpecializationKind);
     static void linkSlowCall(CodeBlock* callerCodeBlock, CallLinkInfo*);

 private:
-    JIT(JSGlobalData*, CodeBlock* = 0);
+    JIT(VM*, CodeBlock* = 0);

     void privateCompileMainPass();
…
     void privateCompilePutByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);

-    Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
-    CodeRef privateCompileCTINativeCall(JSGlobalData*, NativeFunction);
+    Label privateCompileCTINativeCall(VM*, bool isConstruct = false);
+    CodeRef privateCompileCTINativeCall(VM*, NativeFunction);
     void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
…
     Interpreter* m_interpreter;
-    JSGlobalData* m_globalData;
+    VM* m_vm;
     CodeBlock* m_codeBlock;
…
     RefPtr<Profiler::Compilation> m_compilation;
     WeakRandom m_randomGenerator;
-    static CodeRef stringGetByValStubGenerator(JSGlobalData*);
+    static CodeRef stringGetByValStubGenerator(VM*);

 #if ENABLE(VALUE_PROFILER)
trunk/Source/JavaScriptCore/jit/JITCall.cpp
(diff from r145933 to r148696)

     // regT1: newCallFrame

-    slowCase.append(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->stack().addressOfEnd()), regT1));
+    slowCase.append(branchPtr(Below, AbsoluteAddress(m_vm->interpreter->stack().addressOfEnd()), regT1));

     // Initialize ArgumentCount.
…
     emitGetFromCallFrameHeader64(JSStack::Callee, regT0);
-    emitNakedCall(m_globalData->getCTIStub(virtualCallGenerator).code());
+    emitNakedCall(m_vm->getCTIStub(virtualCallGenerator).code());

     sampleCodeBlock(m_codeBlock);
…
     linkSlowCase(iter);

-    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->getCTIStub(linkConstructGenerator).code() : m_globalData->getCTIStub(linkCallGenerator).code());
+    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(linkConstructGenerator).code() : m_vm->getCTIStub(linkCallGenerator).code());

     sampleCodeBlock(m_codeBlock);
…
     Jump slow = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
     patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
-    patchBuffer.link(slow, CodeLocationLabel(m_globalData->getCTIStub(virtualCallGenerator).code()));
+    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallGenerator).code()));

     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
…
             codePtr.executableAddress(),
             toCString(pointerDump(calleeCodeBlock)).data())),
-        *m_globalData, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
+        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
         callLinkInfo->codeOrigin));
…
         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
         CodeLocationLabel(stubRoutine->code().code()));
-    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_globalData->getCTIStub(virtualCallGenerator).code());
+    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallGenerator).code());

     callLinkInfo->stub = stubRoutine.release();
trunk/Source/JavaScriptCore/jit/JITCall32_64.cpp
(diff from r145933 to r148696)

     // regT3: newCallFrame

-    slowCase.append(branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->stack().addressOfEnd()), regT3));
+    slowCase.append(branchPtr(Below, AbsoluteAddress(m_vm->interpreter->stack().addressOfEnd()), regT3));

     // Initialize ArgumentCount.
…
     emitLoad(JSStack::Callee, regT1, regT0);
-    emitNakedCall(m_globalData->getCTIStub(virtualCallGenerator).code());
+    emitNakedCall(m_vm->getCTIStub(virtualCallGenerator).code());

     sampleCodeBlock(m_codeBlock);
…
     linkSlowCase(iter);

-    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->getCTIStub(linkConstructGenerator).code() : m_globalData->getCTIStub(linkCallGenerator).code());
+    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(linkConstructGenerator).code() : m_vm->getCTIStub(linkCallGenerator).code());

     sampleCodeBlock(m_codeBlock);
…
     Jump slow = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
     patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
-    patchBuffer.link(slow, CodeLocationLabel(m_globalData->getCTIStub(virtualCallGenerator).code()));
+    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallGenerator).code()));

     RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
…
             codePtr.executableAddress(),
             toCString(pointerDump(calleeCodeBlock)).data())),
-        *m_globalData, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
+        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
         callLinkInfo->codeOrigin));
…
         RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
         CodeLocationLabel(stubRoutine->code().code()));
-    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_globalData->getCTIStub(virtualCallGenerator).code());
+    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallGenerator).code());

     callLinkInfo->stub = stubRoutine.release();
trunk/Source/JavaScriptCore/jit/JITCode.h
(diff from r140718 to r148696)

 #if ENABLE(JIT)
-    class JSGlobalData;
+    class VM;
     class JSStack;
 #endif
…
 #if ENABLE(JIT)
         // Execute the code!
-        inline JSValue execute(JSStack* stack, CallFrame* callFrame, JSGlobalData* globalData)
-        {
-            JSValue result = JSValue::decode(ctiTrampoline(m_ref.code().executableAddress(), stack, callFrame, 0, 0, globalData));
-            return globalData->exception ? jsNull() : result;
+        inline JSValue execute(JSStack* stack, CallFrame* callFrame, VM* vm)
+        {
+            JSValue result = JSValue::decode(ctiTrampoline(m_ref.code().executableAddress(), stack, callFrame, 0, 0, vm));
+            return vm->exception ? jsNull() : result;
         }
 #endif
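JITCode::execute() above is where compiled code is actually entered: it calls through ctiTrampoline and signals failure by leaving the exception on the VM. A hypothetical driver-side call, using the stack accessor that appears elsewhere in this changeset:

    // Hypothetical caller; vm and callFrame come from the surrounding driver code.
    JSValue result = jitCode.execute(&vm->interpreter->stack(), callFrame, vm);
    if (vm->exception) {
        // execute() returned jsNull(); the real outcome is the exception recorded on the VM.
    }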
trunk/Source/JavaScriptCore/jit/JITDriver.h
(diff from r133688 to r148696)

 inline bool jitCompileIfAppropriate(ExecState* exec, OwnPtr<CodeBlockType>& codeBlock, JITCode& jitCode, JITCode::JITType jitType, unsigned bytecodeIndex, JITCompilationEffort effort)
 {
-    JSGlobalData& globalData = exec->globalData();
+    VM& vm = exec->vm();

     if (jitType == codeBlock->getJITType())
         return true;

-    if (!globalData.canUseJIT())
+    if (!vm.canUseJIT())
         return true;
…
         return false;
     }
-    jitCode = JIT::compile(&globalData, codeBlock.get(), effort);
+    jitCode = JIT::compile(&vm, codeBlock.get(), effort);
     if (!jitCode) {
         jitCode = oldJITCode;
…
 inline bool jitCompileFunctionIfAppropriate(ExecState* exec, OwnPtr<FunctionCodeBlock>& codeBlock, JITCode& jitCode, MacroAssemblerCodePtr& jitCodeWithArityCheck, JITCode::JITType jitType, unsigned bytecodeIndex, JITCompilationEffort effort)
 {
-    JSGlobalData& globalData = exec->globalData();
+    VM& vm = exec->vm();

     if (jitType == codeBlock->getJITType())
         return true;

-    if (!globalData.canUseJIT())
+    if (!vm.canUseJIT())
         return true;
…
         return false;
     }
-    jitCode = JIT::compile(&globalData, codeBlock.get(), effort, &jitCodeWithArityCheck);
+    jitCode = JIT::compile(&vm, codeBlock.get(), effort, &jitCodeWithArityCheck);
     if (!jitCode) {
         jitCode = oldJITCode;
trunk/Source/JavaScriptCore/jit/JITExceptions.cpp
(diff from r140718 to r148696)

 #include "Interpreter.h"
 #include "JSCJSValue.h"
-#include "JSGlobalData.h"
+#include "VM.h"
 #include "Operations.h"
…
 namespace JSC {

-ExceptionHandler genericThrow(JSGlobalData* globalData, ExecState* callFrame, JSValue exceptionValue, unsigned vPCIndex)
+ExceptionHandler genericThrow(VM* vm, ExecState* callFrame, JSValue exceptionValue, unsigned vPCIndex)
 {
     RELEASE_ASSERT(exceptionValue);

-    globalData->exception = JSValue();
-    HandlerInfo* handler = globalData->interpreter->throwException(callFrame, exceptionValue, vPCIndex); // This may update callFrame & exceptionValue!
-    globalData->exception = exceptionValue;
+    vm->exception = JSValue();
+    HandlerInfo* handler = vm->interpreter->throwException(callFrame, exceptionValue, vPCIndex); // This may update callFrame & exceptionValue!
+    vm->exception = exceptionValue;

     void* catchRoutine;
…
         catchRoutine = FunctionPtr(LLInt::getCodePtr(ctiOpThrowNotCaught)).value();

-    globalData->callFrameForThrow = callFrame;
-    globalData->targetMachinePCForThrow = catchRoutine;
-    globalData->targetInterpreterPCForThrow = catchPCForInterpreter;
+    vm->callFrameForThrow = callFrame;
+    vm->targetMachinePCForThrow = catchRoutine;
+    vm->targetInterpreterPCForThrow = catchPCForInterpreter;

     RELEASE_ASSERT(catchRoutine);
…
 }

-ExceptionHandler jitThrow(JSGlobalData* globalData, ExecState* callFrame, JSValue exceptionValue, ReturnAddressPtr faultLocation)
+ExceptionHandler jitThrow(VM* vm, ExecState* callFrame, JSValue exceptionValue, ReturnAddressPtr faultLocation)
 {
-    return genericThrow(globalData, callFrame, exceptionValue, callFrame->codeBlock()->bytecodeOffset(callFrame, faultLocation));
+    return genericThrow(vm, callFrame, exceptionValue, callFrame->codeBlock()->bytecodeOffset(callFrame, faultLocation));
 }
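genericThrow() above is the shared unwind path: it asks the interpreter for a handler (which may rewrite callFrame and exceptionValue), then records where to resume both in the returned ExceptionHandler and on the VM (callFrameForThrow, targetMachinePCForThrow, targetInterpreterPCForThrow). A rough sketch of a consumer; the ExceptionHandler field names below are assumptions, since the struct body is not shown in this diff:

    // Rough sketch of a throw stub using jitThrow(); field names below are assumed.
    ExceptionHandler handler = jitThrow(vm, callFrame, exceptionValue, faultLocation);
    // The throw trampoline resumes at handler.catchRoutine with handler.callFrame live;
    // the same targets are mirrored into vm->targetMachinePCForThrow / vm->callFrameForThrow.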
trunk/Source/JavaScriptCore/jit/JITExceptions.h
(diff from r140718 to r148696)

 class ExecState;
-class JSGlobalData;
+class VM;

 // This header gives other parts of the system access to the JIT's prototocol
…
 };

-ExceptionHandler genericThrow(JSGlobalData*, ExecState*, JSValue exceptionValue, unsigned vPCIndex);
+ExceptionHandler genericThrow(VM*, ExecState*, JSValue exceptionValue, unsigned vPCIndex);

-ExceptionHandler jitThrow(JSGlobalData*, ExecState*, JSValue exceptionValue, ReturnAddressPtr faultLocation);
+ExceptionHandler jitThrow(VM*, ExecState*, JSValue exceptionValue, ReturnAddressPtr faultLocation);

 } // namespace JSC
trunk/Source/JavaScriptCore/jit/JITInlines.h
(diff from r146869 to r148696)

 ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
 {
-    failures.append(branchPtr(NotEqual, Address(src, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
+    failures.append(branchPtr(NotEqual, Address(src, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
     failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
     loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
…
 #endif
     }
-    storePtr(callFrameRegister, &m_globalData->topCallFrame);
+    storePtr(callFrameRegister, &m_vm->topCallFrame);
 }
trunk/Source/JavaScriptCore/jit/JITOpcodes.cpp
(diff from r147184 to r148696)

 #if USE(JSVALUE64)

-JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction)
-{
-    return globalData->getCTIStub(nativeCallGenerator);
+JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
+{
+    return vm->getCTIStub(nativeCallGenerator);
 }
…
     Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
     size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
-    MarkedAllocator* allocator = &m_globalData->heap.allocatorForObjectWithoutDestructor(allocationSize);
+    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

     RegisterID resultReg = regT0;
…
     Jump isImm = emitJumpIfNotJSCell(regT0);
-    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
+    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
     isImm.link(this);
…
     killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
     move(regT0, callFrameRegister);
-    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
-    load64(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
-    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
+    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, vm) / sizeof(void*));
+    load64(Address(regT3, OBJECT_OFFSETOF(VM, exception)), regT0);
+    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(VM, exception)));
     emitPutVirtualRegister(currentInstruction[1].u.operand);
 }
…
         emitValueProfilingSite();
     }
-    addSlowCase(branchPtr(Equal, Address(regT1, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
+    addSlowCase(branchPtr(Equal, Address(regT1, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
 }
…
     linkSlowCase(iter);
     if (shouldEmitProfiling())
-        move(TrustedImm64(JSValue::encode(m_globalData->stringStructure.get())), regT0);
+        move(TrustedImm64(JSValue::encode(m_vm->stringStructure.get())), regT0);
     isNotUndefined.link(this);
     emitValueProfilingSite();
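The op_catch hunk above reads the VM pointer that the stub code keeps in the JITStackFrame, fetches the pending exception, and clears the slot. Written out as plain C++ for orientation (the accessors are hypothetical; the real code works through peek/load64/store64 as shown):

    // What the emitted op_catch code does, in C++ terms (hypothetical accessors).
    VM* vm = jitStackFrame.vm;       // peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, vm) ...)
    JSValue caught = vm->exception;  // load64(Address(regT3, OBJECT_OFFSETOF(VM, exception)), regT0)
    vm->exception = JSValue();       // store64(TrustedImm64(JSValue::encode(JSValue())), ...)
    // "caught" is then written to the catch opcode's destination virtual register.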
trunk/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
(diff from r147184 to r148696)

 namespace JSC {

-JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
+JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
 {
     Call nativeCall;

     emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
-    storePtr(callFrameRegister, &m_globalData->topCallFrame);
+    storePtr(callFrameRegister, &m_vm->topCallFrame);

 #if CPU(X86)
…
     // Check for an exception
-    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
+    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&vm->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

     // Return.
…
     preserveReturnAddressAfterCall(regT1);

-    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
+    move(TrustedImmPtr(&vm->exceptionLocation), regT2);
     storePtr(regT1, regT2);
     poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

-    storePtr(callFrameRegister, &m_globalData->topCallFrame);
+    storePtr(callFrameRegister, &m_vm->topCallFrame);
     // Set the return address.
     move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
…
     // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
-    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);
+    LinkBuffer patchBuffer(*m_vm, this, GLOBAL_THUNK_ID);

     patchBuffer.link(nativeCall, FunctionPtr(func));
…
     Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
     size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
-    MarkedAllocator* allocator = &m_globalData->heap.allocatorForObjectWithoutDestructor(allocationSize);
+    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

     RegisterID resultReg = regT0;
…
     Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
-    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
+    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
     isImm.link(this);
…
     linkSlowCase(iter); // tags equal and JSCell
-    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
-    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
+    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
+    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

     // String case.
…
     linkSlowCase(iter); // tags equal and JSCell
-    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
-    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
+    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
+    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

     // String case.
…
     // Jump to a slow case if both are strings.
     Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
-    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get()));
-    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
+    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
+    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
     notCell.link(this);
     firstNotString.link(this);
…
     // Now store the exception returned by cti_op_throw.
-    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
-    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
-    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
-    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
-    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
+    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, vm)), regT3);
+    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
+    load32(Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
+    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
+    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(VM, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

     unsigned exception = currentInstruction[1].u.operand;
…
         emitValueProfilingSite();
     }
-    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
+    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
 }
…
     if (shouldEmitProfiling()) {
         move(TrustedImm32(JSValue::CellTag), regT1);
-        move(TrustedImmPtr(m_globalData->stringStructure.get()), regT0);
+        move(TrustedImmPtr(m_vm->stringStructure.get()), regT0);
     }
     isNotUndefined.link(this);
trunk/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
(diff from r147047 to r148696)

 #if USE(JSVALUE64)

-JIT::CodeRef JIT::stringGetByValStubGenerator(JSGlobalData* globalData)
+JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
 {
     JSInterfaceJIT jit;
     JumpList failures;
-    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(globalData->stringStructure.get())));
+    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(vm->stringStructure.get())));

     // Load string length to regT2, and start the process of loading the data pointer into regT0
…
     failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
-    jit.move(TrustedImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
+    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
     jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
     jit.ret();
…
     jit.ret();

-    LinkBuffer patchBuffer(*globalData, &jit, GLOBAL_THUNK_ID);
+    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
     return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
 }
…
     Jump nonCell = jump();
     linkSlowCase(iter); // base array check
-    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get()));
-    emitNakedCall(CodeLocationLabel(m_globalData->getCTIStub(stringGetByValStubGenerator).code()));
+    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
+    emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
     Jump failed = branchTest64(Zero, regT0);
     emitPutVirtualRegister(dst, regT0);
…
     emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

-    if (*ident == m_globalData->propertyNames->length && shouldEmitProfiling()) {
+    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling()) {
         loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
         emitArrayProfilingSiteForBytecodeIndex(regT1, regT2, m_bytecodeOffset);
…
     Call failureCall = tailRecursiveCall();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
…
             ("Baseline put_by_id transition for %s, return point %p",
                 toCString(*m_codeBlock).data(), returnAddress.value())),
-        *m_globalData,
+        *m_vm,
         m_codeBlock->ownerExecutable(),
         willNeedStorageRealloc,
…
     Jump success = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     // Use the patch information to link the failure cases back to the original slow case routine.
…
     compileGetDirectOffset(protoObject, regT0, cachedOffset);
     Jump success = jump();
-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     // Use the patch information to link the failure cases back to the original slow case routine.
…
                 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                     stubInfo->patch.baseline.u.get.putResult).executableAddress())),
-        *m_globalData,
+        *m_vm,
         m_codeBlock->ownerExecutable(),
         needsStubLink);
…
     Jump success = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     if (needsStubLink) {
…
                 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                     stubInfo->patch.baseline.u.get.putResult).executableAddress())),
-        *m_globalData,
+        *m_vm,
         m_codeBlock->ownerExecutable(),
         needsStubLink);

-    polymorphicStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, isDirect);
+    polymorphicStructures->list[currentIndex].set(*m_vm, m_codeBlock->ownerExecutable(), stubCode, structure, isDirect);

     // Finally patch the jump to slow case back in the hot path to jump here instead.
…
     Jump success = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     if (needsStubLink) {
…
                 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                     stubInfo->patch.baseline.u.get.putResult).executableAddress())),
-        *m_globalData,
+        *m_vm,
         m_codeBlock->ownerExecutable(),
         needsStubLink);
-    prototypeStructures->list[currentIndex].set(*m_globalData, m_codeBlock->ownerExecutable(), stubCode, structure, prototypeStructure, isDirect);
+    prototypeStructures->list[currentIndex].set(*m_vm, m_codeBlock->ownerExecutable(), stubCode, structure, prototypeStructure, isDirect);

     // Finally patch the jump to slow case back in the hot path to jump here instead.
…
     Jump success = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     if (needsStubLink) {
…
                 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                     stubInfo->patch.baseline.u.get.putResult).executableAddress())),
-        *m_globalData,
+        *m_vm,
         m_codeBlock->ownerExecutable(),
         needsStubLink);

     // Track the stub we have created so that it will be deleted later.
-    prototypeStructures->list[currentIndex].set(callFrame->globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);
+    prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);

     // Finally patch the jump to slow case back in the hot path to jump here instead.
…
     Jump success = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     if (needsStubLink) {
…
                 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                     stubInfo->patch.baseline.u.get.putResult).executableAddress())),
-        *m_globalData,
+        *m_vm,
         m_codeBlock->ownerExecutable(),
         needsStubLink);
…
         break;
     case JITInt8Array:
-        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->int8ArrayDescriptor(), 1, SignedTypedArray);
+        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->int8ArrayDescriptor(), 1, SignedTypedArray);
         break;
     case JITInt16Array:
-        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->int16ArrayDescriptor(), 2, SignedTypedArray);
+        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->int16ArrayDescriptor(), 2, SignedTypedArray);
         break;
     case JITInt32Array:
-        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->int32ArrayDescriptor(), 4, SignedTypedArray);
+        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->int32ArrayDescriptor(), 4, SignedTypedArray);
         break;
     case JITUint8Array:
-        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->uint8ArrayDescriptor(), 1, UnsignedTypedArray);
+        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->uint8ArrayDescriptor(), 1, UnsignedTypedArray);
         break;
     case JITUint8ClampedArray:
-        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->uint8ClampedArrayDescriptor(), 1, UnsignedTypedArray);
+        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->uint8ClampedArrayDescriptor(), 1, UnsignedTypedArray);
         break;
     case JITUint16Array:
-        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->uint16ArrayDescriptor(), 2, UnsignedTypedArray);
+        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->uint16ArrayDescriptor(), 2, UnsignedTypedArray);
         break;
     case JITUint32Array:
-        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_globalData->uint32ArrayDescriptor(), 4, UnsignedTypedArray);
+        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->uint32ArrayDescriptor(), 4, UnsignedTypedArray);
         break;
     case JITFloat32Array:
-        slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, m_globalData->float32ArrayDescriptor(), 4);
+        slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, m_vm->float32ArrayDescriptor(), 4);
         break;
     case JITFloat64Array:
-        slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, m_globalData->float64ArrayDescriptor(), 8);
+        slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, m_vm->float64ArrayDescriptor(), 8);
         break;
     default:
…
     Jump done = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     patchBuffer.link(badType,
         CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
…
         break;
     case JITInt8Array:
-        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_globalData->int8ArrayDescriptor(), 1, SignedTypedArray, TruncateRounding);
+        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->int8ArrayDescriptor(), 1, SignedTypedArray, TruncateRounding);
         break;
     case JITInt16Array:
-        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_globalData->int16ArrayDescriptor(), 2, SignedTypedArray, TruncateRounding);
+        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->int16ArrayDescriptor(), 2, SignedTypedArray, TruncateRounding);
         break;
     case JITInt32Array:
-        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_globalData->int32ArrayDescriptor(), 4, SignedTypedArray, TruncateRounding);
+        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->int32ArrayDescriptor(), 4, SignedTypedArray, TruncateRounding);
         break;
     case JITUint8Array:
-        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_globalData->uint8ArrayDescriptor(), 1, UnsignedTypedArray, TruncateRounding);
+        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->uint8ArrayDescriptor(), 1, UnsignedTypedArray, TruncateRounding);
         break;
     case JITUint8ClampedArray:
-        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_globalData->uint8ClampedArrayDescriptor(), 1, UnsignedTypedArray, ClampRounding);
+        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->uint8ClampedArrayDescriptor(), 1, UnsignedTypedArray, ClampRounding);
         break;
     case JITUint16Array:
-        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_globalData->uint16ArrayDescriptor(), 2, UnsignedTypedArray, TruncateRounding);
+        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->uint16ArrayDescriptor(), 2, UnsignedTypedArray, TruncateRounding);
         break;
     case JITUint32Array:
-        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_globalData->uint32ArrayDescriptor(), 4, UnsignedTypedArray, TruncateRounding);
+        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->uint32ArrayDescriptor(), 4, UnsignedTypedArray, TruncateRounding);
         break;
     case JITFloat32Array:
-        slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, m_globalData->float32ArrayDescriptor(), 4);
+        slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, m_vm->float32ArrayDescriptor(), 4);
         break;
     case JITFloat64Array:
-        slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, m_globalData->float64ArrayDescriptor(), 8);
+        slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, m_vm->float64ArrayDescriptor(), 8);
         break;
     default:
…
     Jump done = jump();

-    LinkBuffer patchBuffer(*m_globalData, this, m_codeBlock);
+    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

     patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
trunk/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
r145000 r148696 93 93 } 94 94 95 JIT::CodeRef JIT::stringGetByValStubGenerator( JSGlobalData* globalData)95 JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm) 96 96 { 97 97 JSInterfaceJIT jit; 98 98 JumpList failures; 99 failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr( globalData->stringStructure.get())));99 failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(vm->stringStructure.get()))); 100 100 101 101 // Load string length to regT1, and start the process of loading the data pointer into regT0 … … 122 122 123 123 failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100))); 124 jit.move(TrustedImmPtr( globalData->smallStrings.singleCharacterStrings()), regT1);124 jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1); 125 125 jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0); 126 126 jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe … … 131 131 jit.ret(); 132 132 133 LinkBuffer patchBuffer(* globalData, &jit, GLOBAL_THUNK_ID);133 LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID); 134 134 return FINALIZE_CODE(patchBuffer, ("String get_by_val stub")); 135 135 } … … 250 250 Jump nonCell = jump(); 251 251 linkSlowCase(iter); // base array check 252 Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_ globalData->stringStructure.get()));253 emitNakedCall(m_ globalData->getCTIStub(stringGetByValStubGenerator).code());252 Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())); 253 emitNakedCall(m_vm->getCTIStub(stringGetByValStubGenerator).code()); 254 254 Jump failed = branchTestPtr(Zero, regT0); 255 255 emitStore(dst, regT1, regT0); … … 478 478 // to jump back to if one of these trampolies finds a match. 479 479 480 if (*ident == m_ globalData->propertyNames->length && shouldEmitProfiling()) {480 if (*ident == m_vm->propertyNames->length && shouldEmitProfiling()) { 481 481 loadPtr(Address(regT0, JSCell::structureOffset()), regT2); 482 482 emitArrayProfilingSiteForBytecodeIndex(regT2, regT3, m_bytecodeOffset); … … 705 705 Call failureCall = tailRecursiveCall(); 706 706 707 LinkBuffer patchBuffer(*m_ globalData, this, m_codeBlock);707 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock); 708 708 709 709 patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail)); … … 719 719 ("Baseline put_by_id transition stub for %s, return point %p", 720 720 toCString(*m_codeBlock).data(), returnAddress.value())), 721 *m_ globalData,721 *m_vm, 722 722 m_codeBlock->ownerExecutable(), 723 723 willNeedStorageRealloc, … … 777 777 Jump success = jump(); 778 778 779 LinkBuffer patchBuffer(*m_ globalData, this, m_codeBlock);779 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock); 780 780 781 781 // Use the patch information to link the failure cases back to the original slow case routine. … … 840 840 Jump success = jump(); 841 841 842 LinkBuffer patchBuffer(*m_ globalData, this, m_codeBlock);842 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock); 843 843 844 844 // Use the patch information to link the failure cases back to the original slow case routine. 
… … 865 865 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset( 866 866 stubInfo->patch.baseline.u.get.putResult).executableAddress())), 867 *m_ globalData,867 *m_vm, 868 868 m_codeBlock->ownerExecutable(), 869 869 needsStubLink); … … 908 908 Jump success = jump(); 909 909 910 LinkBuffer patchBuffer(*m_ globalData, this, m_codeBlock);910 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock); 911 911 if (needsStubLink) { 912 912 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) { … … 931 931 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset( 932 932 stubInfo->patch.baseline.u.get.putResult).executableAddress())), 933 *m_ globalData,933 *m_vm, 934 934 m_codeBlock->ownerExecutable(), 935 935 needsStubLink); 936 936 937 polymorphicStructures->list[currentIndex].set(*m_ globalData, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);937 polymorphicStructures->list[currentIndex].set(*m_vm, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect); 938 938 939 939 // Finally patch the jump to slow case back in the hot path to jump here instead. … … 982 982 Jump success = jump(); 983 983 984 LinkBuffer patchBuffer(*m_ globalData, this, m_codeBlock);984 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock); 985 985 if (needsStubLink) { 986 986 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) { … … 1004 1004 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset( 1005 1005 stubInfo->patch.baseline.u.get.putResult).executableAddress())), 1006 *m_ globalData,1006 *m_vm, 1007 1007 m_codeBlock->ownerExecutable(), 1008 1008 needsStubLink); 1009 1009 1010 prototypeStructures->list[currentIndex].set(callFrame-> globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);1010 prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect); 1011 1011 1012 1012 // Finally patch the jump to slow case back in the hot path to jump here instead. … … 1061 1061 Jump success = jump(); 1062 1062 1063 LinkBuffer patchBuffer(*m_ globalData, this, m_codeBlock);1063 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock); 1064 1064 if (needsStubLink) { 1065 1065 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) { … … 1082 1082 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset( 1083 1083 stubInfo->patch.baseline.u.get.putResult).executableAddress())), 1084 *m_ globalData,1084 *m_vm, 1085 1085 m_codeBlock->ownerExecutable(), 1086 1086 needsStubLink); 1087 1087 1088 1088 // Track the stub we have created so that it will be deleted later. 1089 prototypeStructures->list[currentIndex].set(callFrame-> globalData(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);1089 prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect); 1090 1090 1091 1091 // Finally patch the jump to slow case back in the hot path to jump here instead. 
… … 1136 1136 Jump success = jump(); 1137 1137 1138 LinkBuffer patchBuffer(*m_ globalData, this, m_codeBlock);1138 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock); 1139 1139 if (needsStubLink) { 1140 1140 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) { … … 1156 1156 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset( 1157 1157 stubInfo->patch.baseline.u.get.putResult).executableAddress())), 1158 *m_ globalData,1158 *m_vm, 1159 1159 m_codeBlock->ownerExecutable(), 1160 1160 needsStubLink); -
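stringGetByValStubGenerator above now reads the single-character string cache off the VM: character codes below 0x100 are served from vm->smallStrings.singleCharacterStrings(), and a null entry (checked with branchTestPtr after the naked call) falls through to the slow path. A simplified, self-contained model of that fast path; the types and the helper function are stand-ins, only the smallStrings/singleCharacterStrings names come from the hunk:

    #include <cstdint>

    struct JSString {};                       // stand-in

    struct SmallStrings {
        JSString* m_singleCharacterStrings[0x100] = {};
        JSString* const* singleCharacterStrings() const { return m_singleCharacterStrings; }
    };

    struct VM { SmallStrings smallStrings; }; // stand-in for JSC::VM

    // Returns the cached cell, or null to signal "take the slow path", matching the
    // branchTestPtr(Zero, regT0) check in the generated stub.
    JSString* singleCharacterStringFastPath(VM* vm, uint32_t characterCode)
    {
        if (characterCode >= 0x100)
            return nullptr;
        return vm->smallStrings.singleCharacterStrings()[characterCode];
    }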
trunk/Source/JavaScriptCore/jit/JITStubs.cpp
r148663 r148696 229 229 extern "C" { 230 230 231 __declspec(naked) EncodedJSValue ctiTrampoline(void* code, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, JSGlobalData*)231 __declspec(naked) EncodedJSValue ctiTrampoline(void* code, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, VM*) 232 232 { 233 233 __asm { … … 292 292 #elif CPU(SH4) 293 293 #define SYMBOL_STRING(name) #name 294 /* code (r4), JSStack* (r5), CallFrame* (r6), void* unused1 (r7), void* unused2(sp), JSGlobalData(sp)*/294 /* code (r4), JSStack* (r5), CallFrame* (r6), void* unused1 (r7), void* unused2(sp), VM (sp)*/ 295 295 296 296 asm volatile ( … … 546 546 "move $25,$4 # move executableAddress to t9" "\n" 547 547 "sw $5," STRINGIZE_VALUE_OF(REGISTER_FILE_OFFSET) "($29) # store JSStack to current stack" "\n" 548 "lw $9," STRINGIZE_VALUE_OF(STACK_LENGTH + 20) "($29) # load globalDatafrom previous stack" "\n"548 "lw $9," STRINGIZE_VALUE_OF(STACK_LENGTH + 20) "($29) # load vm from previous stack" "\n" 549 549 "jalr $25" "\n" 550 "sw $9," STRINGIZE_VALUE_OF(GLOBAL_DATA_OFFSET) "($29) # store globalDatato current stack" "\n"550 "sw $9," STRINGIZE_VALUE_OF(GLOBAL_DATA_OFFSET) "($29) # store vm to current stack" "\n" 551 551 "lw $16," STRINGIZE_VALUE_OF(PRESERVED_S0_OFFSET) "($29)" "\n" 552 552 "lw $17," STRINGIZE_VALUE_OF(PRESERVED_S1_OFFSET) "($29)" "\n" … … 755 755 #elif COMPILER(RVCT) && CPU(ARM_THUMB2) 756 756 757 __asm EncodedJSValue ctiTrampoline(void*, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, JSGlobalData*)757 __asm EncodedJSValue ctiTrampoline(void*, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, VM*) 758 758 { 759 759 PRESERVE8 … … 823 823 #elif COMPILER(RVCT) && CPU(ARM_TRADITIONAL) 824 824 825 __asm EncodedJSValue ctiTrampoline(void*, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, JSGlobalData*)825 __asm EncodedJSValue ctiTrampoline(void*, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, VM*) 826 826 { 827 827 ARM … … 865 865 866 866 #if ENABLE(OPCODE_SAMPLING) 867 #define CTI_SAMPLER stackFrame. globalData->interpreter->sampler()867 #define CTI_SAMPLER stackFrame.vm->interpreter->sampler() 868 868 #else 869 869 #define CTI_SAMPLER 0 870 870 #endif 871 871 872 void performPlatformSpecificJITAssertions( JSGlobalData* globalData)873 { 874 if (! 
globalData->canUseJIT())872 void performPlatformSpecificJITAssertions(VM* vm) 873 { 874 if (!vm->canUseJIT()) 875 875 return; 876 876 … … 909 909 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET); 910 910 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, stack) == REGISTER_FILE_OFFSET); 911 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, globalData) == GLOBAL_DATA_OFFSET);911 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, vm) == GLOBAL_DATA_OFFSET); 912 912 913 913 #endif … … 958 958 StructureChain* prototypeChain = structure->prototypeChain(callFrame); 959 959 ASSERT(structure->previousID()->transitionWatchpointSetHasBeenInvalidated()); 960 stubInfo->initPutByIdTransition(callFrame-> globalData(), codeBlock->ownerExecutable(), structure->previousID(), structure, prototypeChain, direct);961 JIT::compilePutByIdTransition(callFrame->scope()-> globalData(), codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);960 stubInfo->initPutByIdTransition(callFrame->vm(), codeBlock->ownerExecutable(), structure->previousID(), structure, prototypeChain, direct); 961 JIT::compilePutByIdTransition(callFrame->scope()->vm(), codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct); 962 962 return; 963 963 } 964 964 965 stubInfo->initPutByIdReplace(callFrame-> globalData(), codeBlock->ownerExecutable(), structure);965 stubInfo->initPutByIdReplace(callFrame->vm(), codeBlock->ownerExecutable(), structure); 966 966 967 967 JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct); … … 979 979 } 980 980 981 JSGlobalData* globalData = &callFrame->globalData();981 VM* vm = &callFrame->vm(); 982 982 983 983 if (isJSArray(baseValue) && propertyName == callFrame->propertyNames().length) { 984 JIT::compilePatchGetArrayLength(callFrame->scope()-> globalData(), codeBlock, returnAddress);984 JIT::compilePatchGetArrayLength(callFrame->scope()->vm(), codeBlock, returnAddress); 985 985 return; 986 986 } … … 989 989 // The tradeoff of compiling an patched inline string length access routine does not seem 990 990 // to pay off, so we currently only do this for arrays. 991 ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->getCTIStub(stringLengthTrampolineGenerator).code());991 ctiPatchCallByReturnAddress(codeBlock, returnAddress, vm->getCTIStub(stringLengthTrampolineGenerator).code()); 992 992 return; 993 993 } … … 1018 1018 else { 1019 1019 JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress); 1020 stubInfo->initGetByIdSelf(callFrame-> globalData(), codeBlock->ownerExecutable(), structure);1020 stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), structure); 1021 1021 } 1022 1022 return; … … 1044 1044 // should not be treated as a dictionary. 
1045 1045 if (slotBaseObject->structure()->isDictionary()) { 1046 slotBaseObject->flattenDictionaryObject(callFrame-> globalData());1047 offset = slotBaseObject->structure()->get(callFrame-> globalData(), propertyName);1046 slotBaseObject->flattenDictionaryObject(callFrame->vm()); 1047 offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName); 1048 1048 } 1049 1049 1050 stubInfo->initGetByIdProto(callFrame-> globalData(), codeBlock->ownerExecutable(), structure, slotBaseObject->structure(), slot.cachedPropertyType() == PropertySlot::Value);1050 stubInfo->initGetByIdProto(callFrame->vm(), codeBlock->ownerExecutable(), structure, slotBaseObject->structure(), slot.cachedPropertyType() == PropertySlot::Value); 1051 1051 1052 1052 ASSERT(!structure->isDictionary()); 1053 1053 ASSERT(!slotBaseObject->structure()->isDictionary()); 1054 JIT::compileGetByIdProto(callFrame->scope()-> globalData(), callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);1054 JIT::compileGetByIdProto(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress); 1055 1055 return; 1056 1056 } … … 1065 1065 1066 1066 StructureChain* prototypeChain = structure->prototypeChain(callFrame); 1067 stubInfo->initGetByIdChain(callFrame-> globalData(), codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.cachedPropertyType() == PropertySlot::Value);1068 JIT::compileGetByIdChain(callFrame->scope()-> globalData(), callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);1067 stubInfo->initGetByIdChain(callFrame->vm(), codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.cachedPropertyType() == PropertySlot::Value); 1068 JIT::compileGetByIdChain(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress); 1069 1069 } 1070 1070 … … 1116 1116 // good to keep the code size down by leaving as much of the exception 1117 1117 // handling code out of line as possible. 1118 static NEVER_INLINE void returnToThrowTrampoline( JSGlobalData* globalData, ReturnAddressPtr exceptionLocation, ReturnAddressPtr& returnAddressSlot)1119 { 1120 RELEASE_ASSERT( globalData->exception);1121 globalData->exceptionLocation = exceptionLocation;1118 static NEVER_INLINE void returnToThrowTrampoline(VM* vm, ReturnAddressPtr exceptionLocation, ReturnAddressPtr& returnAddressSlot) 1119 { 1120 RELEASE_ASSERT(vm->exception); 1121 vm->exceptionLocation = exceptionLocation; 1122 1122 returnAddressSlot = ReturnAddressPtr(FunctionPtr(ctiVMThrowTrampoline)); 1123 1123 } … … 1130 1130 #define VM_THROW_EXCEPTION_AT_END() \ 1131 1131 do {\ 1132 returnToThrowTrampoline(stackFrame. globalData, STUB_RETURN_ADDRESS, STUB_RETURN_ADDRESS);\1132 returnToThrowTrampoline(stackFrame.vm, STUB_RETURN_ADDRESS, STUB_RETURN_ADDRESS);\ 1133 1133 } while (0) 1134 1134 1135 1135 #define CHECK_FOR_EXCEPTION() \ 1136 1136 do { \ 1137 if (UNLIKELY(stackFrame. globalData->exception)) \1137 if (UNLIKELY(stackFrame.vm->exception)) \ 1138 1138 VM_THROW_EXCEPTION(); \ 1139 1139 } while (0) 1140 1140 #define CHECK_FOR_EXCEPTION_AT_END() \ 1141 1141 do { \ 1142 if (UNLIKELY(stackFrame. 
globalData->exception)) \1142 if (UNLIKELY(stackFrame.vm->exception)) \ 1143 1143 VM_THROW_EXCEPTION_AT_END(); \ 1144 1144 } while (0) 1145 1145 #define CHECK_FOR_EXCEPTION_VOID() \ 1146 1146 do { \ 1147 if (UNLIKELY(stackFrame. globalData->exception)) { \1147 if (UNLIKELY(stackFrame.vm->exception)) { \ 1148 1148 VM_THROW_EXCEPTION_AT_END(); \ 1149 1149 return; \ … … 1157 1157 { 1158 1158 CallFrame* callFrame = newCallFrame->callerFrame(); 1159 ASSERT(callFrame-> globalData().exception);1159 ASSERT(callFrame->vm().exception); 1160 1160 jitStackFrame.callFrame = callFrame; 1161 callFrame-> globalData().topCallFrame = callFrame;1162 returnToThrowTrampoline(&callFrame-> globalData(), ReturnAddressPtr(newCallFrame->returnPC()), returnAddressSlot);1161 callFrame->vm().topCallFrame = callFrame; 1162 returnToThrowTrampoline(&callFrame->vm(), ReturnAddressPtr(newCallFrame->returnPC()), returnAddressSlot); 1163 1163 return T(); 1164 1164 } … … 1166 1166 template<typename T> static T throwExceptionFromOpCall(JITStackFrame& jitStackFrame, CallFrame* newCallFrame, ReturnAddressPtr& returnAddressSlot, JSValue exception) 1167 1167 { 1168 newCallFrame->callerFrame()-> globalData().exception = exception;1168 newCallFrame->callerFrame()->vm().exception = exception; 1169 1169 return throwExceptionFromOpCall<T>(jitStackFrame, newCallFrame, returnAddressSlot); 1170 1170 } … … 1449 1449 STUB_INIT_STACK_FRAME(stackFrame); 1450 1450 CallFrame* callFrame = stackFrame.callFrame; 1451 JSGlobalData* globalData = stackFrame.globalData;1452 if (UNLIKELY( globalData->watchdog.didFire(callFrame))) {1453 globalData->exception = createTerminatedExecutionException(globalData);1451 VM* vm = stackFrame.vm; 1452 if (UNLIKELY(vm->watchdog.didFire(callFrame))) { 1453 vm->exception = createTerminatedExecutionException(vm); 1454 1454 VM_THROW_EXCEPTION_AT_END(); 1455 1455 return; … … 1491 1491 JSValue baseValue = stackFrame.args[0].jsValue(); 1492 1492 ASSERT(baseValue.isObject()); 1493 asObject(baseValue)->putDirect(stackFrame.callFrame-> globalData(), stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);1493 asObject(baseValue)->putDirect(stackFrame.callFrame->vm(), stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot); 1494 1494 CHECK_FOR_EXCEPTION_AT_END(); 1495 1495 } … … 1545 1545 ASSERT(baseValue.isObject()); 1546 1546 1547 asObject(baseValue)->putDirect(callFrame-> globalData(), ident, stackFrame.args[2].jsValue(), slot);1547 asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot); 1548 1548 1549 1549 if (accessType == static_cast<AccessType>(stubInfo->accessType)) { … … 1578 1578 JSValue baseValue = stackFrame.args[0].jsValue(); 1579 1579 ASSERT(baseValue.isObject()); 1580 asObject(baseValue)->putDirect(callFrame-> globalData(), ident, stackFrame.args[2].jsValue(), slot);1580 asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot); 1581 1581 1582 1582 CHECK_FOR_EXCEPTION_AT_END(); … … 1597 1597 ASSERT(baseValue.isObject()); 1598 1598 JSObject* base = asObject(baseValue); 1599 JSGlobalData& globalData = *stackFrame.globalData;1600 Butterfly* butterfly = base->growOutOfLineStorage( globalData, oldSize, newSize);1601 base->setButterfly( globalData, butterfly, newStructure);1599 VM& vm = *stackFrame.vm; 1600 Butterfly* butterfly = base->growOutOfLineStorage(vm, oldSize, newSize); 1601 base->setButterfly(vm, butterfly, newStructure); 1602 1602 1603 1603 return base; … … 1661 1661 1662 1662 if (stubInfo->accessType == 
access_unset) 1663 stubInfo->initGetByIdSelf(callFrame-> globalData(), codeBlock->ownerExecutable(), baseValue.asCell()->structure());1663 stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), baseValue.asCell()->structure()); 1664 1664 1665 1665 if (stubInfo->accessType == access_get_by_id_self) { 1666 1666 ASSERT(!stubInfo->stubRoutine); 1667 polymorphicStructureList = new PolymorphicAccessStructureList(callFrame-> globalData(), codeBlock->ownerExecutable(), 0, stubInfo->u.getByIdSelf.baseObjectStructure.get(), true);1667 polymorphicStructureList = new PolymorphicAccessStructureList(callFrame->vm(), codeBlock->ownerExecutable(), 0, stubInfo->u.getByIdSelf.baseObjectStructure.get(), true); 1668 1668 stubInfo->initGetByIdSelfList(polymorphicStructureList, 1); 1669 1669 } else { … … 1673 1673 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) { 1674 1674 stubInfo->u.getByIdSelfList.listSize++; 1675 JIT::compileGetByIdSelfList(callFrame->scope()-> globalData(), codeBlock, stubInfo, polymorphicStructureList, listIndex, baseValue.asCell()->structure(), ident, slot, slot.cachedOffset());1675 JIT::compileGetByIdSelfList(callFrame->scope()->vm(), codeBlock, stubInfo, polymorphicStructureList, listIndex, baseValue.asCell()->structure(), ident, slot, slot.cachedOffset()); 1676 1676 1677 1677 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1)) … … 1683 1683 } 1684 1684 1685 static PolymorphicAccessStructureList* getPolymorphicAccessStructureListSlot( JSGlobalData& globalData, ScriptExecutable* owner, StructureStubInfo* stubInfo, int& listIndex)1685 static PolymorphicAccessStructureList* getPolymorphicAccessStructureListSlot(VM& vm, ScriptExecutable* owner, StructureStubInfo* stubInfo, int& listIndex) 1686 1686 { 1687 1687 PolymorphicAccessStructureList* prototypeStructureList = 0; … … 1690 1690 switch (stubInfo->accessType) { 1691 1691 case access_get_by_id_proto: 1692 prototypeStructureList = new PolymorphicAccessStructureList( globalData, owner, stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure.get(), stubInfo->u.getByIdProto.prototypeStructure.get(), true);1692 prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure.get(), stubInfo->u.getByIdProto.prototypeStructure.get(), true); 1693 1693 stubInfo->stubRoutine.clear(); 1694 1694 stubInfo->initGetByIdProtoList(prototypeStructureList, 2); 1695 1695 break; 1696 1696 case access_get_by_id_chain: 1697 prototypeStructureList = new PolymorphicAccessStructureList( globalData, owner, stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure.get(), stubInfo->u.getByIdChain.chain.get(), true);1697 prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure.get(), stubInfo->u.getByIdChain.chain.get(), true); 1698 1698 stubInfo->stubRoutine.clear(); 1699 1699 stubInfo->initGetByIdProtoList(prototypeStructureList, 2); … … 1725 1725 JSValue result = call(callFrame, getter, callType, callData, stackFrame.args[1].jsObject(), ArgList()); 1726 1726 if (callFrame->hadException()) 1727 returnToThrowTrampoline(&callFrame-> globalData(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS);1727 returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS); 1728 1728 1729 1729 return JSValue::encode(result); … … 1739 1739 JSValue result = getter(callFrame, slotBase, ident); 1740 1740 if (callFrame->hadException()) 1741 
returnToThrowTrampoline(&callFrame-> globalData(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS);1741 returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS); 1742 1742 1743 1743 return JSValue::encode(result); … … 1790 1790 // should not be treated as a dictionary. 1791 1791 if (slotBaseObject->structure()->isDictionary()) { 1792 slotBaseObject->flattenDictionaryObject(callFrame-> globalData());1793 offset = slotBaseObject->structure()->get(callFrame-> globalData(), propertyName);1792 slotBaseObject->flattenDictionaryObject(callFrame->vm()); 1793 offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName); 1794 1794 } 1795 1795 1796 1796 int listIndex; 1797 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame-> globalData(), codeBlock->ownerExecutable(), stubInfo, listIndex);1797 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex); 1798 1798 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) { 1799 JIT::compileGetByIdProtoList(callFrame->scope()-> globalData(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);1799 JIT::compileGetByIdProtoList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset); 1800 1800 1801 1801 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1)) … … 1811 1811 ASSERT(!baseValue.asCell()->structure()->isDictionary()); 1812 1812 int listIndex; 1813 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame-> globalData(), codeBlock->ownerExecutable(), stubInfo, listIndex);1813 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex); 1814 1814 1815 1815 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) { 1816 1816 StructureChain* protoChain = structure->prototypeChain(callFrame); 1817 JIT::compileGetByIdChainList(callFrame->scope()-> globalData(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);1817 JIT::compileGetByIdChainList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset); 1818 1818 1819 1819 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1)) … … 1891 1891 } 1892 1892 1893 stackFrame. globalData->exception = createInvalidParamError(callFrame, "instanceof", baseVal);1893 stackFrame.vm->exception = createInvalidParamError(callFrame, "instanceof", baseVal); 1894 1894 VM_THROW_EXCEPTION_AT_END(); 1895 1895 return JSValue::encode(JSValue()); … … 2060 2060 JSValue result = jsBoolean(couldDelete); 2061 2061 if (!couldDelete && callFrame->codeBlock()->isStrictMode()) 2062 stackFrame. 
globalData->exception = createTypeError(stackFrame.callFrame, "Unable to delete property.");2062 stackFrame.vm->exception = createTypeError(stackFrame.callFrame, "Unable to delete property."); 2063 2063 2064 2064 CHECK_FOR_EXCEPTION_AT_END(); … … 2116 2116 if (!error) 2117 2117 return function; 2118 callFrame-> globalData().exception = error;2118 callFrame->vm().exception = error; 2119 2119 return 0; 2120 2120 } … … 2211 2211 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable); 2212 2212 if (JSObject* error = functionExecutable->compileFor(callFrame, callee->scope(), kind)) { 2213 callFrame-> globalData().exception = error;2213 callFrame->vm().exception = error; 2214 2214 return 0; 2215 2215 } … … 2225 2225 callLinkInfo->setSeen(); 2226 2226 else 2227 JIT::linkFor(callee, callFrame->callerFrame()->codeBlock(), codeBlock, codePtr, callLinkInfo, &callFrame-> globalData(), kind);2227 JIT::linkFor(callee, callFrame->callerFrame()->codeBlock(), codeBlock, codePtr, callLinkInfo, &callFrame->vm(), kind); 2228 2228 2229 2229 return codePtr.executableAddress(); … … 2249 2249 2250 2250 CodeBlock* callerCodeBlock = callFrame->callerFrame()->codeBlock(); 2251 JSGlobalData* globalData = callerCodeBlock->globalData();2251 VM* vm = callerCodeBlock->vm(); 2252 2252 CallLinkInfo* callLinkInfo = &callerCodeBlock->getCallLinkInfo(callFrame->returnPC()); 2253 2253 JSFunction* callee = jsCast<JSFunction*>(callFrame->callee()); … … 2288 2288 JSObject* error = functionExecutable->compileFor(callFrame, scopeChain, CodeForCall); 2289 2289 if (error) { 2290 callFrame-> globalData().exception = error;2290 callFrame->vm().exception = error; 2291 2291 return 0; 2292 2292 } … … 2297 2297 if (shouldLink) { 2298 2298 ASSERT(codePtr); 2299 JIT::compileClosureCall( globalData, callLinkInfo, callerCodeBlock, calleeCodeBlock, structure, executable, codePtr);2299 JIT::compileClosureCall(vm, callLinkInfo, callerCodeBlock, calleeCodeBlock, structure, executable, codePtr); 2300 2300 callLinkInfo->hasSeenClosure = true; 2301 2301 } else … … 2321 2321 STUB_INIT_STACK_FRAME(stackFrame); 2322 2322 2323 JSActivation* activation = JSActivation::create(stackFrame.callFrame-> globalData(), stackFrame.callFrame, stackFrame.callFrame->codeBlock());2323 JSActivation* activation = JSActivation::create(stackFrame.callFrame->vm(), stackFrame.callFrame, stackFrame.callFrame->codeBlock()); 2324 2324 stackFrame.callFrame->setScope(activation); 2325 2325 return activation; … … 2349 2349 } 2350 2350 2351 if (stackFrame. globalData->exception)2351 if (stackFrame.vm->exception) 2352 2352 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS); 2353 2353 … … 2359 2359 STUB_INIT_STACK_FRAME(stackFrame); 2360 2360 2361 Arguments* arguments = Arguments::create(*stackFrame. globalData, stackFrame.callFrame);2361 Arguments* arguments = Arguments::create(*stackFrame.vm, stackFrame.callFrame); 2362 2362 return JSValue::encode(JSValue(arguments)); 2363 2363 } … … 2368 2368 2369 2369 ASSERT(stackFrame.callFrame->codeBlock()->needsFullScopeChain()); 2370 jsCast<JSActivation*>(stackFrame.args[0].jsValue())->tearOff(*stackFrame. globalData);2370 jsCast<JSActivation*>(stackFrame.args[0].jsValue())->tearOff(*stackFrame.vm); 2371 2371 } 2372 2372 … … 2389 2389 STUB_INIT_STACK_FRAME(stackFrame); 2390 2390 2391 if (LegacyProfiler* profiler = stackFrame. 
globalData->enabledProfiler())2391 if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler()) 2392 2392 profiler->willExecute(stackFrame.callFrame, stackFrame.args[0].jsValue()); 2393 2393 } … … 2397 2397 STUB_INIT_STACK_FRAME(stackFrame); 2398 2398 2399 if (LegacyProfiler* profiler = stackFrame. globalData->enabledProfiler())2399 if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler()) 2400 2400 profiler->didExecute(stackFrame.callFrame, stackFrame.args[0].jsValue()); 2401 2401 } … … 2475 2475 } 2476 2476 2477 if (stackFrame. globalData->exception)2477 if (stackFrame.vm->exception) 2478 2478 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS); 2479 2479 … … 2528 2528 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure()); 2529 2529 if (arrayMode != byValInfo.arrayMode) { 2530 JIT::compileGetByVal(&callFrame-> globalData(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);2530 JIT::compileGetByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode); 2531 2531 didOptimize = true; 2532 2532 } … … 2621 2621 JSObject* object = asObject(baseValue); 2622 2622 if (object->canSetIndexQuickly(i)) 2623 object->setIndexQuickly(callFrame-> globalData(), i, value);2623 object->setIndexQuickly(callFrame->vm(), i, value); 2624 2624 else 2625 2625 object->methodTable()->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode()); … … 2631 2631 } else { 2632 2632 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame)); 2633 if (!callFrame-> globalData().exception) { // Don't put to an object if toString threw an exception.2633 if (!callFrame->vm().exception) { // Don't put to an object if toString threw an exception. 2634 2634 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode()); 2635 2635 baseValue.put(callFrame, property, value, slot); … … 2662 2662 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure()); 2663 2663 if (arrayMode != byValInfo.arrayMode) { 2664 JIT::compilePutByVal(&callFrame-> globalData(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);2664 JIT::compilePutByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode); 2665 2665 didOptimize = true; 2666 2666 } … … 2797 2797 ASSERT(stackFrame.callFrame->codeBlock()->isStrictMode()); 2798 2798 if (!object->getPropertySlot(stackFrame.callFrame, stackFrame.args[1].identifier(), slot)) { 2799 stackFrame. globalData->exception = createErrorForInvalidGlobalAssignment(stackFrame.callFrame, stackFrame.args[1].identifier().string());2799 stackFrame.vm->exception = createErrorForInvalidGlobalAssignment(stackFrame.callFrame, stackFrame.args[1].identifier().string()); 2800 2800 VM_THROW_EXCEPTION(); 2801 2801 } … … 3174 3174 RegExp* regExp = stackFrame.args[0].regExp(); 3175 3175 if (!regExp->isValid()) { 3176 stackFrame. globalData->exception = createSyntaxError(callFrame, "Invalid flags supplied to RegExp constructor.");3176 stackFrame.vm->exception = createSyntaxError(callFrame, "Invalid flags supplied to RegExp constructor."); 3177 3177 VM_THROW_EXCEPTION(); 3178 3178 } 3179 3179 3180 return RegExpObject::create(*stackFrame. 
globalData, stackFrame.callFrame->lexicalGlobalObject(), stackFrame.callFrame->lexicalGlobalObject()->regExpStructure(), regExp);3180 return RegExpObject::create(*stackFrame.vm, stackFrame.callFrame->lexicalGlobalObject(), stackFrame.callFrame->lexicalGlobalObject()->regExpStructure(), regExp); 3181 3181 } 3182 3182 … … 3213 3213 3214 3214 JSValue result = eval(callFrame); 3215 if (stackFrame. globalData->exception)3215 if (stackFrame.vm->exception) 3216 3216 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS); 3217 3217 … … 3222 3222 { 3223 3223 STUB_INIT_STACK_FRAME(stackFrame); 3224 ExceptionHandler handler = jitThrow(stackFrame. globalData, stackFrame.callFrame, stackFrame.args[0].jsValue(), STUB_RETURN_ADDRESS);3224 ExceptionHandler handler = jitThrow(stackFrame.vm, stackFrame.callFrame, stackFrame.args[0].jsValue(), STUB_RETURN_ADDRESS); 3225 3225 STUB_SET_RETURN_ADDRESS(handler.catchRoutine); 3226 3226 return handler.callFrame; … … 3348 3348 3349 3349 if (!baseVal.isObject()) { 3350 stackFrame. globalData->exception = createInvalidParamError(stackFrame.callFrame, "in", baseVal);3350 stackFrame.vm->exception = createInvalidParamError(stackFrame.callFrame, "in", baseVal); 3351 3351 VM_THROW_EXCEPTION(); 3352 3352 } … … 3470 3470 3471 3471 if (!result && callFrame->codeBlock()->isStrictMode()) 3472 stackFrame. globalData->exception = createTypeError(stackFrame.callFrame, "Unable to delete property.");3472 stackFrame.vm->exception = createTypeError(stackFrame.callFrame, "Unable to delete property."); 3473 3473 3474 3474 CHECK_FOR_EXCEPTION_AT_END(); … … 3494 3494 3495 3495 if (!getter.isUndefined()) 3496 accessor->setGetter(callFrame-> globalData(), asObject(getter));3496 accessor->setGetter(callFrame->vm(), asObject(getter)); 3497 3497 if (!setter.isUndefined()) 3498 accessor->setSetter(callFrame-> globalData(), asObject(setter));3498 accessor->setSetter(callFrame->vm(), asObject(setter)); 3499 3499 baseObj->putDirectAccessor(callFrame, stackFrame.args[1].identifier(), accessor, Accessor); 3500 3500 } … … 3507 3507 String message = stackFrame.args[0].jsValue().toString(callFrame)->value(callFrame); 3508 3508 if (stackFrame.args[1].asInt32) 3509 stackFrame. globalData->exception = createReferenceError(callFrame, message);3509 stackFrame.vm->exception = createReferenceError(callFrame, message); 3510 3510 else 3511 stackFrame. globalData->exception = createTypeError(callFrame, message);3511 stackFrame.vm->exception = createTypeError(callFrame, message); 3512 3512 VM_THROW_EXCEPTION_AT_END(); 3513 3513 } … … 3524 3524 int column = stackFrame.args[3].int32(); 3525 3525 3526 stackFrame. globalData->interpreter->debug(callFrame, static_cast<DebugHookID>(debugHookID), firstLine, lastLine, column);3526 stackFrame.vm->interpreter->debug(callFrame, static_cast<DebugHookID>(debugHookID), firstLine, lastLine, column); 3527 3527 } 3528 3528 … … 3530 3530 { 3531 3531 STUB_INIT_STACK_FRAME(stackFrame); 3532 JSGlobalData* globalData = stackFrame.globalData;3533 ExceptionHandler handler = jitThrow( globalData, stackFrame.callFrame, globalData->exception, globalData->exceptionLocation);3532 VM* vm = stackFrame.vm; 3533 ExceptionHandler handler = jitThrow(vm, stackFrame.callFrame, vm->exception, vm->exceptionLocation); 3534 3534 STUB_SET_RETURN_ADDRESS(handler.catchRoutine); 3535 3535 return handler.callFrame; -
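The JITStubs.cpp changes above keep the stub exception protocol intact and only rename the field the macros consult: CHECK_FOR_EXCEPTION*() tests stackFrame.vm->exception, and returnToThrowTrampoline() records the faulting location on the VM before redirecting the return address to ctiVMThrowTrampoline. A rough C++ rendering of that convention, with stand-in types (the real exception slot is a JSValue and the real return-address type is ReturnAddressPtr; the function names below are illustrative):

    using ReturnAddress = void (*)();          // stand-in for ReturnAddressPtr

    struct Exception { void* cell = nullptr; explicit operator bool() const { return cell; } };

    struct VM {
        Exception exception;                   // stand-in for vm->exception
        ReturnAddress exceptionLocation = nullptr;
    };

    extern "C" void ctiVMThrowTrampoline() {}  // stand-in; the real trampoline is assembly

    // Mirrors returnToThrowTrampoline(): remember where the exception happened and
    // rewrite the slot the stub will return through.
    inline void returnToThrow(VM* vm, ReturnAddress where, ReturnAddress& returnSlot)
    {
        vm->exceptionLocation = where;
        returnSlot = ctiVMThrowTrampoline;
    }

    // Mirrors CHECK_FOR_EXCEPTION_AT_END(): only the unhappy path pays for the redirect.
    inline void checkForExceptionAtEnd(VM* vm, ReturnAddress where, ReturnAddress& returnSlot)
    {
        if (vm->exception)
            returnToThrow(vm, where, returnSlot);
    }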
trunk/Source/JavaScriptCore/jit/JITStubs.h
r148639 r148696 49 49 class FunctionExecutable; 50 50 class Identifier; 51 class JSGlobalData;51 class VM; 52 52 class JSGlobalObject; 53 53 class JSObject; … … 99 99 void* unused1; 100 100 void* unused2; 101 JSGlobalData* globalData;101 VM* vm; 102 102 103 103 void* savedRBX; … … 135 135 // Passed on the stack 136 136 void* unused2; 137 JSGlobalData* globalData;137 VM* vm; 138 138 139 139 // When JIT code makes a call, it pushes its return address just below the rest of the stack. … … 163 163 void* unused1; 164 164 void* unused2; 165 JSGlobalData* globalData;165 VM* vm; 166 166 167 167 // When JIT code makes a call, it pushes its return address just below the rest of the stack. … … 194 194 // These arguments passed on the stack. 195 195 void* unused1; 196 JSGlobalData* globalData;196 VM* vm; 197 197 198 198 ReturnAddressPtr* returnAddressSlot() { return &thunkReturnAddress; } … … 224 224 // These arguments passed on the stack. 225 225 void* unused2; 226 JSGlobalData* globalData;226 VM* vm; 227 227 228 228 // When JIT code makes a call, it pushes its return address just below the rest of the stack. … … 258 258 // These arguments passed on the stack. 259 259 void* unused2; 260 JSGlobalData* globalData;260 VM* vm; 261 261 262 262 ReturnAddressPtr* returnAddressSlot() { return &thunkReturnAddress; } … … 279 279 JSValue* exception; 280 280 void* unused1; 281 JSGlobalData* globalData;281 VM* vm; 282 282 283 283 ReturnAddressPtr* returnAddressSlot() { return &thunkReturnAddress; } … … 308 308 extern "C" void ctiVMThrowTrampoline(); 309 309 extern "C" void ctiOpThrowNotCaught(); 310 extern "C" EncodedJSValue ctiTrampoline(void* code, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, JSGlobalData*);310 extern "C" EncodedJSValue ctiTrampoline(void* code, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, VM*); 311 311 #if ENABLE(DFG_JIT) 312 312 extern "C" void ctiTrampolineEnd(); … … 319 319 #endif 320 320 321 void performPlatformSpecificJITAssertions( JSGlobalData*);321 void performPlatformSpecificJITAssertions(VM*); 322 322 323 323 extern "C" { … … 449 449 450 450 struct JITStackFrame { 451 JSGlobalData* globalData;451 VM* vm; 452 452 }; 453 453 -
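In JITStubs.h the per-architecture JITStackFrame structs now carry a VM* vm member where JSGlobalData* globalData used to sit, and the .cpp assertions (OBJECT_OFFSETOF(JITStackFrame, vm) == GLOBAL_DATA_OFFSET) keep the hand-written trampolines addressing the right stack slot. A toy illustration of that layout/offset check; the struct below and its offset are invented for the sketch and do not match any of the real variants above:

    #include <cstddef>

    struct VM;
    struct JSStack;
    struct CallFrame;

    struct JITStackFrameSketch {
        void* code;
        JSStack* stack;
        CallFrame* callFrame;
        void* unused1;
        void* unused2;
        VM* vm;             // formerly JSGlobalData* globalData
    };

    // The real code asserts the offset at runtime; the equivalent compile-time
    // check for this all-pointer sketch:
    static_assert(offsetof(JITStackFrameSketch, vm) == 5 * sizeof(void*),
                  "trampoline stack layout must match the struct");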
trunk/Source/JavaScriptCore/jit/JITThunks.cpp
r139004 r148696 31 31 #include "Executable.h" 32 32 #include "JIT.h" 33 #include " JSGlobalData.h"33 #include "VM.h" 34 34 #include "Operations.h" 35 35 … … 45 45 } 46 46 47 MacroAssemblerCodePtr JITThunks::ctiNativeCall( JSGlobalData* globalData)47 MacroAssemblerCodePtr JITThunks::ctiNativeCall(VM* vm) 48 48 { 49 49 #if ENABLE(LLINT) 50 if (! globalData->canUseJIT())50 if (!vm->canUseJIT()) 51 51 return MacroAssemblerCodePtr::createLLIntCodePtr(llint_native_call_trampoline); 52 52 #endif 53 return ctiStub( globalData, nativeCallGenerator).code();53 return ctiStub(vm, nativeCallGenerator).code(); 54 54 } 55 MacroAssemblerCodePtr JITThunks::ctiNativeConstruct( JSGlobalData* globalData)55 MacroAssemblerCodePtr JITThunks::ctiNativeConstruct(VM* vm) 56 56 { 57 57 #if ENABLE(LLINT) 58 if (! globalData->canUseJIT())58 if (!vm->canUseJIT()) 59 59 return MacroAssemblerCodePtr::createLLIntCodePtr(llint_native_construct_trampoline); 60 60 #endif 61 return ctiStub( globalData, nativeConstructGenerator).code();61 return ctiStub(vm, nativeConstructGenerator).code(); 62 62 } 63 63 64 MacroAssemblerCodeRef JITThunks::ctiStub( JSGlobalData* globalData, ThunkGenerator generator)64 MacroAssemblerCodeRef JITThunks::ctiStub(VM* vm, ThunkGenerator generator) 65 65 { 66 66 CTIStubMap::AddResult entry = m_ctiStubMap.add(generator, MacroAssemblerCodeRef()); 67 67 if (entry.isNewEntry) 68 entry.iterator->value = generator( globalData);68 entry.iterator->value = generator(vm); 69 69 return entry.iterator->value; 70 70 } 71 71 72 NativeExecutable* JITThunks::hostFunctionStub( JSGlobalData* globalData, NativeFunction function, NativeFunction constructor)72 NativeExecutable* JITThunks::hostFunctionStub(VM* vm, NativeFunction function, NativeFunction constructor) 73 73 { 74 74 if (NativeExecutable* nativeExecutable = m_hostFunctionStubMap->get(function)) 75 75 return nativeExecutable; 76 76 77 NativeExecutable* nativeExecutable = NativeExecutable::create(* globalData, JIT::compileCTINativeCall(globalData, function), function, MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct(globalData)), constructor, NoIntrinsic);77 NativeExecutable* nativeExecutable = NativeExecutable::create(*vm, JIT::compileCTINativeCall(vm, function), function, MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct(vm)), constructor, NoIntrinsic); 78 78 weakAdd(*m_hostFunctionStubMap, function, PassWeak<NativeExecutable>(nativeExecutable)); 79 79 return nativeExecutable; 80 80 } 81 81 82 NativeExecutable* JITThunks::hostFunctionStub( JSGlobalData* globalData, NativeFunction function, ThunkGenerator generator, Intrinsic intrinsic)82 NativeExecutable* JITThunks::hostFunctionStub(VM* vm, NativeFunction function, ThunkGenerator generator, Intrinsic intrinsic) 83 83 { 84 84 if (NativeExecutable* nativeExecutable = m_hostFunctionStubMap->get(function)) … … 87 87 MacroAssemblerCodeRef code; 88 88 if (generator) { 89 if ( globalData->canUseJIT())90 code = generator( globalData);89 if (vm->canUseJIT()) 90 code = generator(vm); 91 91 else 92 92 code = MacroAssemblerCodeRef(); 93 93 } else 94 code = JIT::compileCTINativeCall( globalData, function);94 code = JIT::compileCTINativeCall(vm, function); 95 95 96 NativeExecutable* nativeExecutable = NativeExecutable::create(* globalData, code, function, MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct(globalData)), callHostFunctionAsConstructor, intrinsic);96 NativeExecutable* nativeExecutable = NativeExecutable::create(*vm, code, function, 
MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct(vm)), callHostFunctionAsConstructor, intrinsic); 97 97 weakAdd(*m_hostFunctionStubMap, function, PassWeak<NativeExecutable>(nativeExecutable)); 98 98 return nativeExecutable; -
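JITThunks::ctiStub() above memoizes one stub per ThunkGenerator: the map is probed first, and the generator only runs when the entry is new. The same pattern modelled with standard containers; VM, CodeRef and ThunkCache are stand-ins for the real JSC types (the real map is a hash map of MacroAssemblerCodeRef keyed by the generator):

    #include <unordered_map>

    struct VM {};                                  // stand-in
    struct CodeRef { void* start = nullptr; };     // stand-in for MacroAssemblerCodeRef
    using ThunkGenerator = CodeRef (*)(VM*);

    class ThunkCache {
    public:
        CodeRef ctiStub(VM* vm, ThunkGenerator generator)
        {
            auto result = m_stubs.emplace(generator, CodeRef());
            if (result.second)                     // newly inserted: generate exactly once
                result.first->second = generator(vm);
            return result.first->second;
        }
    private:
        std::unordered_map<ThunkGenerator, CodeRef> m_stubs;
    };

hostFunctionStub() follows the same shape one level up, keyed by the NativeFunction and backed by a weak map of NativeExecutables.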
trunk/Source/JavaScriptCore/jit/JITThunks.h
r147962 r148696 44 44 namespace JSC { 45 45 46 class JSGlobalData;46 class VM; 47 47 class NativeExecutable; 48 48 … … 52 52 ~JITThunks(); 53 53 54 MacroAssemblerCodePtr ctiNativeCall( JSGlobalData*);55 MacroAssemblerCodePtr ctiNativeConstruct( JSGlobalData*);54 MacroAssemblerCodePtr ctiNativeCall(VM*); 55 MacroAssemblerCodePtr ctiNativeConstruct(VM*); 56 56 57 MacroAssemblerCodeRef ctiStub( JSGlobalData*, ThunkGenerator);57 MacroAssemblerCodeRef ctiStub(VM*, ThunkGenerator); 58 58 59 NativeExecutable* hostFunctionStub( JSGlobalData*, NativeFunction, NativeFunction constructor);60 NativeExecutable* hostFunctionStub( JSGlobalData*, NativeFunction, ThunkGenerator, Intrinsic);59 NativeExecutable* hostFunctionStub(VM*, NativeFunction, NativeFunction constructor); 60 NativeExecutable* hostFunctionStub(VM*, NativeFunction, ThunkGenerator, Intrinsic); 61 61 62 62 void clearHostFunctionStubs(); -
trunk/Source/JavaScriptCore/jit/JITWriteBarrier.h
r143147 r148696 37 37 38 38 class JSCell; 39 class JSGlobalData;39 class VM; 40 40 41 41 // Needs to be even to appease some of the backends. … … 78 78 } 79 79 80 void set( JSGlobalData&, CodeLocationDataLabelPtr location, JSCell* owner, JSCell* value)80 void set(VM&, CodeLocationDataLabelPtr location, JSCell* owner, JSCell* value) 81 81 { 82 82 Heap::writeBarrier(owner, value); … … 117 117 } 118 118 119 void set( JSGlobalData& globalData, CodeLocationDataLabelPtr location, JSCell* owner, T* value)119 void set(VM& vm, CodeLocationDataLabelPtr location, JSCell* owner, T* value) 120 120 { 121 121 validateCell(owner); 122 122 validateCell(value); 123 JITWriteBarrierBase::set( globalData, location, owner, value);123 JITWriteBarrierBase::set(vm, location, owner, value); 124 124 } 125 void set( JSGlobalData& globalData, JSCell* owner, T* value)125 void set(VM& vm, JSCell* owner, T* value) 126 126 { 127 set( globalData, location(), owner, value);127 set(vm, location(), owner, value); 128 128 } 129 129 T* get() const -
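JITWriteBarrier(Base)::set() above takes the VM, runs the GC write barrier for the owner/value pair, and then patches the pointer that the generated code loads. A skeletal version of that two-step with stand-in types; the barrier and the patchable code location are modelled as plain functions and a raw slot here:

    struct JSCell {};
    struct VM {};

    // Stand-in for Heap::writeBarrier(owner, value); the real one is a static member of Heap.
    inline void heapWriteBarrier(JSCell* /*owner*/, JSCell* /*value*/) {}

    struct CodeLocationDataLabelPtr {
        void** slot = nullptr;                       // stand-in for a patchable pointer in JIT code
        void repatch(JSCell* value) const { if (slot) *slot = value; }
    };

    // Mirrors the ordering in the hunk: barrier first, then the repatch of the code location.
    inline void jitWriteBarrierSet(VM&, CodeLocationDataLabelPtr location, JSCell* owner, JSCell* value)
    {
        heapWriteBarrier(owner, value);
        location.repatch(value);
    }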
trunk/Source/JavaScriptCore/jit/SpecializedThunkJIT.h
r131858 r148696 56 56 } 57 57 58 void loadJSStringArgument( JSGlobalData& globalData, int argument, RegisterID dst)58 void loadJSStringArgument(VM& vm, int argument, RegisterID dst) 59 59 { 60 60 loadCellArgument(argument, dst); 61 m_failures.append(branchPtr(NotEqual, Address(dst, JSCell::structureOffset()), TrustedImmPtr( globalData.stringStructure.get())));61 m_failures.append(branchPtr(NotEqual, Address(dst, JSCell::structureOffset()), TrustedImmPtr(vm.stringStructure.get()))); 62 62 } 63 63 … … 131 131 } 132 132 133 MacroAssemblerCodeRef finalize( JSGlobalData& globalData, MacroAssemblerCodePtr fallback, const char* thunkKind)133 MacroAssemblerCodeRef finalize(VM& vm, MacroAssemblerCodePtr fallback, const char* thunkKind) 134 134 { 135 LinkBuffer patchBuffer( globalData, this, GLOBAL_THUNK_ID);135 LinkBuffer patchBuffer(vm, this, GLOBAL_THUNK_ID); 136 136 patchBuffer.link(m_failures, CodeLocationLabel(fallback)); 137 137 for (unsigned i = 0; i < m_calls.size(); i++) -
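SpecializedThunkJIT::loadJSStringArgument() above guards a thunk argument by comparing the cell's Structure* against the VM's canonical string structure and appending a failure jump when it differs. Reduced to ordinary C++ with stand-in types, the guard is just this predicate (a false result corresponds to a branch recorded on m_failures and later linked to the fallback by finalize()):

    struct Structure {};
    struct JSCell { Structure* structure = nullptr; };
    struct VM { Structure* stringStructure = nullptr; };

    // Stand-in for the branchPtr(NotEqual, Address(dst, structureOffset), vm.stringStructure) guard.
    inline bool isJSStringArgument(VM& vm, JSCell* argument)
    {
        return argument && argument->structure == vm.stringStructure;
    }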
trunk/Source/JavaScriptCore/jit/ThunkGenerator.h
r138465 r148696 32 32 33 33 namespace JSC { 34 class JSGlobalData;34 class VM; 35 35 class MacroAssemblerCodeRef; 36 36 37 typedef MacroAssemblerCodeRef (*ThunkGenerator)( JSGlobalData*);37 typedef MacroAssemblerCodeRef (*ThunkGenerator)(VM*); 38 38 39 39 } // namespace JSC -
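ThunkGenerator.h now spells the generator type as a function taking VM* and returning a MacroAssemblerCodeRef. A tiny stand-alone illustration of the typedef and how a generator is invoked; the types and the example generator are stand-ins (real generators emit code through a LinkBuffer and are cached by JITThunks::ctiStub):

    struct VM {};
    struct MacroAssemblerCodeRef { void* start = nullptr; };

    typedef MacroAssemblerCodeRef (*ThunkGenerator)(VM*);

    static MacroAssemblerCodeRef exampleGenerator(VM*)    // hypothetical generator
    {
        return MacroAssemblerCodeRef();                    // real generators assemble and finalize code
    }

    static MacroAssemblerCodeRef getStub(VM* vm, ThunkGenerator generator)
    {
        return generator(vm);                              // the call JITThunks::ctiStub memoizes
    }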
trunk/Source/JavaScriptCore/jit/ThunkGenerators.cpp
r144043 r148696 38 38 namespace JSC { 39 39 40 static JSInterfaceJIT::Call generateSlowCaseFor( JSGlobalData* globalData, JSInterfaceJIT& jit)40 static JSInterfaceJIT::Call generateSlowCaseFor(VM* vm, JSInterfaceJIT& jit) 41 41 { 42 42 jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, JSInterfaceJIT::regT2); … … 49 49 jit.emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock); 50 50 51 jit.storePtr(JSInterfaceJIT::callFrameRegister, & globalData->topCallFrame);51 jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame); 52 52 jit.restoreArgumentReference(); 53 53 JSInterfaceJIT::Call callNotJSFunction = jit.call(); … … 59 59 } 60 60 61 static MacroAssemblerCodeRef linkForGenerator( JSGlobalData* globalData, FunctionPtr lazyLink, FunctionPtr notJSFunction, const char* name)61 static MacroAssemblerCodeRef linkForGenerator(VM* vm, FunctionPtr lazyLink, FunctionPtr notJSFunction, const char* name) 62 62 { 63 63 JSInterfaceJIT jit; … … 81 81 jit.emitPutToCallFrameHeader(JSInterfaceJIT::regT3, JSStack::ReturnPC); 82 82 83 jit.storePtr(JSInterfaceJIT::callFrameRegister, & globalData->topCallFrame);83 jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame); 84 84 jit.restoreArgumentReference(); 85 85 JSInterfaceJIT::Call callLazyLink = jit.call(); … … 88 88 89 89 slowCase.link(&jit); 90 JSInterfaceJIT::Call callNotJSFunction = generateSlowCaseFor( globalData, jit);91 92 LinkBuffer patchBuffer(* globalData, &jit, GLOBAL_THUNK_ID);90 JSInterfaceJIT::Call callNotJSFunction = generateSlowCaseFor(vm, jit); 91 92 LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID); 93 93 patchBuffer.link(callLazyLink, lazyLink); 94 94 patchBuffer.link(callNotJSFunction, notJSFunction); … … 97 97 } 98 98 99 MacroAssemblerCodeRef linkCallGenerator( JSGlobalData* globalData)100 { 101 return linkForGenerator( globalData, FunctionPtr(cti_vm_lazyLinkCall), FunctionPtr(cti_op_call_NotJSFunction), "call");102 } 103 104 MacroAssemblerCodeRef linkConstructGenerator( JSGlobalData* globalData)105 { 106 return linkForGenerator( globalData, FunctionPtr(cti_vm_lazyLinkConstruct), FunctionPtr(cti_op_construct_NotJSConstruct), "construct");107 } 108 109 MacroAssemblerCodeRef linkClosureCallGenerator( JSGlobalData* globalData)110 { 111 return linkForGenerator( globalData, FunctionPtr(cti_vm_lazyLinkClosureCall), FunctionPtr(cti_op_call_NotJSFunction), "closure call");112 } 113 114 static MacroAssemblerCodeRef virtualForGenerator( JSGlobalData* globalData, FunctionPtr compile, FunctionPtr notJSFunction, const char* name, CodeSpecializationKind kind)99 MacroAssemblerCodeRef linkCallGenerator(VM* vm) 100 { 101 return linkForGenerator(vm, FunctionPtr(cti_vm_lazyLinkCall), FunctionPtr(cti_op_call_NotJSFunction), "call"); 102 } 103 104 MacroAssemblerCodeRef linkConstructGenerator(VM* vm) 105 { 106 return linkForGenerator(vm, FunctionPtr(cti_vm_lazyLinkConstruct), FunctionPtr(cti_op_construct_NotJSConstruct), "construct"); 107 } 108 109 MacroAssemblerCodeRef linkClosureCallGenerator(VM* vm) 110 { 111 return linkForGenerator(vm, FunctionPtr(cti_vm_lazyLinkClosureCall), FunctionPtr(cti_op_call_NotJSFunction), "closure call"); 112 } 113 114 static MacroAssemblerCodeRef virtualForGenerator(VM* vm, FunctionPtr compile, FunctionPtr notJSFunction, const char* name, CodeSpecializationKind kind) 115 115 { 116 116 JSInterfaceJIT jit; … … 132 132 JSInterfaceJIT::Jump hasCodeBlock1 = jit.branch32(JSInterfaceJIT::GreaterThanOrEqual, JSInterfaceJIT::Address(JSInterfaceJIT::regT2, 
FunctionExecutable::offsetOfNumParametersFor(kind)), JSInterfaceJIT::TrustedImm32(0)); 133 133 jit.preserveReturnAddressAfterCall(JSInterfaceJIT::regT3); 134 jit.storePtr(JSInterfaceJIT::callFrameRegister, & globalData->topCallFrame);134 jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame); 135 135 jit.restoreArgumentReference(); 136 136 JSInterfaceJIT::Call callCompile = jit.call(); … … 143 143 144 144 slowCase.link(&jit); 145 JSInterfaceJIT::Call callNotJSFunction = generateSlowCaseFor( globalData, jit);146 147 LinkBuffer patchBuffer(* globalData, &jit, GLOBAL_THUNK_ID);145 JSInterfaceJIT::Call callNotJSFunction = generateSlowCaseFor(vm, jit); 146 147 LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID); 148 148 patchBuffer.link(callCompile, compile); 149 149 patchBuffer.link(callNotJSFunction, notJSFunction); … … 152 152 } 153 153 154 MacroAssemblerCodeRef virtualCallGenerator( JSGlobalData* globalData)155 { 156 return virtualForGenerator( globalData, FunctionPtr(cti_op_call_jitCompile), FunctionPtr(cti_op_call_NotJSFunction), "call", CodeForCall);157 } 158 159 MacroAssemblerCodeRef virtualConstructGenerator( JSGlobalData* globalData)160 { 161 return virtualForGenerator( globalData, FunctionPtr(cti_op_construct_jitCompile), FunctionPtr(cti_op_construct_NotJSConstruct), "construct", CodeForConstruct);162 } 163 164 MacroAssemblerCodeRef stringLengthTrampolineGenerator( JSGlobalData* globalData)154 MacroAssemblerCodeRef virtualCallGenerator(VM* vm) 155 { 156 return virtualForGenerator(vm, FunctionPtr(cti_op_call_jitCompile), FunctionPtr(cti_op_call_NotJSFunction), "call", CodeForCall); 157 } 158 159 MacroAssemblerCodeRef virtualConstructGenerator(VM* vm) 160 { 161 return virtualForGenerator(vm, FunctionPtr(cti_op_construct_jitCompile), FunctionPtr(cti_op_construct_NotJSConstruct), "construct", CodeForConstruct); 162 } 163 164 MacroAssemblerCodeRef stringLengthTrampolineGenerator(VM* vm) 165 165 { 166 166 JSInterfaceJIT jit; … … 172 172 JSInterfaceJIT::NotEqual, JSInterfaceJIT::Address( 173 173 JSInterfaceJIT::regT0, JSCell::structureOffset()), 174 JSInterfaceJIT::TrustedImmPtr( globalData->stringStructure.get()));174 JSInterfaceJIT::TrustedImmPtr(vm->stringStructure.get())); 175 175 176 176 // Checks out okay! - get the length from the Ustring. … … 194 194 JSInterfaceJIT::NotEqual, 195 195 JSInterfaceJIT::Address(JSInterfaceJIT::regT0, JSCell::structureOffset()), 196 JSInterfaceJIT::TrustedImmPtr( globalData->stringStructure.get()));196 JSInterfaceJIT::TrustedImmPtr(vm->stringStructure.get())); 197 197 198 198 // Checks out okay! - get the length from the Ustring. 
… … 213 213 JSInterfaceJIT::Call failureCases3Call = jit.makeTailRecursiveCall(failureCases3); 214 214 215 LinkBuffer patchBuffer(* globalData, &jit, GLOBAL_THUNK_ID);215 LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID); 216 216 217 217 patchBuffer.link(failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail)); … … 222 222 } 223 223 224 static MacroAssemblerCodeRef nativeForGenerator( JSGlobalData* globalData, CodeSpecializationKind kind)224 static MacroAssemblerCodeRef nativeForGenerator(VM* vm, CodeSpecializationKind kind) 225 225 { 226 226 int executableOffsetToFunction = NativeExecutable::offsetOfNativeFunctionFor(kind); … … 229 229 230 230 jit.emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock); 231 jit.storePtr(JSInterfaceJIT::callFrameRegister, & globalData->topCallFrame);231 jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame); 232 232 233 233 #if CPU(X86) … … 374 374 // Check for an exception 375 375 #if USE(JSVALUE64) 376 jit.load64(&( globalData->exception), JSInterfaceJIT::regT2);376 jit.load64(&(vm->exception), JSInterfaceJIT::regT2); 377 377 JSInterfaceJIT::Jump exceptionHandler = jit.branchTest64(JSInterfaceJIT::NonZero, JSInterfaceJIT::regT2); 378 378 #else 379 379 JSInterfaceJIT::Jump exceptionHandler = jit.branch32( 380 380 JSInterfaceJIT::NotEqual, 381 JSInterfaceJIT::AbsoluteAddress(reinterpret_cast<char*>(& globalData->exception) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)),381 JSInterfaceJIT::AbsoluteAddress(reinterpret_cast<char*>(&vm->exception) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), 382 382 JSInterfaceJIT::TrustedImm32(JSValue::EmptyValueTag)); 383 383 #endif … … 392 392 jit.preserveReturnAddressAfterCall(JSInterfaceJIT::regT1); 393 393 394 jit.move(JSInterfaceJIT::TrustedImmPtr(& globalData->exceptionLocation), JSInterfaceJIT::regT2);394 jit.move(JSInterfaceJIT::TrustedImmPtr(&vm->exceptionLocation), JSInterfaceJIT::regT2); 395 395 jit.storePtr(JSInterfaceJIT::regT1, JSInterfaceJIT::regT2); 396 396 jit.poke(JSInterfaceJIT::callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*)); 397 397 398 jit.storePtr(JSInterfaceJIT::callFrameRegister, & globalData->topCallFrame);398 jit.storePtr(JSInterfaceJIT::callFrameRegister, &vm->topCallFrame); 399 399 // Set the return address. 
400 400 jit.move(JSInterfaceJIT::TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), JSInterfaceJIT::regT1);
… …
403 403 jit.ret();
404 404
405 LinkBuffer patchBuffer(* globalData, &jit, GLOBAL_THUNK_ID);
405 LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
406 406 return FINALIZE_CODE(patchBuffer, ("native %s trampoline", toCString(kind).data()));
407 407 }
408 408
409 MacroAssemblerCodeRef nativeCallGenerator( JSGlobalData* globalData)
410 {
411 return nativeForGenerator( globalData, CodeForCall);
412 }
413
414 MacroAssemblerCodeRef nativeConstructGenerator( JSGlobalData* globalData)
415 {
416 return nativeForGenerator( globalData, CodeForConstruct);
417 }
418
419 static void stringCharLoad(SpecializedThunkJIT& jit, JSGlobalData* globalData)
409 MacroAssemblerCodeRef nativeCallGenerator(VM* vm)
410 {
411 return nativeForGenerator(vm, CodeForCall);
412 }
413
414 MacroAssemblerCodeRef nativeConstructGenerator(VM* vm)
415 {
416 return nativeForGenerator(vm, CodeForConstruct);
417 }
418
419 static void stringCharLoad(SpecializedThunkJIT& jit, VM* vm)
420 420 {
421 421 // load string
422 jit.loadJSStringArgument(* globalData, SpecializedThunkJIT::ThisArgument, SpecializedThunkJIT::regT0);
422 jit.loadJSStringArgument(*vm, SpecializedThunkJIT::ThisArgument, SpecializedThunkJIT::regT0);
423 423
424 424 // Load string length to regT2, and start the process of loading the data pointer into regT0
… …
447 447 }
448 448
449 static void charToString(SpecializedThunkJIT& jit, JSGlobalData* globalData, MacroAssembler::RegisterID src, MacroAssembler::RegisterID dst, MacroAssembler::RegisterID scratch)
449 static void charToString(SpecializedThunkJIT& jit, VM* vm, MacroAssembler::RegisterID src, MacroAssembler::RegisterID dst, MacroAssembler::RegisterID scratch)
450 450 {
451 451 jit.appendFailure(jit.branch32(MacroAssembler::AboveOrEqual, src, MacroAssembler::TrustedImm32(0x100)));
452 jit.move(MacroAssembler::TrustedImmPtr( globalData->smallStrings.singleCharacterStrings()), scratch);
452 jit.move(MacroAssembler::TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), scratch);
453 453 jit.loadPtr(MacroAssembler::BaseIndex(scratch, src, MacroAssembler::ScalePtr, 0), dst);
454 454 jit.appendFailure(jit.branchTestPtr(MacroAssembler::Zero, dst));
455 455 }
456 456
457 MacroAssemblerCodeRef charCodeAtThunkGenerator( JSGlobalData* globalData)
458 {
459 SpecializedThunkJIT jit(1);
460 stringCharLoad(jit, globalData);
457 MacroAssemblerCodeRef charCodeAtThunkGenerator(VM* vm)
458 {
459 SpecializedThunkJIT jit(1);
460 stringCharLoad(jit, vm);
461 461 jit.returnInt32(SpecializedThunkJIT::regT0);
462 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "charCodeAt");
463 }
464
465 MacroAssemblerCodeRef charAtThunkGenerator( JSGlobalData* globalData)
466 {
467 SpecializedThunkJIT jit(1);
468 stringCharLoad(jit, globalData);
469 charToString(jit, globalData, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT1);
462 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "charCodeAt");
463 }
464
465 MacroAssemblerCodeRef charAtThunkGenerator(VM* vm)
466 {
467 SpecializedThunkJIT jit(1);
468 stringCharLoad(jit, vm);
469 charToString(jit, vm, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT1);
470 470 jit.returnJSCell(SpecializedThunkJIT::regT0);
471 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "charAt");
472 }
473
474 MacroAssemblerCodeRef fromCharCodeThunkGenerator( JSGlobalData* globalData)
471 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "charAt");
472 }
473
474 MacroAssemblerCodeRef fromCharCodeThunkGenerator(VM* vm)
475 475 {
476 476 SpecializedThunkJIT jit(1);
477 477 // load char code
478 478 jit.loadInt32Argument(0, SpecializedThunkJIT::regT0);
479 charToString(jit, globalData, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT1);
479 charToString(jit, vm, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT1);
480 480 jit.returnJSCell(SpecializedThunkJIT::regT0);
481 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "fromCharCode");
481 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "fromCharCode");
482 482 }
483 483
484 MacroAssemblerCodeRef sqrtThunkGenerator( JSGlobalData* globalData)
484 MacroAssemblerCodeRef sqrtThunkGenerator(VM* vm)
485 485 {
486 486 SpecializedThunkJIT jit(1);
487 487 if (!jit.supportsFloatingPointSqrt())
488 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
488 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
489 489
490 490 jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
491 491 jit.sqrtDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT0);
492 492 jit.returnDouble(SpecializedThunkJIT::fpRegT0);
493 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "sqrt");
493 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "sqrt");
494 494 }
495 495
… …
562 562 static const double halfConstant = 0.5;
563 563
564 MacroAssemblerCodeRef floorThunkGenerator( JSGlobalData* globalData)
564 MacroAssemblerCodeRef floorThunkGenerator(VM* vm)
565 565 {
566 566 SpecializedThunkJIT jit(1);
567 567 MacroAssembler::Jump nonIntJump;
568 568 if (!UnaryDoubleOpWrapper(floor) || !jit.supportsFloatingPoint())
569 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
569 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
570 570 jit.loadInt32Argument(0, SpecializedThunkJIT::regT0, nonIntJump);
571 571 jit.returnInt32(SpecializedThunkJIT::regT0);
… …
591 591 doubleResult.link(&jit);
592 592 jit.returnDouble(SpecializedThunkJIT::fpRegT0);
593 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "floor");
593 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "floor");
594 594 }
595 595
596 MacroAssemblerCodeRef ceilThunkGenerator( JSGlobalData* globalData)
596 MacroAssemblerCodeRef ceilThunkGenerator(VM* vm)
597 597 {
598 598 SpecializedThunkJIT jit(1);
599 599 if (!UnaryDoubleOpWrapper(ceil) || !jit.supportsFloatingPoint())
600 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
600 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
601 601 MacroAssembler::Jump nonIntJump;
602 602 jit.loadInt32Argument(0, SpecializedThunkJIT::regT0, nonIntJump);
… …
610 610 doubleResult.link(&jit);
611 611 jit.returnDouble(SpecializedThunkJIT::fpRegT0);
612 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "ceil");
612 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "ceil");
613 613 }
614 614
615 MacroAssemblerCodeRef roundThunkGenerator( JSGlobalData* globalData)
615 MacroAssemblerCodeRef roundThunkGenerator(VM* vm)
616 616 {
617 617 SpecializedThunkJIT jit(1);
618 618 if (!UnaryDoubleOpWrapper(jsRound) || !jit.supportsFloatingPoint())
619 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
619 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
620 620 MacroAssembler::Jump nonIntJump;
621 621 jit.loadInt32Argument(0, SpecializedThunkJIT::regT0, nonIntJump);
… …
644 644 doubleResult.link(&jit);
645 645 jit.returnDouble(SpecializedThunkJIT::fpRegT0);
646 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "round");
646 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "round");
647 647 }
648 648
649 MacroAssemblerCodeRef expThunkGenerator( JSGlobalData* globalData)
649 MacroAssemblerCodeRef expThunkGenerator(VM* vm)
650 650 {
651 651 if (!UnaryDoubleOpWrapper(exp))
652 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
652 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
653 653 SpecializedThunkJIT jit(1);
654 654 if (!jit.supportsFloatingPoint())
655 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
655 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
656 656 jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
657 657 jit.callDoubleToDouble(UnaryDoubleOpWrapper(exp));
658 658 jit.returnDouble(SpecializedThunkJIT::fpRegT0);
659 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "exp");
659 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "exp");
660 660 }
661 661
662 MacroAssemblerCodeRef logThunkGenerator( JSGlobalData* globalData)
662 MacroAssemblerCodeRef logThunkGenerator(VM* vm)
663 663 {
664 664 if (!UnaryDoubleOpWrapper(log))
665 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
665 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
666 666 SpecializedThunkJIT jit(1);
667 667 if (!jit.supportsFloatingPoint())
668 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
668 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
669 669 jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
670 670 jit.callDoubleToDouble(UnaryDoubleOpWrapper(log));
671 671 jit.returnDouble(SpecializedThunkJIT::fpRegT0);
672 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "log");
672 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "log");
673 673 }
674 674
675 MacroAssemblerCodeRef absThunkGenerator( JSGlobalData* globalData)
675 MacroAssemblerCodeRef absThunkGenerator(VM* vm)
676 676 {
677 677 SpecializedThunkJIT jit(1);
678 678 if (!jit.supportsFloatingPointAbs())
679 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
679 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
680 680 MacroAssembler::Jump nonIntJump;
681 681 jit.loadInt32Argument(0, SpecializedThunkJIT::regT0, nonIntJump);
… …
690 690 jit.absDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT1);
691 691 jit.returnDouble(SpecializedThunkJIT::fpRegT1);
692 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "abs");
692 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "abs");
693 693 }
694 694
695 MacroAssemblerCodeRef powThunkGenerator( JSGlobalData* globalData)
695 MacroAssemblerCodeRef powThunkGenerator(VM* vm)
696 696 {
697 697 SpecializedThunkJIT jit(2);
698 698 if (!jit.supportsFloatingPoint())
699 return MacroAssemblerCodeRef::createSelfManagedCodeRef( globalData->jitStubs->ctiNativeCall(globalData));
699 return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
700 700
701 701 jit.loadDouble(&oneConstant, SpecializedThunkJIT::fpRegT1);
… …
742 742 jit.appendFailure(nonIntExponent);
743 743
744 return jit.finalize(* globalData, globalData->jitStubs->ctiNativeCall(globalData), "pow");
744 return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "pow");
745 745 }
746 746
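Beyond the mechanical JSGlobalData-to-VM rename, the string thunks in this file share one fast path: charToString() bounds-checks the char code against 0x100, indexes the VM's single-character string table (vm->smallStrings.singleCharacterStrings()), and bails to the generic native call stub (vm->jitStubs->ctiNativeCall(vm)) if the cached string is missing. Below is a minimal, standalone C++ sketch of that control flow; StringCell and singleCharacterStringFastPath are stand-ins invented for illustration, not the real JSString/VM classes.

    #include <cstdint>

    // Stand-in for a JSString*; the real table comes from vm->smallStrings.singleCharacterStrings().
    struct StringCell { char character; };

    // Sketch of the fast path the thunk emits: bounds-check the char code, index the
    // single-character string table, and return null (slow path) if the entry is missing.
    static StringCell* singleCharacterStringFastPath(StringCell* const* table, uint32_t charCode)
    {
        if (charCode >= 0x100)                // branch32(AboveOrEqual, src, TrustedImm32(0x100)) -> appendFailure
            return nullptr;                   // caller falls back to ctiNativeCall
        StringCell* result = table[charCode]; // loadPtr(BaseIndex(scratch, src, ScalePtr, 0), dst)
        return result;                        // branchTestPtr(Zero, dst) -> appendFailure when null
    }

    int main()
    {
        StringCell a = { 'a' };
        StringCell* table[0x100] = { };
        table[static_cast<unsigned char>('a')] = &a;
        return singleCharacterStringFastPath(table, 'a') == &a ? 0 : 1; // 0 = fast path hit
    }

A null result here corresponds to the thunk's appendFailure() exits, which is why charCodeAt, charAt, and fromCharCode all pass ctiNativeCall(vm) as their fallback when finalizing.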
-
trunk/Source/JavaScriptCore/jit/ThunkGenerators.h
r138609 r148696
32 32 namespace JSC {
33 33
34 MacroAssemblerCodeRef linkCallGenerator( JSGlobalData*);
35 MacroAssemblerCodeRef linkConstructGenerator( JSGlobalData*);
36 MacroAssemblerCodeRef linkClosureCallGenerator( JSGlobalData*);
37 MacroAssemblerCodeRef virtualCallGenerator( JSGlobalData*);
38 MacroAssemblerCodeRef virtualConstructGenerator( JSGlobalData*);
39 MacroAssemblerCodeRef stringLengthTrampolineGenerator( JSGlobalData*);
40 MacroAssemblerCodeRef nativeCallGenerator( JSGlobalData*);
41 MacroAssemblerCodeRef nativeConstructGenerator( JSGlobalData*);
34 MacroAssemblerCodeRef linkCallGenerator(VM*);
35 MacroAssemblerCodeRef linkConstructGenerator(VM*);
36 MacroAssemblerCodeRef linkClosureCallGenerator(VM*);
37 MacroAssemblerCodeRef virtualCallGenerator(VM*);
38 MacroAssemblerCodeRef virtualConstructGenerator(VM*);
39 MacroAssemblerCodeRef stringLengthTrampolineGenerator(VM*);
40 MacroAssemblerCodeRef nativeCallGenerator(VM*);
41 MacroAssemblerCodeRef nativeConstructGenerator(VM*);
42 42
43 MacroAssemblerCodeRef charCodeAtThunkGenerator( JSGlobalData*);
44 MacroAssemblerCodeRef charAtThunkGenerator( JSGlobalData*);
45 MacroAssemblerCodeRef fromCharCodeThunkGenerator( JSGlobalData*);
46 MacroAssemblerCodeRef absThunkGenerator( JSGlobalData*);
47 MacroAssemblerCodeRef ceilThunkGenerator( JSGlobalData*);
48 MacroAssemblerCodeRef expThunkGenerator( JSGlobalData*);
49 MacroAssemblerCodeRef floorThunkGenerator( JSGlobalData*);
50 MacroAssemblerCodeRef logThunkGenerator( JSGlobalData*);
51 MacroAssemblerCodeRef roundThunkGenerator( JSGlobalData*);
52 MacroAssemblerCodeRef sqrtThunkGenerator( JSGlobalData*);
53 MacroAssemblerCodeRef powThunkGenerator( JSGlobalData*);
43 MacroAssemblerCodeRef charCodeAtThunkGenerator(VM*);
44 MacroAssemblerCodeRef charAtThunkGenerator(VM*);
45 MacroAssemblerCodeRef fromCharCodeThunkGenerator(VM*);
46 MacroAssemblerCodeRef absThunkGenerator(VM*);
47 MacroAssemblerCodeRef ceilThunkGenerator(VM*);
48 MacroAssemblerCodeRef expThunkGenerator(VM*);
49 MacroAssemblerCodeRef floorThunkGenerator(VM*);
50 MacroAssemblerCodeRef logThunkGenerator(VM*);
51 MacroAssemblerCodeRef roundThunkGenerator(VM*);
52 MacroAssemblerCodeRef sqrtThunkGenerator(VM*);
53 MacroAssemblerCodeRef powThunkGenerator(VM*);
54 54
55 55 }
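After this change every declaration in the header has the same shape: a function taking VM* and returning MacroAssemblerCodeRef. That uniform signature is what lets callers treat thunk generators as interchangeable function pointers and cache the generated code per VM, as the vm->jitStubs->ctiNativeCall(vm) calls in the .cpp diff suggest. Below is a hypothetical, standalone sketch of that pattern; ThunkGenerator, getOrGenerateThunk, and the stand-in VM/MacroAssemblerCodeRef structs are invented for the sketch, and the real JSC plumbing may be organized differently.

    #include <cstdio>
    #include <map>

    struct VM; // forward declaration so the generator type can mention it

    struct MacroAssemblerCodeRef { const char* label; };

    // Every generator in the header fits this one function-pointer type.
    typedef MacroAssemblerCodeRef (*ThunkGenerator)(VM*);

    struct VM {
        // One generated thunk per generator, created on first use.
        std::map<ThunkGenerator, MacroAssemblerCodeRef> thunkCache;
    };

    static MacroAssemblerCodeRef sqrtThunkGenerator(VM*) { return { "sqrt thunk" }; }
    static MacroAssemblerCodeRef absThunkGenerator(VM*)  { return { "abs thunk" }; }

    // Generate lazily, then reuse: the same call shape works for every generator.
    static MacroAssemblerCodeRef getOrGenerateThunk(VM& vm, ThunkGenerator generator)
    {
        auto it = vm.thunkCache.find(generator);
        if (it == vm.thunkCache.end())
            it = vm.thunkCache.insert({ generator, generator(&vm) }).first;
        return it->second;
    }

    int main()
    {
        VM vm;
        std::printf("%s\n", getOrGenerateThunk(vm, sqrtThunkGenerator).label); // generated now
        std::printf("%s\n", getOrGenerateThunk(vm, absThunkGenerator).label);  // generated now
        std::printf("%s\n", getOrGenerateThunk(vm, sqrtThunkGenerator).label); // reused from the cache
        return 0;
    }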