Changeset 47186 in webkit for trunk/JavaScriptCore
- Timestamp: Aug 12, 2009, 10:58:36 PM
- Location: trunk/JavaScriptCore
- Files: 12 edited
Legend: in the diffs below, lines prefixed with '+' were added, lines prefixed with '-' were removed, and unprefixed lines are unchanged context.
trunk/JavaScriptCore/ChangeLog
(diff from r47184 to r47186)

+2009-08-12  Gabor Loki  <[email protected]>
+
+        Reviewed by Gavin Barraclough.
+
+        Add optimized call and property access support for the ARM JIT.
+        https://bugs.webkit.org/show_bug.cgi?id=24986
+
+        For tightly coupled sequences, the BEGIN_UNINTERRUPTED_SEQUENCE and
+        END_UNINTERRUPTED_SEQUENCE macros have been introduced; they reserve
+        space for the instructions and constants of the named sequence. This
+        is vital for architectures that use a constant pool.
+
+        The 'latePatch' method - which was linked to JmpSrc - is replaced with
+        a port-specific solution (each call is marked to place its address
+        in the constant pool).
+
+        * assembler/ARMAssembler.cpp:
+        (JSC::ARMAssembler::linkBranch):
+        (JSC::ARMAssembler::executableCopy): Add extra alignment for the constant pool.
+        * assembler/ARMAssembler.h:
+        (JSC::ARMAssembler::JmpSrc::JmpSrc):
+        (JSC::ARMAssembler::sizeOfConstantPool):
+        (JSC::ARMAssembler::jmp):
+        (JSC::ARMAssembler::linkCall):
+        * assembler/ARMv7Assembler.h:
+        * assembler/AbstractMacroAssembler.h:
+        * assembler/AssemblerBufferWithConstantPool.h:
+        (JSC::AssemblerBufferWithConstantPool::flushIfNoSpaceFor): Fix the
+        computation of the remaining space.
+        * assembler/MacroAssemblerARM.h:
+        (JSC::MacroAssemblerARM::branch32):
+        (JSC::MacroAssemblerARM::nearCall):
+        (JSC::MacroAssemblerARM::call):
+        (JSC::MacroAssemblerARM::branchPtrWithPatch):
+        (JSC::MacroAssemblerARM::ensureSpace):
+        (JSC::MacroAssemblerARM::sizeOfConstantPool):
+        (JSC::MacroAssemblerARM::prepareCall):
+        * assembler/X86Assembler.h:
+        * jit/JIT.h:
+        * jit/JITCall.cpp:
+        (JSC::JIT::compileOpCall):
+        * jit/JITInlineMethods.h:
+        (JSC::JIT::beginUninterruptedSequence):
+        (JSC::JIT::endUninterruptedSequence):
+        * jit/JITPropertyAccess.cpp:
+        (JSC::JIT::emit_op_method_check):
+        (JSC::JIT::compileGetByIdHotPath):
+        (JSC::JIT::compileGetByIdSlowCase):
+        (JSC::JIT::emit_op_put_by_id):
+
 2009-08-12  Gavin Barraclough  <[email protected]>
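For orientation, a toy sketch (not part of the changeset): the BEGIN_UNINTERRUPTED_SEQUENCE / END_UNINTERRUPTED_SEQUENCE macros added in jit/JIT.h expand, via token pasting, to beginUninterruptedSequence() / endUninterruptedSequence() calls parameterized by the per-sequence space constants, and the end call asserts in debug builds that the emitted sequence matched the declared sizes. The self-contained sketch below mimics that mechanism with stand-in bookkeeping variables (codeSize, constPoolSize and friends are invented for the illustration; only the macro shapes and the sequenceOpCall constants come from the diff):

    #include <cassert>
    #include <cstdio>

    // Per-sequence space constants, as declared for ARM in jit/JIT.h.
    static const int sequenceOpCallInstructionSpace = 12;
    static const int sequenceOpCallConstantSpace = 2;

    // The macros expand exactly as in jit/JIT.h: the sequence name selects the constants.
    #define BEGIN_UNINTERRUPTED_SEQUENCE(name) beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)
    #define END_UNINTERRUPTED_SEQUENCE(name) endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)

    // Stand-in bookkeeping; the real JIT records a Label and sizeOfConstantPool().
    static int codeSize, constPoolSize;
    static int codeBegin, constBegin;

    static void beginUninterruptedSequence(int insnSpace, int constSpace)
    {
        // The real implementation calls ensureSpace(insnSpace, constSpace) so the
        // constant pool cannot be dumped in the middle of the sequence.
        (void)insnSpace;
        (void)constSpace;
        codeBegin = codeSize;
        constBegin = constPoolSize;
    }

    static void endUninterruptedSequence(int insnSpace, int constSpace)
    {
        // Debug builds assert the emitted sequence matched the declared sizes.
        assert(codeSize - codeBegin == insnSpace);
        assert(constPoolSize - constBegin == constSpace);
    }

    int main()
    {
        BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
        codeSize += 12;      // pretend the patchable compare-and-branch was emitted
        constPoolSize += 2;  // and that it added two constants to the pool
        END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
        std::printf("uninterrupted sequence sizes verified\n");
        return 0;
    }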
trunk/JavaScriptCore/assembler/ARMAssembler.cpp
(diff from r46832 to r47186)

 }
 
-void ARMAssembler::linkBranch(void* code, JmpSrc from, void* to)
+void ARMAssembler::linkBranch(void* code, JmpSrc from, void* to, int useConstantPool)
 {
     ARMWord* insn = reinterpret_cast<ARMWord*>(code) + (from.m_offset / sizeof(ARMWord));
 
-    if (!from.m_latePatch) {
+    if (!useConstantPool) {
         int diff = reinterpret_cast<ARMWord*>(to) - reinterpret_cast<ARMWord*>(insn + 2);
…
 void* ARMAssembler::executableCopy(ExecutablePool* allocator)
 {
+    // 64-bit alignment is required for next constant pool and JIT code as well
+    m_buffer.flushWithoutBarrier(true);
+    if (m_buffer.uncheckedSize() & 0x7)
+        bkpt(0);
+
     char* data = reinterpret_cast<char*>(m_buffer.executableCopy(allocator));
 
     for (Jumps::Iterator iter = m_jumps.begin(); iter != m_jumps.end(); ++iter) {
-        ARMWord* ldrAddr = reinterpret_cast<ARMWord*>(data + *iter);
-        ARMWord* offset = getLdrImmAddress(ldrAddr);
-        if (*offset != 0xffffffff)
-            linkBranch(data, JmpSrc(*iter), data + *offset);
+        // The last bit is set if the constant must be placed on constant pool.
+        int pos = (*iter) & (~0x1);
+        ARMWord* ldrAddr = reinterpret_cast<ARMWord*>(data + pos);
+        ARMWord offset = *getLdrImmAddress(ldrAddr);
+        if (offset != 0xffffffff) {
+            JmpSrc jmpSrc(pos);
+            linkBranch(data, jmpSrc, data + offset, ((*iter) & 1));
+        }
     }
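A note on the jump-record encoding introduced above: because every recorded ldr offset is ARMWord (4-byte) aligned, its low bit is free, so jmp() stores the "place the address in the constant pool" flag there and executableCopy() masks it off again before linking. The standalone sketch below shows the same idiom with hypothetical helper names (tagJumpRecord / untagJumpRecord are illustrative, not WebKit functions):

    #include <cassert>

    // Pack the "needs constant pool" flag into bit 0 of a 4-byte-aligned offset.
    inline int tagJumpRecord(int offset, bool useConstantPool)
    {
        assert(!(offset & 0x1));               // alignment keeps bit 0 unused
        return offset | (useConstantPool ? 0x1 : 0x0);
    }

    // Recover the real offset and the flag when linking.
    inline void untagJumpRecord(int record, int& offset, bool& useConstantPool)
    {
        offset = record & ~0x1;
        useConstantPool = record & 0x1;
    }

    int main()
    {
        int record = tagJumpRecord(64, true);  // as in m_jumps.append(s | (useConstantPool & 0x1))
        int offset;
        bool useConstantPool;
        untagJumpRecord(record, offset, useConstantPool);
        assert(offset == 64 && useConstantPool);
        return 0;
    }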
trunk/JavaScriptCore/assembler/ARMAssembler.h
(diff from r46832 to r47186)

     JmpSrc()
         : m_offset(-1)
-        , m_latePatch(false)
     {
     }
 
-    void enableLatePatch() { m_latePatch = true; }
 private:
     JmpSrc(int offset)
         : m_offset(offset)
-        , m_latePatch(false)
     {
     }
 
-    int m_offset : 31;
-    int m_latePatch : 1;
+    int m_offset;
 };
…
     }
 
+    int sizeOfConstantPool()
+    {
+        return m_buffer.sizeOfConstantPool();
+    }
+
     JmpDst label()
     {
…
     }
 
-    JmpSrc jmp(Condition cc = AL)
-    {
-        int s = size();
+    JmpSrc jmp(Condition cc = AL, int useConstantPool = 0)
+    {
+        ensureSpace(sizeof(ARMWord), sizeof(ARMWord));
+        int s = m_buffer.uncheckedSize();
         ldr_un_imm(ARM::pc, 0xffffffff, cc);
-        m_jumps.append(s);
+        m_jumps.append(s | (useConstantPool & 0x1));
         return JmpSrc(s);
     }
…
 
     static ARMWord* getLdrImmAddress(ARMWord* insn, uint32_t* constPool = 0);
-    static void linkBranch(void* code, JmpSrc from, void* to);
+    static void linkBranch(void* code, JmpSrc from, void* to, int useConstantPool = 0);
 
     static void patchPointerInternal(intptr_t from, void* to)
…
     static void linkCall(void* code, JmpSrc from, void* to)
     {
-        linkBranch(code, from, to);
+        linkBranch(code, from, to, true);
     }
trunk/JavaScriptCore/assembler/ARMv7Assembler.h
(diff from r46247 to r47186)

     }
 
-    void enableLatePatch() { }
 private:
     JmpSrc(int offset)
trunk/JavaScriptCore/assembler/AbstractMacroAssembler.h
(diff from r46831 to r47186)

     }
 
-    void enableLatePatch()
-    {
-        m_jmp.enableLatePatch();
-    }
-
     JmpSrc m_jmp;
 private:
…
     }
 
-    void enableLatePatch()
-    {
-        m_jmp.enableLatePatch();
-    }
-
 private:
     JmpSrc m_jmp;
trunk/JavaScriptCore/assembler/AssemblerBufferWithConstantPool.h
(diff from r46057 to r47186)

 #include <wtf/SegmentedVector.h>
 
+#define ASSEMBLER_HAS_CONSTANT_POOL 1
+
 namespace JSC {
 
…
     }
 
+    int uncheckedSize()
+    {
+        return AssemblerBuffer::size();
+    }
+
     void* executableCopy(ExecutablePool* allocator)
     {
…
 
     // This flushing mechanism can be called after any unconditional jumps.
-    void flushWithoutBarrier()
+    void flushWithoutBarrier(bool isForced = false)
     {
         // Flush if constant pool is more than 60% full to avoid overuse of this function.
-        if (5 * m_numConsts > 3 * maxPoolSize / sizeof(uint32_t))
+        if (isForced || 5 * m_numConsts > 3 * maxPoolSize / sizeof(uint32_t))
             flushConstantPool(false);
     }
…
     {
         return m_pool;
+    }
+
+    int sizeOfConstantPool()
+    {
+        return m_numConsts;
     }
 
…
         if (m_numConsts == 0)
             return;
-        if ((m_maxDistance < nextInsnSize + m_lastConstDelta + barrierSize + (int)sizeof(uint32_t)))
+        int lastConstDelta = m_lastConstDelta > nextInsnSize ? m_lastConstDelta - nextInsnSize : 0;
+        if ((m_maxDistance < nextInsnSize + lastConstDelta + barrierSize + (int)sizeof(uint32_t)))
             flushConstantPool();
     }
…
         if (m_numConsts == 0)
             return;
-        if ((m_maxDistance < nextInsnSize + m_lastConstDelta + barrierSize + (int)sizeof(uint32_t)) ||
-            (m_numConsts + nextConstSize / sizeof(uint32_t) >= maxPoolSize))
+        if ((m_maxDistance < nextInsnSize + m_lastConstDelta + nextConstSize + barrierSize + (int)sizeof(uint32_t)) ||
+            (m_numConsts * sizeof(uint32_t) + nextConstSize >= maxPoolSize))
             flushConstantPool();
     }
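To make the flushIfNoSpaceFor() fix above easier to follow, here is a standalone sketch of the corrected checks; the struct, field comments, and constants are assumptions made for the illustration and are not the actual WebKit class. The intent of the fix appears to be that the instruction-only check no longer counts bytes covered by m_lastConstDelta twice, and the instruction-plus-constant check measures the pool in bytes and includes the incoming constant's size:

    #include <stdint.h>
    #include <cstdio>

    // Assumed capacity and barrier size, purely for the illustration.
    static const int maxPoolSize = 1024;  // pool capacity in bytes
    static const int barrierSize = 4;     // branch emitted to jump over a dumped pool

    struct PoolState {
        int maxDistance;     // bytes left before the oldest pending constant falls out of ldr range
        int lastConstDelta;  // bytes of code emitted since the last constant was added
        int numConsts;       // number of 32-bit constants currently pending
    };

    // Instruction-only variant, mirroring the corrected computation: the overlap
    // between lastConstDelta and the next instruction is not counted twice.
    static bool needsFlush(const PoolState& s, int nextInsnSize)
    {
        if (!s.numConsts)
            return false;
        int lastConstDelta = s.lastConstDelta > nextInsnSize ? s.lastConstDelta - nextInsnSize : 0;
        return s.maxDistance < nextInsnSize + lastConstDelta + barrierSize + (int)sizeof(uint32_t);
    }

    // Instruction-plus-constant variant: the incoming constant consumes both
    // reachable distance and pool capacity, with the pool now measured in bytes.
    static bool needsFlush(const PoolState& s, int nextInsnSize, int nextConstSize)
    {
        if (!s.numConsts)
            return false;
        return s.maxDistance < nextInsnSize + s.lastConstDelta + nextConstSize + barrierSize + (int)sizeof(uint32_t)
            || s.numConsts * (int)sizeof(uint32_t) + nextConstSize >= maxPoolSize;
    }

    int main()
    {
        PoolState s = { 100, 8, 4 };  // assumed state, only for the demo
        std::printf("flush before a 4-byte instruction? %d\n", needsFlush(s, 4));
        std::printf("flush before an instruction plus a 4-byte constant? %d\n", needsFlush(s, 4, 4));
        return 0;
    }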
trunk/JavaScriptCore/assembler/MacroAssemblerARM.h
(diff from r46832 to r47186)

     }
 
-    Jump branch32(Condition cond, RegisterID left, RegisterID right)
+    Jump branch32(Condition cond, RegisterID left, RegisterID right, int useConstantPool = 0)
     {
         m_assembler.cmp_r(left, right);
-        return Jump(m_assembler.jmp(ARMCondition(cond)));
-    }
-
-    Jump branch32(Condition cond, RegisterID left, Imm32 right)
+        return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
+    }
+
+    Jump branch32(Condition cond, RegisterID left, Imm32 right, int useConstantPool = 0)
     {
         if (right.m_isPointer) {
…
         } else
             m_assembler.cmp_r(left, m_assembler.getImm(right.m_value, ARM::S0));
-        return Jump(m_assembler.jmp(ARMCondition(cond)));
+        return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
     }
 
…
     {
         prepareCall();
-        return Call(m_assembler.jmp(), Call::LinkableNear);
+        return Call(m_assembler.jmp(ARMAssembler::AL, true), Call::LinkableNear);
     }
 
…
     {
         prepareCall();
-        return Call(m_assembler.jmp(), Call::Linkable);
+        return Call(m_assembler.jmp(ARMAssembler::AL, true), Call::Linkable);
     }
 
…
     {
         dataLabel = moveWithPatch(initialRightValue, ARM::S1);
-        Jump jump = branch32(cond, left, ARM::S1);
-        jump.enableLatePatch();
+        Jump jump = branch32(cond, left, ARM::S1, true);
         return jump;
     }
…
         load32(left, ARM::S1);
         dataLabel = moveWithPatch(initialRightValue, ARM::S0);
-        Jump jump = branch32(cond, ARM::S0, ARM::S1);
-        jump.enableLatePatch();
+        Jump jump = branch32(cond, ARM::S0, ARM::S1, true);
         return jump;
     }
…
     }
 
+    void ensureSpace(int insnSpace, int constSpace)
+    {
+        m_assembler.ensureSpace(insnSpace, constSpace);
+    }
+
+    int sizeOfConstantPool()
+    {
+        return m_assembler.sizeOfConstantPool();
+    }
+
     void prepareCall()
     {
-        m_assembler.ensureSpace(3 * sizeof(ARMWord), sizeof(ARMWord));
+        ensureSpace(3 * sizeof(ARMWord), sizeof(ARMWord));
 
         // S0 might be used for parameter passing
trunk/JavaScriptCore/assembler/X86Assembler.h
(diff from r46598 to r47186)

     }
 
-    void enableLatePatch() { }
 private:
     JmpSrc(int offset)
trunk/JavaScriptCore/jit/JIT.h
(diff from r46879 to r47186)

     static const int patchOffsetMethodCheckProtoStruct = 28;
     static const int patchOffsetMethodCheckPutFunction = 46;
+#elif PLATFORM(ARM)
+    // These architecture specific value are used to enable patching - see comment on op_put_by_id.
+    static const int patchOffsetPutByIdStructure = 4;
+    static const int patchOffsetPutByIdExternalLoad = 16;
+    static const int patchLengthPutByIdExternalLoad = 4;
+    static const int patchOffsetPutByIdPropertyMapOffset = 20;
+    // These architecture specific value are used to enable patching - see comment on op_get_by_id.
+    static const int patchOffsetGetByIdStructure = 4;
+    static const int patchOffsetGetByIdBranchToSlowCase = 16;
+    static const int patchOffsetGetByIdExternalLoad = 16;
+    static const int patchLengthGetByIdExternalLoad = 4;
+    static const int patchOffsetGetByIdPropertyMapOffset = 20;
+    static const int patchOffsetGetByIdPutResult = 28;
+#if ENABLE(OPCODE_SAMPLING)
+#error "OPCODE_SAMPLING is not yet supported"
+#else
+    static const int patchOffsetGetByIdSlowCaseCall = 36;
+#endif
+    static const int patchOffsetOpCallCompareToJump = 12;
+
+    static const int patchOffsetMethodCheckProtoObj = 12;
+    static const int patchOffsetMethodCheckProtoStruct = 20;
+    static const int patchOffsetMethodCheckPutFunction = 32;
 #endif
 #endif // USE(JSVALUE32_64)
+
+#if PLATFORM(ARM) && !PLATFORM_ARM_ARCH(7)
+    // sequenceOpCall
+    static const int sequenceOpCallInstructionSpace = 12;
+    static const int sequenceOpCallConstantSpace = 2;
+    // sequenceMethodCheck
+    static const int sequenceMethodCheckInstructionSpace = 40;
+    static const int sequenceMethodCheckConstantSpace = 6;
+    // sequenceGetByIdHotPath
+    static const int sequenceGetByIdHotPathInstructionSpace = 28;
+    static const int sequenceGetByIdHotPathConstantSpace = 3;
+    // sequenceGetByIdSlowCase
+    static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
+    static const int sequenceGetByIdSlowCaseConstantSpace = 2;
+    // sequencePutById
+    static const int sequencePutByIdInstructionSpace = 28;
+    static const int sequencePutByIdConstantSpace = 3;
+#endif
+
+#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
+#define BEGIN_UNINTERRUPTED_SEQUENCE(name) beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)
+#define END_UNINTERRUPTED_SEQUENCE(name) endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace)
+
+    void beginUninterruptedSequence(int, int);
+    void endUninterruptedSequence(int, int);
+
+#else
+#define BEGIN_UNINTERRUPTED_SEQUENCE(name)
+#define END_UNINTERRUPTED_SEQUENCE(name)
+#endif
 
     void emit_op_add(Instruction*);
…
     unsigned m_jumpTargetsPosition;
 #endif
+
+#ifndef NDEBUG
+#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
+    Label m_uninterruptedInstructionSequenceBegin;
+    int m_uninterruptedConstantSequenceBegin;
+#endif
+#endif
 } JIT_CLASS_ALIGNMENT;
 } // namespace JSC
trunk/JavaScriptCore/jit/JITCall.cpp
(diff from r46879 to r47186)

     emitGetVirtualRegister(callee, regT2);
     DataLabelPtr addressOfLinkedFunctionCheck;
+
+    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
+
     Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT2, addressOfLinkedFunctionCheck, ImmPtr(JSValue::encode(JSValue())));
+
+    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
+
     addSlowCase(jumpToSlow);
     ASSERT(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow) == patchOffsetOpCallCompareToJump);
trunk/JavaScriptCore/jit/JITInlineMethods.h
(diff from r46831 to r47186)

 }
 
+#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
+
+ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
+{
+#if PLATFORM(ARM) && !PLATFORM_ARM_ARCH(7)
+#ifndef NDEBUG
+    // Ensure the label after the sequence can also fit
+    insnSpace += sizeof(ARMWord);
+    constSpace += sizeof(uint64_t);
+#endif
+
+    ensureSpace(insnSpace, constSpace);
+
+#endif
+
+#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
+#ifndef NDEBUG
+    m_uninterruptedInstructionSequenceBegin = label();
+    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
+#endif
+#endif
+}
+
+ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace)
+{
+#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
+    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) == insnSpace);
+    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin == constSpace);
+#endif
+}
+
+#endif
+
 #if PLATFORM(X86) || PLATFORM(X86_64) || (PLATFORM(ARM) && !PLATFORM_ARM_ARCH(7))
trunk/JavaScriptCore/jit/JITPropertyAccess.cpp
(diff from r46879 to r47186)

     m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_propertyAccessInstructionIndex));
     MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
 
     Jump notCell = emitJumpIfNotJSCell(regT0);
+
+    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
+
     Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), info.structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
     DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(ImmPtr(0), regT1);
…
     // This will be relinked to load the function without doing a load.
     DataLabelPtr putFunction = moveWithPatch(ImmPtr(0), regT0);
+
+    END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
+
     Jump match = jump();
 
…
 
     emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
+
+    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
 
     Label hotPathBegin(this);
…
 
     Label putResult(this);
+
+    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
+
     ASSERT(differenceBetween(hotPathBegin, putResult) == patchOffsetGetByIdPutResult);
 }
…
     linkSlowCaseIfNotJSCell(iter, baseVReg);
     linkSlowCase(iter);
+
+    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
 
 #ifndef NDEBUG
…
     Call call = stubCall.call(resultVReg);
 
+    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
+
     ASSERT(differenceBetween(coldPathBegin, call) == patchOffsetGetByIdSlowCaseCall);
 
…
     // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
     emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
+
+    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
 
     Label hotPathBegin(this);
…
 
     DataLabel32 displacementLabel = storePtrWithAddressOffsetPatch(regT1, Address(regT0, patchGetByIdDefaultOffset));
+
+    END_UNINTERRUPTED_SEQUENCE(sequencePutById);
+
     ASSERT(differenceBetween(hotPathBegin, displacementLabel) == patchOffsetPutByIdPropertyMapOffset);
 }