source: webkit/trunk/JavaScriptCore/jit/JITPropertyAccess.cpp@ 39440

Last change on this file since 39440 was 39440, checked in by [email protected], 16 years ago

2008-12-22 Sam Weinig <[email protected]>

Reviewed by Gavin Barraclough.

Rename all uses of the term "repatch" to "patch".

  • assembler/MacroAssembler.h: (JSC::MacroAssembler::DataLabelPtr::patch): (JSC::MacroAssembler::DataLabel32::patch): (JSC::MacroAssembler::Jump::patch): (JSC::MacroAssembler::PatchBuffer::PatchBuffer): (JSC::MacroAssembler::PatchBuffer::setPtr): (JSC::MacroAssembler::loadPtrWithAddressOffsetPatch): (JSC::MacroAssembler::storePtrWithAddressOffsetPatch): (JSC::MacroAssembler::storePtrWithPatch): (JSC::MacroAssembler::jnePtrWithPatch):
  • assembler/X86Assembler.h: (JSC::X86Assembler::patchAddress): (JSC::X86Assembler::patchImmediate): (JSC::X86Assembler::patchPointer): (JSC::X86Assembler::patchBranchOffset):
  • interpreter/Interpreter.cpp: (JSC::Interpreter::tryCTICachePutByID): (JSC::Interpreter::tryCTICacheGetByID): (JSC::Interpreter::cti_op_put_by_id): (JSC::Interpreter::cti_op_get_by_id): (JSC::Interpreter::cti_op_get_by_id_self_fail): (JSC::Interpreter::cti_op_get_by_id_proto_list): (JSC::Interpreter::cti_vm_dontLazyLinkCall):
  • jit/JIT.cpp: (JSC::ctiPatchCallByReturnAddress): (JSC::JIT::privateCompileMainPass): (JSC::JIT::privateCompile): (JSC::JIT::privateCompileCTIMachineTrampolines):
  • jit/JIT.h:
  • jit/JITCall.cpp: (JSC::JIT::unlinkCall): (JSC::JIT::linkCall): (JSC::JIT::compileOpCall):
  • jit/JITPropertyAccess.cpp: (JSC::JIT::compileGetByIdHotPath): (JSC::JIT::compilePutByIdHotPath): (JSC::JIT::compileGetByIdSlowCase): (JSC::JIT::compilePutByIdSlowCase): (JSC::JIT::privateCompilePutByIdTransition): (JSC::JIT::patchGetByIdSelf): (JSC::JIT::patchPutByIdReplace): (JSC::JIT::privateCompilePatchGetArrayLength): (JSC::JIT::privateCompileGetByIdSelf): (JSC::JIT::privateCompileGetByIdProto): (JSC::JIT::privateCompileGetByIdSelfList): (JSC::JIT::privateCompileGetByIdProtoList): (JSC::JIT::privateCompileGetByIdChainList): (JSC::JIT::privateCompileGetByIdChain): (JSC::JIT::privateCompilePutByIdReplace):
File size: 31.8 KB
/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

#if !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)

void JIT::compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to the array-length / prototype access trampolines), and finally we also set the property-map access offset
    // as a label to jump back to if one of these trampolines finds a match.

    emitGetVirtualRegister(baseVReg, X86::eax);

    emitPutJITStubArg(X86::eax, 1);
    emitPutJITStubArgConstant(ident, 2);
    emitCTICall(Interpreter::cti_op_get_by_id_generic);
    emitPutVirtualRegister(resultVReg);
}


void JIT::compileGetByIdSlowCase(int, int, Identifier*, Vector<SlowCaseEntry>::iterator&, unsigned)
{
    ASSERT_NOT_REACHED();
}

void JIT::compilePutByIdHotPath(int baseVReg, Identifier* ident, int valueVReg, unsigned)
{
    // In order to be able to patch both the Structure and the object offset, we store a single pointer, 'hotPathBegin',
    // to just after the arguments have been loaded into registers, and we generate code such that the Structure and
    // offset are always at the same distance from it.

    emitGetVirtualRegisters(baseVReg, X86::eax, valueVReg, X86::edx);

    emitPutJITStubArgConstant(ident, 2);
    emitPutJITStubArg(X86::eax, 1);
    emitPutJITStubArg(X86::edx, 3);
    emitCTICall(Interpreter::cti_op_put_by_id_generic);
}

void JIT::compilePutByIdSlowCase(int, Identifier*, int, Vector<SlowCaseEntry>::iterator&, unsigned)
{
    ASSERT_NOT_REACHED();
}

#else

void JIT::compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier*, unsigned propertyAccessInstructionIndex)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to the array-length / prototype access trampolines), and finally we also set the property-map access offset
    // as a label to jump back to if one of these trampolines finds a match.

    emitGetVirtualRegister(baseVReg, X86::eax);

    emitJumpSlowCaseIfNotJSCell(X86::eax, baseVReg);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;

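    // patchGetByIdDefaultStructure is a placeholder value that should never match a live Structure,
    // so this inline comparison keeps taking the slow case until the cache is primed by patching a
    // real Structure pointer over the immediate (see patchGetByIdSelf and the stub generators below).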
    DataLabelPtr structureToCompare;
    Jump structureCheck = jnePtrWithPatch(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetGetByIdStructure);
    ASSERT(differenceBetween(hotPathBegin, structureCheck) == patchOffsetGetByIdBranchToSlowCase);

    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    DataLabel32 displacementLabel = loadPtrWithAddressOffsetPatch(Address(X86::eax, patchGetByIdDefaultOffset), X86::eax);
    ASSERT(differenceBetween(hotPathBegin, displacementLabel) == patchOffsetGetByIdPropertyMapOffset);

    Label putResult(this);
    ASSERT(differenceBetween(hotPathBegin, putResult) == patchOffsetGetByIdPutResult);
    emitPutVirtualRegister(resultVReg);
}


void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture-specific offset
    // so that we only need to track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

#ifndef NDEBUG
    Label coldPathBegin(this);
#endif
    emitPutJITStubArg(X86::eax, 1);
    emitPutJITStubArgConstant(ident, 2);
    Jump call = emitCTICall(Interpreter::cti_op_get_by_id);
    emitPutVirtualRegister(resultVReg);

    ASSERT(differenceBetween(coldPathBegin, call) == patchOffsetGetByIdSlowCaseCall);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
}

void JIT::compilePutByIdHotPath(int baseVReg, Identifier*, int valueVReg, unsigned propertyAccessInstructionIndex)
{
    // In order to be able to patch both the Structure and the object offset, we store a single pointer, 'hotPathBegin',
    // to just after the arguments have been loaded into registers, and we generate code such that the Structure and
    // offset are always at the same distance from it.

    emitGetVirtualRegisters(baseVReg, X86::eax, valueVReg, X86::edx);

    // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
    emitJumpSlowCaseIfNotJSCell(X86::eax, baseVReg);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;

    // It is important that the following instruction plants a 32-bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(jnePtrWithPatch(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetPutByIdStructure);

    // Plant a store to a bogus offset in the object's property map; we will patch this later, if it is to be used.
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    DataLabel32 displacementLabel = storePtrWithAddressOffsetPatch(X86::edx, Address(X86::eax, patchGetByIdDefaultOffset));
    ASSERT(differenceBetween(hotPathBegin, displacementLabel) == patchOffsetPutByIdPropertyMapOffset);
}

void JIT::compilePutByIdSlowCase(int baseVReg, Identifier* ident, int, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex)
{
    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    emitPutJITStubArgConstant(ident, 2);
    emitPutJITStubArg(X86::eax, 1);
    emitPutJITStubArg(X86::edx, 3);
    Jump call = emitCTICall(Interpreter::cti_op_put_by_id);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
}

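// Called from the code generated by privateCompilePutByIdTransition (below) when a transition
// requires the property storage to be reallocated. Returning the base object keeps its pointer in
// the return register (eax), which the generated code continues to use after the call.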
static JSValue* resizePropertyStorage(JSObject* baseObject, int32_t oldSize, int32_t newSize)
{
    baseObject->allocatePropertyStorage(oldSize, newSize);
    return baseObject;
}

static inline bool transitionWillNeedStorageRealloc(Structure* oldStructure, Structure* newStructure)
{
    return oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
}

void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, void* returnAddress)
{
    JumpList failureCases;
    // Check eax is an object of the right Structure.
    failureCases.append(jnz32(X86::eax, Imm32(JSImmediate::TagMask)));
    failureCases.append(jnePtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), ImmPtr(oldStructure)));
    JumpList successCases;

    // ecx = baseObject->structure()
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
    // Check that the base is an object, then ecx = baseObject->structure()->prototype()
    failureCases.append(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));

    loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);

    // Walk the prototype chain, checking that each prototype's Structure is unchanged.
    for (RefPtr<Structure>* it = chain->head(); *it; ++it) {
        // null check the prototype
        successCases.append(jePtr(X86::ecx, ImmPtr(jsNull())));

        // Check the structure id
        failureCases.append(jnePtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), ImmPtr(it->get())));

        loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
        failureCases.append(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));
        loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);
    }

    successCases.link(this);

    Jump callTarget;

    // emit a call only if storage realloc is needed
    if (transitionWillNeedStorageRealloc(oldStructure, newStructure)) {
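        // Pop the CTI return address into ebx so the stack is set up for a plain C call, pass
        // (object, oldSize, newSize) to resizePropertyStorage using the platform calling convention,
        // then restore the return address. The value operand is reloaded into edx from the stub
        // arguments afterwards, since the call may have clobbered it.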
        pop(X86::ebx);
#if PLATFORM(X86_64)
        move(Imm32(newStructure->propertyStorageCapacity()), X86::edx);
        move(Imm32(oldStructure->propertyStorageCapacity()), X86::esi);
        move(X86::eax, X86::edi);
        callTarget = call();
#else
        push(Imm32(newStructure->propertyStorageCapacity()));
        push(Imm32(oldStructure->propertyStorageCapacity()));
        push(X86::eax);
        callTarget = call();
        addPtr(Imm32(3 * sizeof(void*)), X86::esp);
#endif
        emitGetJITStubArg(3, X86::edx);
        push(X86::ebx);
    }

    // The refcount decrement is safe because the CodeBlock ensures oldStructure->m_refCount > 0
    // for as long as this stub may run.
    sub32(Imm32(1), AbsoluteAddress(oldStructure->addressOfCount()));
    add32(Imm32(1), AbsoluteAddress(newStructure->addressOfCount()));
    storePtr(ImmPtr(newStructure), Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)));

    // write the value
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    storePtr(X86::edx, Address(X86::eax, cachedOffset * sizeof(JSValue*)));

    ret();

    Jump failureJump;
    bool plantedFailureJump = false;
    if (!failureCases.empty()) {
        failureCases.link(this);
        restoreArgumentReferenceForTrampoline();
        failureJump = jump();
        plantedFailureJump = true;
    }

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    if (plantedFailureJump)
        patchBuffer.link(failureJump, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));

    if (transitionWillNeedStorageRealloc(oldStructure, newStructure))
        patchBuffer.link(callTarget, reinterpret_cast<void*>(resizePropertyStorage));

    stubInfo->stubRoutine = code;

    Jump::patch(returnAddress, code);
}

void JIT::patchGetByIdSelf(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to Interpreter::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));

    // Patch the offset into the property map to load from, then patch the Structure to look for.
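    // These constant offsets from hotPathBegin are guaranteed by the ASSERT(differenceBetween(...))
    // checks in compileGetByIdHotPath, so we can locate the immediates to overwrite directly.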
    void* structureAddress = reinterpret_cast<void*>(reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdStructure);
    void* displacementAddress = reinterpret_cast<void*>(reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPropertyMapOffset);
    DataLabelPtr::patch(structureAddress, structure);
    DataLabel32::patch(displacementAddress, cachedOffset * sizeof(JSValue*));
}

void JIT::patchPutByIdReplace(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to Interpreter::cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_generic));

    // Patch the offset into the property map to store to, then patch the Structure to look for.
    void* structureAddress = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetPutByIdStructure;
    void* displacementAddress = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetPutByIdPropertyMapOffset;
    DataLabelPtr::patch(structureAddress, structure);
    DataLabel32::patch(displacementAddress, cachedOffset * sizeof(JSValue*));
}

void JIT::privateCompilePatchGetArrayLength(void* returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));

    // Check eax is an array
    Jump failureCases1 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
    loadPtr(Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_length)), X86::ecx);

    Jump failureCases2 = ja32(X86::ecx, Imm32(JSImmediate::maxImmediateInt));

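    // Box the length as an immediate integer: (length << 1) | 1 produces the tagged JSImmediate
    // encoding, and the ja32 check above ensures the value fits in an immediate int.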
    add32(X86::ecx, X86::ecx);
    add32(Imm32(1), X86::ecx);
    signExtend32ToPtr(X86::ecx, X86::eax);
    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* slowCaseBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - patchOffsetGetByIdSlowCaseCall;
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    void* hotPathPutResult = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, hotPathPutResult);

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = code;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
}

void JIT::privateCompileGetByIdSelf(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // Check eax is an object of the right Structure.
    Jump failureCases1 = jnz32(X86::eax, Imm32(JSImmediate::TagMask));
    Jump failureCases2 = checkStructure(X86::eax, structure);

    // Checks out okay! - getDirectOffset
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    loadPtr(Address(X86::eax, cachedOffset * sizeof(JSValue*)), X86::eax);
    ret();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    patchBuffer.link(failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));
    patchBuffer.link(failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));

    stubInfo->stubRoutine = code;

    Jump::patch(returnAddress, code);
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, void* returnAddress, CallFrame* callFrame)
{
#if USE(CTI_REPATCH_PIC)
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_list));

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(static_cast<void*>(protoPropertyStorage), X86::edx);

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure has not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
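    // On x86-64 there is no instruction form comparing a 64-bit immediate directly against memory,
    // so the expected Structure pointer is first materialized in a register.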
#if PLATFORM(X86_64)
    move(ImmPtr(prototypeStructure), X86::ebx);
    Jump failureCases2 = jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress));
#else
    Jump failureCases2 = jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
#endif

    // Checks out okay! - getDirectOffset
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValue*)), X86::eax);

    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* slowCaseBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - patchOffsetGetByIdSlowCaseCall;
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = code;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
#else
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(protoPropertyStorage, X86::edx);

    // Check eax is an object of the right Structure.
    Jump failureCases1 = jne32(X86::eax, Imm32(JSImmediate::TagMask));
    Jump failureCases2 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure has not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
    Jump failureCases3 = jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));

    // Checks out okay! - getDirectOffset
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValue*)), X86::eax);

    ret();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    patchBuffer.link(failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));
    patchBuffer.link(failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));
    patchBuffer.link(failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));

    stubInfo->stubRoutine = code;

    Jump::patch(returnAddress, code);
#endif
}

#if USE(CTI_REPATCH_PIC)
void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
{
    Jump failureCase = checkStructure(X86::eax, structure);
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    loadPtr(Address(X86::eax, cachedOffset * sizeof(JSValue*)), X86::eax);
    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    ASSERT(code);
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* lastProtoBegin = polymorphicStructures->list[currentIndex - 1].stubRoutine;
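    // For the first entry in the self list there is no previous stub, so failures fall back to
    // the original slow case call site.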
    if (!lastProtoBegin)
        lastProtoBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - patchOffsetGetByIdSlowCaseCall;

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    structure->ref();
    polymorphicStructures->list[currentIndex].set(code, structure);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(protoPropertyStorage, X86::edx);

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure has not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
#if PLATFORM(X86_64)
    move(ImmPtr(prototypeStructure), X86::ebx);
    Jump failureCases2 = jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress));
#else
    Jump failureCases2 = jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
#endif

    // Checks out okay! - getDirectOffset
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValue*)), X86::eax);

    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    structure->ref();
    prototypeStructure->ref();
    prototypeStructures->list[currentIndex].set(code, structure, prototypeStructure);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame)
{
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    Jump baseObjectCheck = checkStructure(X86::eax, structure);
    bucketsOfFail.append(baseObjectCheck);

    Structure* currStructure = structure;
    RefPtr<Structure>* chainEntries = chain->head();
    JSObject* protoObject = 0;
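    // Walk the prototype chain: each iteration emits a check that the cached Structure of the
    // next prototype is unchanged, so any mutation along the chain falls back to the slow case.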
    for (unsigned i = 0; i < count; ++i) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = chainEntries[i].get();

        // Check the prototype object's Structure has not changed.
        Structure** prototypeStructureAddress = &(protoObject->m_structure);
#if PLATFORM(X86_64)
        move(ImmPtr(currStructure), X86::ebx);
        bucketsOfFail.append(jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress)));
#else
        bucketsOfFail.append(jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
#endif
    }
    ASSERT(protoObject);

    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(protoPropertyStorage, X86::edx);
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValue*)), X86::eax);
    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    structure->ref();
    chain->ref();
    prototypeStructures->list[currentIndex].set(code, structure, chain);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
}
#endif

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, void* returnAddress, CallFrame* callFrame)
{
#if USE(CTI_REPATCH_PIC)
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_list));

    ASSERT(count);

    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    bucketsOfFail.append(checkStructure(X86::eax, structure));

    Structure* currStructure = structure;
    RefPtr<Structure>* chainEntries = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = chainEntries[i].get();

        // Check the prototype object's Structure has not changed.
        Structure** prototypeStructureAddress = &(protoObject->m_structure);
#if PLATFORM(X86_64)
        move(ImmPtr(currStructure), X86::ebx);
        bucketsOfFail.append(jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress)));
#else
        bucketsOfFail.append(jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
#endif
    }
    ASSERT(protoObject);

    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(protoPropertyStorage, X86::edx);
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValue*)), X86::eax);
    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* slowCaseBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - patchOffsetGetByIdSlowCaseCall;

    patchBuffer.link(bucketsOfFail, slowCaseBegin);

    // On success return back to the hot path code, at a point where it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = code;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
#else
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    bucketsOfFail.append(jne32(X86::eax, Imm32(JSImmediate::TagMask)));
    bucketsOfFail.append(checkStructure(X86::eax, structure));

    Structure* currStructure = structure;
    RefPtr<Structure>* chainEntries = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = chainEntries[i].get();

        // Check the prototype object's Structure has not changed.
        Structure** prototypeStructureAddress = &(protoObject->m_structure);
#if PLATFORM(X86_64)
        move(ImmPtr(currStructure), X86::ebx);
        bucketsOfFail.append(jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress)));
#else
        bucketsOfFail.append(jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
#endif
    }
    ASSERT(protoObject);

    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(protoPropertyStorage, X86::edx);
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValue*)), X86::eax);
    ret();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    patchBuffer.link(bucketsOfFail, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));

    stubInfo->stubRoutine = code;

    Jump::patch(returnAddress, code);
#endif
}

void JIT::privateCompilePutByIdReplace(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // Check eax is an object of the right Structure.
    Jump failureCases1 = jne32(X86::eax, Imm32(JSImmediate::TagMask));
    Jump failureCases2 = checkStructure(X86::eax, structure);

    // checks out okay! - putDirectOffset
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    storePtr(X86::edx, Address(X86::eax, cachedOffset * sizeof(JSValue*)));
    ret();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    patchBuffer.link(failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));
    patchBuffer.link(failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));

    stubInfo->stubRoutine = code;

    Jump::patch(returnAddress, code);
}

#endif

} // namespace JSC

#endif // ENABLE(JIT)