source: webkit/trunk/JavaScriptCore/jit/JITPropertyAccess.cpp@ 39070

Last change on this file since 39070 was 39070, checked in by [email protected], 16 years ago

2008-12-05 Sam Weinig <[email protected]>

Reviewed by Cameron Zwarich.

Fix for https://bugs.webkit.org/show_bug.cgi?id=22715
Encapsulate more CodeBlock members in preparation
of moving some of them to a rare data structure.

  • bytecode/CodeBlock.cpp: (JSC::locationForOffset): (JSC::printConditionalJump): (JSC::printGetByIdOp): (JSC::printPutByIdOp): (JSC::CodeBlock::printStructure): (JSC::CodeBlock::printStructures): (JSC::CodeBlock::dump): (JSC::CodeBlock::~CodeBlock): (JSC::CodeBlock::unlinkCallers): (JSC::CodeBlock::derefStructures): (JSC::CodeBlock::refStructures): (JSC::CodeBlock::mark): (JSC::CodeBlock::getHandlerForVPC): (JSC::CodeBlock::nativeExceptionCodeForHandlerVPC): (JSC::CodeBlock::lineNumberForVPC): (JSC::CodeBlock::expressionRangeForVPC): (JSC::CodeBlock::shrinkToFit):
  • bytecode/CodeBlock.h: (JSC::CodeBlock::CodeBlock): (JSC::CodeBlock::addCaller): (JSC::CodeBlock::removeCaller): (JSC::CodeBlock::isKnownNotImmediate): (JSC::CodeBlock::isConstantRegisterIndex): (JSC::CodeBlock::getConstant): (JSC::CodeBlock::isTemporaryRegisterIndex): (JSC::CodeBlock::getStubInfo): (JSC::CodeBlock::getCallLinkInfo): (JSC::CodeBlock::instructions): (JSC::CodeBlock::setJITCode): (JSC::CodeBlock::jitCode): (JSC::CodeBlock::ownerNode): (JSC::CodeBlock::setGlobalData): (JSC::CodeBlock::setThisRegister): (JSC::CodeBlock::thisRegister): (JSC::CodeBlock::setNeedsFullScopeChain): (JSC::CodeBlock::needsFullScopeChain): (JSC::CodeBlock::setUsesEval): (JSC::CodeBlock::usesEval): (JSC::CodeBlock::setUsesArguments): (JSC::CodeBlock::usesArguments): (JSC::CodeBlock::codeType): (JSC::CodeBlock::source): (JSC::CodeBlock::sourceOffset): (JSC::CodeBlock::addGlobalResolveInstruction): (JSC::CodeBlock::numberOfPropertyAccessInstructions): (JSC::CodeBlock::addPropertyAccessInstruction): (JSC::CodeBlock::propertyAccessInstruction): (JSC::CodeBlock::numberOfCallLinkInfos): (JSC::CodeBlock::addCallLinkInfo): (JSC::CodeBlock::callLinkInfo): (JSC::CodeBlock::numberOfJumpTargets): (JSC::CodeBlock::addJumpTarget): (JSC::CodeBlock::jumpTarget): (JSC::CodeBlock::lastJumpTarget): (JSC::CodeBlock::numberOfExceptionHandlers): (JSC::CodeBlock::addExceptionHandler): (JSC::CodeBlock::exceptionHandler): (JSC::CodeBlock::addExpressionInfo): (JSC::CodeBlock::numberOfLineInfos): (JSC::CodeBlock::addLineInfo): (JSC::CodeBlock::lastLineInfo): (JSC::CodeBlock::jitReturnAddressVPCMap): (JSC::CodeBlock::numberOfIdentifiers): (JSC::CodeBlock::addIdentifier): (JSC::CodeBlock::identifier): (JSC::CodeBlock::numberOfConstantRegisters): (JSC::CodeBlock::addConstantRegister): (JSC::CodeBlock::constantRegister): (JSC::CodeBlock::addFunction): (JSC::CodeBlock::function): (JSC::CodeBlock::addFunctionExpression): (JSC::CodeBlock::functionExpression): (JSC::CodeBlock::addUnexpectedConstant): 
(JSC::CodeBlock::unexpectedConstant): (JSC::CodeBlock::addRegExp): (JSC::CodeBlock::regexp): (JSC::CodeBlock::symbolTable): (JSC::CodeBlock::evalCodeCache): New inline setters/getters.

(JSC::ProgramCodeBlock::ProgramCodeBlock):
(JSC::ProgramCodeBlock::~ProgramCodeBlock):
(JSC::ProgramCodeBlock::clearGlobalObject):

  • bytecode/SamplingTool.cpp: (JSC::ScopeSampleRecord::sample): (JSC::SamplingTool::dump):
  • bytecompiler/BytecodeGenerator.cpp:
  • bytecompiler/BytecodeGenerator.h:
  • bytecompiler/Label.h:
  • interpreter/CallFrame.cpp:
  • interpreter/Interpreter.cpp:
  • jit/JIT.cpp:
  • jit/JITCall.cpp:
  • jit/JITInlineMethods.h:
  • jit/JITPropertyAccess.cpp:
  • parser/Nodes.cpp:
  • runtime/Arguments.h:
  • runtime/ExceptionHelpers.cpp:
  • runtime/JSActivation.cpp:
  • runtime/JSActivation.h:
  • runtime/JSGlobalObject.cpp: Change direct access to use new getter/setters.
File size: 32.9 KB
Line 
1/*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "JIT.h"
28
29#if ENABLE(JIT)
30
31#include "CodeBlock.h"
32#include "JITInlineMethods.h"
33#include "JSArray.h"
34#include "JSFunction.h"
35#include "Interpreter.h"
36#include "ResultType.h"
37#include "SamplingTool.h"
38
39#ifndef NDEBUG
40#include <stdio.h>
41#endif
42
43using namespace std;
44
45namespace JSC {
46
47#if !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
48
49void JIT::compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned i, unsigned propertyAccessInstructionIndex)
50{
51 // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be repatched.
52 // Additionally, for get_by_id we need repatch the offset of the branch to the slow case (we repatch this to jump
53 // to array-length / prototype access tranpolines, and finally we also the the property-map access offset as a label
54 // to jump back to if one of these trampolies finds a match.
55
56 emitGetVirtualRegister(baseVReg, X86::eax, i);
57
58#ifdef NDEBUG
59 UNUSED_PARAM(propertyAccessInstructionIndex);
60#endif
61 ASSERT(m_codeBlock->propertyAccessInstructions[propertyAccessInstructionIndex].bytecodeIndex == i);
62
63#ifndef NDEBUG
64 JmpDst coldPathBegin = __ label();
65#endif
66 emitPutCTIArg(X86::eax, 0);
67 emitPutCTIArgConstant(reinterpret_cast<unsigned>(ident), 4);
68 JmpSrc call = emitCTICall(i, Interpreter::cti_op_get_by_id_generic);
69 ASSERT(X86Assembler::getDifferenceBetweenLabels(coldPathBegin, call) == repatchOffsetGetByIdSlowCaseCall);
70 emitPutVirtualRegister(resultVReg);
71
72 // Track the location of the call; this will be used to recover repatch information.
73 ASSERT(m_codeBlock->propertyAccessInstructions[propertyAccessInstructionIndex].bytecodeIndex == i);
74 m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
75}
76
77
// With JIT_OPTIMIZE_PROPERTY_ACCESS disabled the hot path above always calls the
// generic stub and registers no slow cases, so this must never be reached.
void JIT::compileGetByIdSlowCase(int, int, Identifier*, unsigned, Vector<SlowCaseEntry>::iterator&, unsigned)
{
    ASSERT_NOT_REACHED();
}
82
83void JIT::compilePutByIdHotPath(int baseVReg, Identifier* ident, int valueVReg, unsigned i, unsigned propertyAccessInstructionIndex)
84{
85 // In order to be able to repatch both the Structure, and the object offset, we store one pointer,
86 // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
87 // such that the Structure & offset are always at the same distance from this.
88
89 emitGetVirtualRegisters(baseVReg, X86::eax, valueVReg, X86::edx, i);
90
91 emitPutCTIArgConstant(reinterpret_cast<unsigned>(ident), 4);
92 emitPutCTIArg(X86::eax, 0);
93 emitPutCTIArg(X86::edx, 8);
94 JmpSrc call = emitCTICall(i, Interpreter::cti_op_put_by_id_generic);
95
96 // Track the location of the call; this will be used to recover repatch information.
97 ASSERT(m_codeBlock->propertyAccessInstructions[propertyAccessInstructionIndex].bytecodeIndex == i);
98 m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
99}
100
// With JIT_OPTIMIZE_PROPERTY_ACCESS disabled the hot path above always calls the
// generic stub and registers no slow cases, so this must never be reached.
void JIT::compilePutByIdSlowCase(int, Identifier*, int, unsigned, Vector<SlowCaseEntry>::iterator&, unsigned)
{
    ASSERT_NOT_REACHED();
}
105
106#else
107
// Optimized get_by_id hot path: emits a repatchable inline cache (Structure
// guard + property-storage load). The Identifier is unused here; it is only
// needed by the slow case, which performs the actual generic lookup.
void JIT::compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier*, unsigned i, unsigned propertyAccessInstructionIndex)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be repatched.
    // Additionally, for get_by_id we need to repatch the offset of the branch to the slow case (we repatch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as a label
    // to jump back to if one of these trampolines finds a match.

    emitGetVirtualRegister(baseVReg, X86::eax, i);

    ASSERT(m_codeBlock->propertyAccessInstruction(propertyAccessInstructionIndex).bytecodeIndex == i);

    // Immediates (non-cells) can never hit the cache; send them to the slow case.
    emitJumpSlowCaseIfNotJSCell(X86::eax, i, baseVReg);

    // hotPathBegin is the anchor from which every repatchable field below is measured.
    JmpDst hotPathBegin = __ label();
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;

    // Compare against a placeholder Structure; repatched with the real Structure when the cache is primed.
    __ cmpl_i32m(repatchGetByIdDefaultStructure, FIELD_OFFSET(JSCell, m_structure), X86::eax);
    ASSERT(X86Assembler::getDifferenceBetweenLabels(hotPathBegin, __ label()) == repatchOffsetGetByIdStructure);
    m_slowCases.append(SlowCaseEntry(__ jne(), i));
    ASSERT(X86Assembler::getDifferenceBetweenLabels(hotPathBegin, __ label()) == repatchOffsetGetByIdBranchToSlowCase);

    // Load from a placeholder property-storage offset; repatched once the real offset is known.
    __ movl_mr(FIELD_OFFSET(JSObject, m_propertyStorage), X86::eax, X86::eax);
    __ movl_mr(repatchGetByIdDefaultOffset, X86::eax, X86::eax);
    ASSERT(X86Assembler::getDifferenceBetweenLabels(hotPathBegin, __ label()) == repatchOffsetGetByIdPropertyMapOffset);
    emitPutVirtualRegister(resultVReg);
}
134
135
// Slow case for an inline-cached get_by_id: reached when the cell check or the
// Structure guard in the hot path fails; calls the cti_op_get_by_id C++ stub.
void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, unsigned i, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
    // so that we only need track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the repatch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.

    // If the base might be an immediate, the hot path registered an extra slow-case entry; link it first.
    if (linkSlowCaseIfNotJSCell(iter, baseVReg))
        ++iter;
    __ link(iter->from, __ label());

#ifndef NDEBUG
    JmpDst coldPathBegin = __ label();
#endif
    emitPutCTIArg(X86::eax, 0);
    emitPutCTIArgConstant(reinterpret_cast<unsigned>(ident), 4);
    JmpSrc call = emitCTICall(i, Interpreter::cti_op_get_by_id);
    // Trampolines compute (call location - repatchOffsetGetByIdSlowCaseCall) to find this
    // slow case, so the distance from coldPathBegin to the call must equal that constant.
    ASSERT(X86Assembler::getDifferenceBetweenLabels(coldPathBegin, call) == repatchOffsetGetByIdSlowCaseCall);
    emitPutVirtualRegister(resultVReg);

    // Track the location of the call; this will be used to recover repatch information.
    ASSERT(m_codeBlock->propertyAccessInstruction(propertyAccessInstructionIndex).bytecodeIndex == i);
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
}
161
// Optimized put_by_id hot path: emits a repatchable inline cache (Structure
// guard + property-storage store). The Identifier is unused here; it is only
// needed by the slow case, which performs the actual generic put.
void JIT::compilePutByIdHotPath(int baseVReg, Identifier*, int valueVReg, unsigned i, unsigned propertyAccessInstructionIndex)
{
    // In order to be able to repatch both the Structure, and the object offset, we store one pointer,
    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
    // such that the Structure & offset are always at the same distance from this.

    emitGetVirtualRegisters(baseVReg, X86::eax, valueVReg, X86::edx, i);

    ASSERT(m_codeBlock->propertyAccessInstruction(propertyAccessInstructionIndex).bytecodeIndex == i);

    // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
    emitJumpSlowCaseIfNotJSCell(X86::eax, i, baseVReg);

    // hotPathBegin anchors the repatchable fields emitted below.
    JmpDst hotPathBegin = __ label();
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;

    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
    __ cmpl_i32m(repatchGetByIdDefaultStructure, FIELD_OFFSET(JSCell, m_structure), X86::eax);
    ASSERT(X86Assembler::getDifferenceBetweenLabels(hotPathBegin, __ label()) == repatchOffsetPutByIdStructure);
    m_slowCases.append(SlowCaseEntry(__ jne(), i));

    // Plant a store to a bogus offset in the object's property map; we will patch this later, if it is to be used.
    __ movl_mr(FIELD_OFFSET(JSObject, m_propertyStorage), X86::eax, X86::eax);
    __ movl_rm(X86::edx, repatchGetByIdDefaultOffset, X86::eax);
    ASSERT(X86Assembler::getDifferenceBetweenLabels(hotPathBegin, __ label()) == repatchOffsetPutByIdPropertyMapOffset);
}
188
// Slow case for an inline-cached put_by_id: reached when the cell check or the
// Structure guard in the hot path fails; calls the cti_op_put_by_id C++ stub.
// The value virtual register is unused here - the value is already in edx from
// the hot path.
void JIT::compilePutByIdSlowCase(int baseVReg, Identifier* ident, int, unsigned i, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex)
{
    // If the base might be an immediate, the hot path registered an extra slow-case entry; link it first.
    if (linkSlowCaseIfNotJSCell(iter, baseVReg))
        ++iter;
    __ link(iter->from, __ label());

    emitPutCTIArgConstant(reinterpret_cast<unsigned>(ident), 4);
    emitPutCTIArg(X86::eax, 0);
    emitPutCTIArg(X86::edx, 8);
    JmpSrc call = emitCTICall(i, Interpreter::cti_op_put_by_id);

    // Track the location of the call; this will be used to recover repatch information.
    ASSERT(m_codeBlock->propertyAccessInstruction(propertyAccessInstructionIndex).bytecodeIndex == i);
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
}
204
205#endif
206
207static JSObject* resizePropertyStorage(JSObject* baseObject, size_t oldSize, size_t newSize)
208{
209 baseObject->allocatePropertyStorageInline(oldSize, newSize);
210 return baseObject;
211}
212
213static inline bool transitionWillNeedStorageRealloc(Structure* oldStructure, Structure* newStructure)
214{
215 return oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
216}
217
// Compiles a stub for a put_by_id that adds a property, transitioning the object
// from oldStructure to newStructure. The stub guards the base object's Structure
// and the whole prototype chain, reallocates property storage if the transition
// crosses a capacity boundary, swaps in the new Structure, and stores the value.
// On any guard failure it jumps to cti_op_put_by_id_fail.
void JIT::privateCompilePutByIdTransition(Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, void* returnAddress)
{
    Vector<JmpSrc, 16> failureCases;
    // Check eax is an object of the right Structure.
    __ testl_i32r(JSImmediate::TagMask, X86::eax);
    failureCases.append(__ jne());
    __ cmpl_i32m(reinterpret_cast<uint32_t>(oldStructure), FIELD_OFFSET(JSCell, m_structure), X86::eax);
    failureCases.append(__ jne());
    Vector<JmpSrc> successCases;

    // ecx = baseObject
    __ movl_mr(FIELD_OFFSET(JSCell, m_structure), X86::eax, X86::ecx);
    // proto(ecx) = baseObject->structure()->prototype()
    __ cmpl_i32m(ObjectType, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type), X86::ecx);
    failureCases.append(__ jne());
    __ movl_mr(FIELD_OFFSET(Structure, m_prototype), X86::ecx, X86::ecx);

    // Walk the cached prototype chain: each prototype must still have the Structure
    // recorded at cache time; reaching null means the whole chain checked out.
    // ecx = baseObject->m_structure
    for (RefPtr<Structure>* it = chain->head(); *it; ++it) {
        // null check the prototype
        __ cmpl_i32r(asInteger(jsNull()), X86::ecx);
        successCases.append(__ je());

        // Check the structure id
        __ cmpl_i32m(reinterpret_cast<uint32_t>(it->get()), FIELD_OFFSET(JSCell, m_structure), X86::ecx);
        failureCases.append(__ jne());

        __ movl_mr(FIELD_OFFSET(JSCell, m_structure), X86::ecx, X86::ecx);
        __ cmpl_i32m(ObjectType, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type), X86::ecx);
        failureCases.append(__ jne());
        __ movl_mr(FIELD_OFFSET(Structure, m_prototype), X86::ecx, X86::ecx);
    }

    // NOTE(review): this jne reuses the flags from the last compare emitted above and
    // looks like a stray duplicate of a jump already appended inside the loop (or the
    // type-info check when the chain is empty) - TODO confirm it is intentional.
    failureCases.append(__ jne());
    for (unsigned i = 0; i < successCases.size(); ++i)
        __ link(successCases[i], __ label());

    JmpSrc callTarget;

    // emit a call only if storage realloc is needed
    if (transitionWillNeedStorageRealloc(oldStructure, newStructure)) {
        // Preserve edx (the value to store), push (object, oldSize, newSize) as cdecl
        // arguments, and call out; the target (resizePropertyStorage) is linked below,
        // after executableCopy().
        __ pushl_r(X86::edx);
        __ pushl_i32(newStructure->propertyStorageCapacity());
        __ pushl_i32(oldStructure->propertyStorageCapacity());
        __ pushl_r(X86::eax);
        callTarget = __ call();
        __ addl_i32r(3 * sizeof(void*), X86::esp);
        __ popl_r(X86::edx);
    }

    // Assumes m_refCount can be decremented easily, refcount decrement is safe as
    // codeblock should ensure oldStructure->m_refCount > 0
    // (the raw Structure pointer is used as the address of the refcount word -
    // presumably m_refCount is the first member; confirm against Structure.h).
    __ subl_i8m(1, reinterpret_cast<void*>(oldStructure));
    __ addl_i8m(1, reinterpret_cast<void*>(newStructure));
    __ movl_i32m(reinterpret_cast<uint32_t>(newStructure), FIELD_OFFSET(JSCell, m_structure), X86::eax);

    // write the value
    __ movl_mr(FIELD_OFFSET(JSObject, m_propertyStorage), X86::eax, X86::eax);
    __ movl_rm(X86::edx, cachedOffset * sizeof(JSValue*), X86::eax);

    __ ret();

    JmpSrc failureJump;
    if (failureCases.size()) {
        for (unsigned i = 0; i < failureCases.size(); ++i)
            __ link(failureCases[i], __ label());
        // Restore the argument pointer the fail trampoline expects before jumping out.
        restoreArgumentReferenceForTrampoline();
        failureJump = __ jmp();
    }

    void* code = __ executableCopy();

    if (failureCases.size())
        X86Assembler::link(code, failureJump, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));

    if (transitionWillNeedStorageRealloc(oldStructure, newStructure))
        X86Assembler::link(code, callTarget, reinterpret_cast<void*>(resizePropertyStorage));

    // Track the stub so the CodeBlock can delete it later, then point the original
    // call site at the freshly compiled stub.
    m_codeBlock->getStubInfo(returnAddress).stubRoutine = code;

    ctiRepatchCallByReturnAddress(returnAddress, code);
}
300
// Primes an inline-cached get_by_id for a property found directly on the base
// object: patches the hot path's Structure immediate and property-map
// displacement in place (no stub is compiled).
void JIT::patchGetByIdSelf(CodeBlock* codeBlock, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    StructureStubInfo& info = codeBlock->getStubInfo(returnAddress);

    // We don't want to repatch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to Interpreter::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    ctiRepatchCallByReturnAddress(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));

    // Repatch the offset into the property map to load from, then repatch the Structure to look for.
    X86Assembler::repatchDisplacement(reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetGetByIdPropertyMapOffset, cachedOffset * sizeof(JSValue*));
    X86Assembler::repatchImmediate(reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetGetByIdStructure, reinterpret_cast<uint32_t>(structure));
}
313
// Primes an inline-cached put_by_id that replaces an existing property: patches
// the hot path's Structure immediate and property-map displacement in place.
void JIT::patchPutByIdReplace(CodeBlock* codeBlock, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    StructureStubInfo& info = codeBlock->getStubInfo(returnAddress);

    // We don't want to repatch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to Interpreter::cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    ctiRepatchCallByReturnAddress(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_generic));

    // Repatch the offset into the property map to load from, then repatch the Structure to look for.
    X86Assembler::repatchDisplacement(reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetPutByIdPropertyMapOffset, cachedOffset * sizeof(JSValue*));
    X86Assembler::repatchImmediate(reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetPutByIdStructure, reinterpret_cast<uint32_t>(structure));
}
326
// Compiles a stub that services get_by_id "length" on a JSArray: verifies the
// cell is an array, loads the length from its storage, tags it as an immediate
// number, and jumps back into the hot path to store the result.
void JIT::privateCompilePatchGetArrayLength(void* returnAddress)
{
    StructureStubInfo& info = m_codeBlock->getStubInfo(returnAddress);

    // We don't want to repatch more than once - in future go to cti_op_put_by_id_generic.
    ctiRepatchCallByReturnAddress(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));

    // Check eax is an array: compare the first word of the cell against the JSArray vptr.
    __ cmpl_i32m(reinterpret_cast<unsigned>(m_interpreter->m_jsArrayVptr), X86::eax);
    JmpSrc failureCases1 = __ jne();

    // Checks out okay! - get the length from the storage
    __ movl_mr(FIELD_OFFSET(JSArray, m_storage), X86::eax, X86::ecx);
    __ movl_mr(FIELD_OFFSET(ArrayStorage, m_length), X86::ecx, X86::ecx);

    // Bail to the slow case if the length won't fit in an immediate number.
    __ cmpl_i32r(JSImmediate::maxImmediateInt, X86::ecx);
    JmpSrc failureCases2 = __ ja();

    // Tag the raw length as (length << 1) + 1 - presumably JSImmediate's integer
    // encoding; confirm against JSImmediate.h.
    __ addl_rr(X86::ecx, X86::ecx);
    __ addl_i8r(1, X86::ecx);
    __ movl_rr(X86::ecx, X86::eax);
    JmpSrc success = __ jmp();

    void* code = __ executableCopy();

    // Use the repatch information to link the failure cases back to the original slow case routine.
    void* slowCaseBegin = reinterpret_cast<char*>(info.callReturnLocation) - repatchOffsetGetByIdSlowCaseCall;
    X86Assembler::link(code, failureCases1, slowCaseBegin);
    X86Assembler::link(code, failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetGetByIdPropertyMapOffset;
    X86Assembler::link(code, success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    m_codeBlock->getStubInfo(returnAddress).stubRoutine = code;

    // Finally repatch the jump to slow case back in the hot path to jump here instead.
    intptr_t jmpLocation = reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetGetByIdBranchToSlowCase;
    X86Assembler::repatchBranchOffset(jmpLocation, code);
}
368
// Compiles a monomorphic stub for a get_by_id whose property lives directly on
// the base object: guard the cell tag and Structure, then load from property
// storage at cachedOffset and return.
void JIT::privateCompileGetByIdSelf(Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // Check eax is an object of the right Structure.
    __ testl_i32r(JSImmediate::TagMask, X86::eax);
    JmpSrc failureCases1 = __ jne();
    JmpSrc failureCases2 = checkStructure(X86::eax, structure);

    // Checks out okay! - getDirectOffset
    __ movl_mr(FIELD_OFFSET(JSObject, m_propertyStorage), X86::eax, X86::eax);
    __ movl_mr(cachedOffset * sizeof(JSValue*), X86::eax, X86::eax);
    __ ret();

    void* code = __ executableCopy();

    // Either guard failing falls back to the self-fail C++ stub.
    X86Assembler::link(code, failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));
    X86Assembler::link(code, failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));

    // Track the stub so it is deleted with the CodeBlock, then point the call site at it.
    m_codeBlock->getStubInfo(returnAddress).stubRoutine = code;

    ctiRepatchCallByReturnAddress(returnAddress, code);
}
390
// Compiles a stub for a get_by_id whose property lives on the direct prototype:
// guard the base object's Structure and the prototype's Structure, then load
// from the prototype's property storage. With CTI_REPATCH_PIC the stub is wired
// into the existing inline cache; otherwise it becomes a standalone routine.
void JIT::privateCompileGetByIdProto(Structure* structure, Structure* prototypeStructure, size_t cachedOffset, void* returnAddress, CallFrame* callFrame)
{
#if USE(CTI_REPATCH_PIC)
    StructureStubInfo& info = m_codeBlock->getStubInfo(returnAddress);

    // We don't want to repatch more than once - in future go to cti_op_put_by_id_generic.
    ctiRepatchCallByReturnAddress(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_list));

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its property table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    __ movl_mr(static_cast<void*>(protoPropertyStorage), X86::edx);

    // Check eax is an object of the right Structure.
    JmpSrc failureCases1 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure had not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
    __ cmpl_i32m(reinterpret_cast<uint32_t>(prototypeStructure), prototypeStructureAddress);
    JmpSrc failureCases2 = __ jne();

    // Checks out okay! - getDirectOffset
    __ movl_mr(cachedOffset * sizeof(JSValue*), X86::edx, X86::eax);

    JmpSrc success = __ jmp();

    void* code = __ executableCopy();

    // Use the repatch information to link the failure cases back to the original slow case routine.
    void* slowCaseBegin = reinterpret_cast<char*>(info.callReturnLocation) - repatchOffsetGetByIdSlowCaseCall;
    X86Assembler::link(code, failureCases1, slowCaseBegin);
    X86Assembler::link(code, failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetGetByIdPropertyMapOffset;
    X86Assembler::link(code, success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    info.stubRoutine = code;

    // Finally repatch the jump to slow case back in the hot path to jump here instead.
    intptr_t jmpLocation = reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetGetByIdBranchToSlowCase;
    X86Assembler::repatchBranchOffset(jmpLocation, code);
#else
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its property table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    __ movl_mr(static_cast<void*>(protoPropertyStorage), X86::edx);

    // Check eax is an object of the right Structure.
    __ testl_i32r(JSImmediate::TagMask, X86::eax);
    JmpSrc failureCases1 = __ jne();
    JmpSrc failureCases2 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure had not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
    __ cmpl_i32m(reinterpret_cast<uint32_t>(prototypeStructure), prototypeStructureAddress);
    JmpSrc failureCases3 = __ jne();

    // Checks out okay! - getDirectOffset
    __ movl_mr(cachedOffset * sizeof(JSValue*), X86::edx, X86::eax);

    __ ret();

    void* code = __ executableCopy();

    // Any guard failure falls back to the proto-fail C++ stub.
    X86Assembler::link(code, failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));
    X86Assembler::link(code, failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));
    X86Assembler::link(code, failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));

    // Track the stub so it is deleted with the CodeBlock, then point the call site at it.
    m_codeBlock->getStubInfo(returnAddress).stubRoutine = code;

    ctiRepatchCallByReturnAddress(returnAddress, code);
#endif
}
468
469#if USE(CTI_REPATCH_PIC)
// Adds one more "self" case (property on the base object itself) to an existing
// polymorphic get_by_id stub list at slot currentIndex.
void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
{
    // Guard the Structure, then load directly from the object's property storage.
    JmpSrc failureCase = checkStructure(X86::eax, structure);
    __ movl_mr(FIELD_OFFSET(JSObject, m_propertyStorage), X86::eax, X86::eax);
    __ movl_mr(cachedOffset * sizeof(JSValue*), X86::eax, X86::eax);
    JmpSrc success = __ jmp();

    void* code = __ executableCopy();
    ASSERT(code);

    // Use the repatch information to link the failure cases back to the original slow case routine.
    // A miss chains to the previously compiled stub, or to the slow case when no prior stub exists.
    void* lastProtoBegin = polymorphicStructures->list[currentIndex - 1].stubRoutine;
    if (!lastProtoBegin)
        lastProtoBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - repatchOffsetGetByIdSlowCaseCall;

    X86Assembler::link(code, failureCase, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + repatchOffsetGetByIdPropertyMapOffset;
    X86Assembler::link(code, success, reinterpret_cast<void*>(successDest));

    // The list entry takes a ref on the Structure along with ownership of the stub code.
    structure->ref();
    polymorphicStructures->list[currentIndex].set(cachedOffset, code, structure);

    // Finally repatch the jump to slow case back in the hot path to jump here instead.
    intptr_t jmpLocation = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + repatchOffsetGetByIdBranchToSlowCase;
    X86Assembler::repatchBranchOffset(jmpLocation, code);
}
498
// Adds one more "proto" case (property on the direct prototype) to an existing
// polymorphic get_by_id stub list at slot currentIndex.
void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its property table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    __ movl_mr(static_cast<void*>(protoPropertyStorage), X86::edx);

    // Check eax is an object of the right Structure.
    JmpSrc failureCases1 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure had not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
    __ cmpl_i32m(reinterpret_cast<uint32_t>(prototypeStructure), prototypeStructureAddress);
    JmpSrc failureCases2 = __ jne();

    // Checks out okay! - getDirectOffset
    __ movl_mr(cachedOffset * sizeof(JSValue*), X86::edx, X86::eax);

    JmpSrc success = __ jmp();

    void* code = __ executableCopy();

    // Use the repatch information to link the failure cases back to the original slow case routine.
    // A miss chains to the previously compiled stub in the list.
    void* lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
    X86Assembler::link(code, failureCases1, lastProtoBegin);
    X86Assembler::link(code, failureCases2, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + repatchOffsetGetByIdPropertyMapOffset;
    X86Assembler::link(code, success, reinterpret_cast<void*>(successDest));

    // The list entry takes refs on both Structures along with ownership of the stub code.
    structure->ref();
    prototypeStructure->ref();
    prototypeStructures->list[currentIndex].set(cachedOffset, code, structure, prototypeStructure);

    // Finally repatch the jump to slow case back in the hot path to jump here instead.
    intptr_t jmpLocation = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + repatchOffsetGetByIdBranchToSlowCase;
    X86Assembler::repatchBranchOffset(jmpLocation, code);
}
539
// Adds one more "chain" case (property found `count` links up the prototype
// chain) to an existing polymorphic get_by_id stub list at slot currentIndex.
void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame)
{
    ASSERT(count);

    Vector<JmpSrc> bucketsOfFail;

    // Check eax is an object of the right Structure.
    bucketsOfFail.append(checkStructure(X86::eax, structure));

    // Walk the chain: each prototype's Structure must still match the cached entry.
    Structure* currStructure = structure;
    RefPtr<Structure>* chainEntries = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = chainEntries[i].get();

        // Check the prototype object's Structure had not changed.
        Structure** prototypeStructureAddress = &(protoObject->m_structure);
        __ cmpl_i32m(reinterpret_cast<uint32_t>(currStructure), prototypeStructureAddress);
        bucketsOfFail.append(__ jne());
    }
    ASSERT(protoObject);

    // Load the property from the final prototype's storage.
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    __ movl_mr(static_cast<void*>(protoPropertyStorage), X86::edx);
    __ movl_mr(cachedOffset * sizeof(JSValue*), X86::edx, X86::eax);
    JmpSrc success = __ jmp();

    void* code = __ executableCopy();

    // Use the repatch information to link the failure cases back to the original slow case routine.
    // A miss chains to the previously compiled stub in the list.
    void* lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;

    for (unsigned i = 0; i < bucketsOfFail.size(); ++i)
        X86Assembler::link(code, bucketsOfFail[i], lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + repatchOffsetGetByIdPropertyMapOffset;
    X86Assembler::link(code, success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    structure->ref();
    chain->ref();
    prototypeStructures->list[currentIndex].set(cachedOffset, code, structure, chain);

    // Finally repatch the jump to slow case back in the hot path to jump here instead.
    intptr_t jmpLocation = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + repatchOffsetGetByIdBranchToSlowCase;
    X86Assembler::repatchBranchOffset(jmpLocation, code);
}
589#endif
590
void JIT::privateCompileGetByIdChain(Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, void* returnAddress, CallFrame* callFrame)
{
    // Compiles a monomorphic stub for a get_by_id that finds the property on an
    // object 'count' hops down the prototype chain: a Structure check for the
    // base object and for every object along 'chain', then a load of the slot
    // at 'cachedOffset' from the final prototype's property storage.
#if USE(CTI_REPATCH_PIC)
    StructureStubInfo& info = m_codeBlock->getStubInfo(returnAddress);

    // We don't want to repatch more than once - any subsequent miss is routed
    // through cti_op_get_by_id_proto_list instead.
    ctiRepatchCallByReturnAddress(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_list));

    ASSERT(count);

    Vector<JmpSrc> bucketsOfFail;

    // Check eax is an object of the right Structure.
    bucketsOfFail.append(checkStructure(X86::eax, structure));

    // Emit a Structure check for each object on the prototype chain.
    Structure* currStructure = structure;
    RefPtr<Structure>* chainEntries = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = chainEntries[i].get();

        // Check the prototype object's Structure had not changed.
        Structure** prototypeStructureAddress = &(protoObject->m_structure);
        __ cmpl_i32m(reinterpret_cast<uint32_t>(currStructure), prototypeStructureAddress);
        bucketsOfFail.append(__ jne());
    }
    ASSERT(protoObject);

    // Load the final prototype's property storage, then the cached slot, into eax.
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    __ movl_mr(static_cast<void*>(protoPropertyStorage), X86::edx);
    __ movl_mr(cachedOffset * sizeof(JSValue*), X86::edx, X86::eax);
    JmpSrc success = __ jmp();

    void* code = __ executableCopy();

    // Use the repatch information to link the failure cases back to the original slow case routine.
    void* slowCaseBegin = reinterpret_cast<char*>(info.callReturnLocation) - repatchOffsetGetByIdSlowCaseCall;

    for (unsigned i = 0; i < bucketsOfFail.size(); ++i)
        X86Assembler::link(code, bucketsOfFail[i], slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetGetByIdPropertyMapOffset;
    X86Assembler::link(code, success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    info.stubRoutine = code;

    // Finally repatch the jump to slow case back in the hot path to jump here instead.
    intptr_t jmpLocation = reinterpret_cast<intptr_t>(info.hotPathBegin) + repatchOffsetGetByIdBranchToSlowCase;
    X86Assembler::repatchBranchOffset(jmpLocation, code);
#else
    // Non-PIC variant: build a stand-alone stub that additionally rejects
    // immediate values, returns the result itself, and repatch the call site
    // to invoke it directly.
    ASSERT(count);

    Vector<JmpSrc> bucketsOfFail;

    // Check eax is an object of the right Structure.
    __ testl_i32r(JSImmediate::TagMask, X86::eax);
    bucketsOfFail.append(__ jne());
    bucketsOfFail.append(checkStructure(X86::eax, structure));

    // Emit a Structure check for each object on the prototype chain.
    Structure* currStructure = structure;
    RefPtr<Structure>* chainEntries = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = chainEntries[i].get();

        // Check the prototype object's Structure had not changed.
        Structure** prototypeStructureAddress = &(protoObject->m_structure);
        __ cmpl_i32m(reinterpret_cast<uint32_t>(currStructure), prototypeStructureAddress);
        bucketsOfFail.append(__ jne());
    }
    ASSERT(protoObject);

    // Load the final prototype's property storage, then the cached slot, into eax.
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    __ movl_mr(static_cast<void*>(protoPropertyStorage), X86::edx);
    __ movl_mr(cachedOffset * sizeof(JSValue*), X86::edx, X86::eax);
    __ ret();

    void* code = __ executableCopy();

    // Every failure case takes the generic (uncached) proto-lookup slow path.
    for (unsigned i = 0; i < bucketsOfFail.size(); ++i)
        X86Assembler::link(code, bucketsOfFail[i], reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));

    // Track the stub we have created so that it will be deleted later.
    m_codeBlock->getStubInfo(returnAddress).stubRoutine = code;

    ctiRepatchCallByReturnAddress(returnAddress, code);
#endif
}
682
683void JIT::privateCompilePutByIdReplace(Structure* structure, size_t cachedOffset, void* returnAddress)
684{
685 // Check eax is an object of the right Structure.
686 __ testl_i32r(JSImmediate::TagMask, X86::eax);
687 JmpSrc failureCases1 = __ jne();
688 JmpSrc failureCases2 = checkStructure(X86::eax, structure);
689
690 // checks out okay! - putDirectOffset
691 __ movl_mr(FIELD_OFFSET(JSObject, m_propertyStorage), X86::eax, X86::eax);
692 __ movl_rm(X86::edx, cachedOffset * sizeof(JSValue*), X86::eax);
693 __ ret();
694
695 void* code = __ executableCopy();
696
697 X86Assembler::link(code, failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));
698 X86Assembler::link(code, failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));
699
700 m_codeBlock->getStubInfo(returnAddress).stubRoutine = code;
701
702 ctiRepatchCallByReturnAddress(returnAddress, code);
703}
704
705} // namespace JSC
706
707#endif // ENABLE(JIT)
Note: See TracBrowser for help on using the repository browser.