source: webkit/trunk/JavaScriptCore/bytecode/CodeBlock.h@47022

Last change on this file since 47022 was 47022, checked in by [email protected], 16 years ago

Stack overflow crash in JavaScript garbage collector mark pass
https://bugs.webkit.org/show_bug.cgi?id=12216

Reviewed by Gavin Barraclough and Sam Weinig

Make the GC mark phase iterative by using an explicit mark stack.
To do this, marking of any single object is performed in multiple stages:

  • The object is appended to the MarkStack; this sets the marked bit for the object using the new markDirect() function, and then returns.
  • When the MarkStack is drain()ed, the object is popped off the stack and markChildren(MarkStack&) is called on the object to collect all of its children. drain() then repeats until the stack is empty.

Additionally, I renamed a number of methods from 'mark' to 'markAggregate'
in order to make it clearer that marking those objects was not
going to result in an actual recursive mark.
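
The two stages above amount to an iterative graph traversal driven by an explicit work list. Below is a minimal sketch of that shape; only the names append(), drain(), markDirect(), and markChildren() are taken from the description above, while the Cell and MarkStackSketch types (and everything else) are simplified stand-ins rather than the real JavaScriptCore interfaces.

#include <vector>

// Illustrative stand-in for a garbage-collected object; the real JSCell and
// MarkStack types in JavaScriptCore differ in detail.
struct Cell {
    bool marked = false;
    std::vector<Cell*> children;

    // Set the mark bit directly, without visiting children (cf. markDirect()).
    void markDirect() { marked = true; }

    // Push every child onto the mark stack (cf. markChildren(MarkStack&)).
    template<typename Stack> void markChildren(Stack& stack)
    {
        for (Cell* child : children)
            stack.append(child);
    }
};

class MarkStackSketch {
public:
    // Stage 1: set the mark bit and queue the object; constant C-stack depth,
    // no matter how deep the object graph is.
    void append(Cell* cell)
    {
        if (!cell || cell->marked)
            return;
        cell->markDirect();
        m_stack.push_back(cell);
    }

    // Stage 2: pop objects and visit their children until the stack is empty.
    void drain()
    {
        while (!m_stack.empty()) {
            Cell* cell = m_stack.back();
            m_stack.pop_back();
            cell->markChildren(*this);
        }
    }

private:
    std::vector<Cell*> m_stack;
};

Because the pending work lives in a heap-allocated vector rather than on the machine stack, the depth of the object graph no longer translates into C-stack depth during the mark phase, which is the overflow the bug above describes.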

File size: 24.3 KB
/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <[email protected]>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CodeBlock_h
#define CodeBlock_h

#include "EvalCodeCache.h"
#include "Instruction.h"
#include "JITCode.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "Nodes.h"
#include "PtrAndFlags.h"
#include "RegExp.h"
#include "UString.h"
#include <wtf/FastAllocBase.h>
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>

#if ENABLE(JIT)
#include "StructureStubInfo.h"
#endif

// Register numbers used in bytecode operations have different meanings according to their ranges:
// 0x80000000-0xFFFFFFFF  Negative indices from the CallFrame pointer are entries in the call frame; see RegisterFile.h.
// 0x00000000-0x3FFFFFFF  Forward indices from the CallFrame pointer are local vars and temporaries within the function's callframe.
// 0x40000000-0x7FFFFFFF  Positive indices from 0x40000000 specify entries in the constant pool on the CodeBlock.
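// For example, register index 2 names a local variable or temporary within the current callframe, while
// index 0x40000002 (FirstConstantRegisterIndex + 2) names m_constantRegisters[2] on the CodeBlock; see
// isConstantRegisterIndex() and constantRegister() below.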
static const int FirstConstantRegisterIndex = 0x40000000;

namespace JSC {

    enum HasSeenShouldRepatch {
        hasSeenShouldRepatch
    };

    class ExecState;

    enum CodeType { GlobalCode, EvalCode, FunctionCode, NativeCode };

    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }

    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
#if ENABLE(JIT)
        CodeLocationLabel nativeCode;
#endif
    };

    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };

    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };

    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these uses of op_get_by_id need to reflect this.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31;
        bool isOpConstruct : 1;
    };

#if ENABLE(JIT)
    struct CallLinkInfo {
        CallLinkInfo()
            : callee(0)
        {
        }

        unsigned bytecodeIndex;
        CodeLocationNearCall callReturnLocation;
        CodeLocationDataLabelPtr hotPathBegin;
        CodeLocationNearCall hotPathOther;
        PtrAndFlags<CodeBlock, HasSeenShouldRepatch> ownerCodeBlock;
        CodeBlock* callee;
        unsigned position;

        void setUnlinked() { callee = 0; }
        bool isLinked() { return callee; }

        bool seenOnce()
        {
            return ownerCodeBlock.isFlagSet(hasSeenShouldRepatch);
        }

        void setSeen()
        {
            ownerCodeBlock.setFlag(hasSeenShouldRepatch);
        }
    };

    struct MethodCallLinkInfo {
        MethodCallLinkInfo()
            : cachedStructure(0)
        {
        }

        bool seenOnce()
        {
            return cachedPrototypeStructure.isFlagSet(hasSeenShouldRepatch);
        }

        void setSeen()
        {
            cachedPrototypeStructure.setFlag(hasSeenShouldRepatch);
        }

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
        PtrAndFlags<Structure, HasSeenShouldRepatch> cachedPrototypeStructure;
    };

    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)
        {
        }

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    };

    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure;
        unsigned offset;
        unsigned bytecodeOffset;
    };

    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode index of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeIndex {
        CallReturnOffsetToBytecodeIndex(unsigned callReturnOffset, unsigned bytecodeIndex)
            : callReturnOffset(callReturnOffset)
            , bytecodeIndex(bytecodeIndex)
        {
        }

        unsigned callReturnOffset;
        unsigned bytecodeIndex;
    };

    // valueAtPosition helpers for the binaryChop algorithm below.

    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }

    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }

    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }

    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeIndex* pc)
    {
        return pc->callReturnOffset;
    }

    // Binary chop algorithm: calls valueAtPosition on pre-sorted elements in array and
    // compares the result with key (KeyTypes should be comparable with '==', '<', '>').
    // Optimized for cases where the array contains the key, checked by assertions.
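    // For example, given three CallReturnOffsetToBytecodeIndex entries whose callReturnOffsets are { 4, 10, 22 },
    // binaryChop with getCallReturnOffset and key 10 probes the middle element first and returns it immediately,
    // while key 22 discards the left half and converges on the final element.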
    template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
    inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
    {
        // The array must contain at least one element (precondition: the array does contain key).
        // If the array only contains one element, there is no need to do the comparison.
        while (size > 1) {
            // Pick an element to check, halfway through the array, and read the value.
            int pos = (size - 1) >> 1;
            KeyType val = valueAtPosition(&array[pos]);

            // If the key matches, success!
            if (val == key)
                return &array[pos];
            // The item we are looking for is smaller than the item being checked; reduce the value of 'size',
            // chopping off the right hand half of the array.
            else if (key < val)
                size = pos;
            // Discard all values in the left hand half of the array, up to and including the item at pos.
            else {
                size -= (pos + 1);
                array += (pos + 1);
            }

            // 'size' should never reach zero.
            ASSERT(size);
        }

        // If we reach this point we've chopped down to one element; no need to check that it matches.
        ASSERT(size == 1);
        ASSERT(key == valueAtPosition(&array[0]));
        return &array[0];
    }
#endif

    class CodeBlock : public FastAllocBase {
        friend class JIT;
    public:
        CodeBlock(ScopeNode* ownerNode);
        CodeBlock(ScopeNode* ownerNode, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset);
        ~CodeBlock();

        void markAggregate(MarkStack&);
        void refStructures(Instruction* vPC) const;
        void derefStructures(Instruction* vPC) const;
#if ENABLE(JIT_OPTIMIZE_CALL)
        void unlinkCallers();
#endif

        static void dumpStatistics();

#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*) const;
        void printStructures(const Instruction*) const;
        void printStructure(const char* name, const Instruction*, int operand) const;
#endif

        inline bool isKnownNotImmediate(int index)
        {
            if (index == m_thisRegister)
                return true;

            if (isConstantRegisterIndex(index))
                return getConstant(index).isCell();

            return false;
        }

        ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
        {
            return index >= m_numVars;
        }

        HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
        int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
        int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
        bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);

#if ENABLE(JIT)
        void addCaller(CallLinkInfo* caller)
        {
            caller->callee = this;
            caller->position = m_linkedCallerList.size();
            m_linkedCallerList.append(caller);
        }

        void removeCaller(CallLinkInfo* caller)
        {
            unsigned pos = caller->position;
            unsigned lastPos = m_linkedCallerList.size() - 1;

            if (pos != lastPos) {
                m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
                m_linkedCallerList[pos]->position = pos;
            }
            m_linkedCallerList.shrink(lastPos);
        }

        StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value()));
        }

        CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value()));
        }

        MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value()));
        }

        unsigned getBytecodeIndex(CallFrame* callFrame, ReturnAddressPtr returnAddress)
        {
            reparseForExceptionInfoIfNecessary(callFrame);
            return binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(callReturnIndexVector().begin(), callReturnIndexVector().size(), ownerNode()->generatedJITCode().offsetOf(returnAddress.value()))->bytecodeIndex;
        }

        bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
#endif

        void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
        bool isNumericCompareFunction() { return m_isNumericCompareFunction; }

        Vector<Instruction>& instructions() { return m_instructions; }
#ifndef NDEBUG
        void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
#endif

#if ENABLE(JIT)
        JITCode& getJITCode() { return ownerNode()->generatedJITCode(); }
        void setJITCode(JITCode);
        ExecutablePool* executablePool() { return ownerNode()->getExecutablePool(); }
#endif

        ScopeNode* ownerNode() const { return m_ownerNode; }

        void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }

        void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
        int thisRegister() const { return m_thisRegister; }

        void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
        bool needsFullScopeChain() const { return m_needsFullScopeChain; }
        void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
        bool usesEval() const { return m_usesEval; }
        void setUsesArguments(bool usesArguments) { m_usesArguments = usesArguments; }
        bool usesArguments() const { return m_usesArguments; }

        CodeType codeType() const { return m_codeType; }

        SourceProvider* source() const { ASSERT(m_codeType != NativeCode); return m_source.get(); }
        unsigned sourceOffset() const { ASSERT(m_codeType != NativeCode); return m_sourceOffset; }

        size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
        void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
        unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
        unsigned lastJumpTarget() const { return m_jumpTargets.last(); }

#if !ENABLE(JIT)
        void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
        void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
        bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
#else
        size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
        void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
        StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }

        void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
        GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
        bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);

        size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
        void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
        CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }

        void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); }
        MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }

        void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
#endif

        // Exception handling support

        size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
        void addExceptionHandler(const HandlerInfo& handler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(handler); }
        HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }

        bool hasExceptionInfo() const { return m_exceptionInfo; }
        void clearExceptionInfo() { m_exceptionInfo.clear(); }

        void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
        void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }

        size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
        void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
        LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }

#if ENABLE(JIT)
        Vector<CallReturnOffsetToBytecodeIndex>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; }
#endif

        // Constant Pool

        size_t numberOfIdentifiers() const { return m_identifiers.size(); }
        void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
        Identifier& identifier(int index) { return m_identifiers[index]; }

        size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
        void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
        Register& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; }
        ALWAYS_INLINE bool isConstantRegisterIndex(int index) { return index >= FirstConstantRegisterIndex; }
        ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].jsValue(); }

        unsigned addFunctionExpression(FuncExprNode* n) { unsigned size = m_functionExpressions.size(); m_functionExpressions.append(n); return size; }
        FuncExprNode* functionExpression(int index) const { return m_functionExpressions[index].get(); }

        unsigned addFunction(FuncDeclNode* n) { createRareDataIfNecessary(); unsigned size = m_rareData->m_functions.size(); m_rareData->m_functions.append(n); return size; }
        FuncDeclNode* function(int index) const { ASSERT(m_rareData); return m_rareData->m_functions[index].get(); }

        bool hasFunctions() const { return m_functionExpressions.size() || (m_rareData && m_rareData->m_functions.size()); }

        unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
        RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }


        // Jump Tables

        size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
        SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }

        size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
        SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }

        size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
        StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
        StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }


        SymbolTable& symbolTable() { return m_symbolTable; }

        EvalCodeCache& evalCodeCache() { ASSERT(m_codeType != NativeCode); createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }

        void shrinkToFit();

        // FIXME: Make these remaining members private.

        int m_numCalleeRegisters;
        int m_numVars;
        int m_numParameters;

    private:
#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;
#endif

        void reparseForExceptionInfoIfNecessary(CallFrame*);

        void createRareDataIfNecessary()
        {
            ASSERT(m_codeType != NativeCode);
            if (!m_rareData)
                m_rareData.set(new RareData);
        }

        ScopeNode* m_ownerNode;
        JSGlobalData* m_globalData;

        Vector<Instruction> m_instructions;
#ifndef NDEBUG
        unsigned m_instructionCount;
#endif

        int m_thisRegister;

        bool m_needsFullScopeChain;
        bool m_usesEval;
        bool m_usesArguments;
        bool m_isNumericCompareFunction;

        CodeType m_codeType;

        RefPtr<SourceProvider> m_source;
        unsigned m_sourceOffset;

#if !ENABLE(JIT)
        Vector<unsigned> m_propertyAccessInstructions;
        Vector<unsigned> m_globalResolveInstructions;
#else
        Vector<StructureStubInfo> m_structureStubInfos;
        Vector<GlobalResolveInfo> m_globalResolveInfos;
        Vector<CallLinkInfo> m_callLinkInfos;
        Vector<MethodCallLinkInfo> m_methodCallLinkInfos;
        Vector<CallLinkInfo*> m_linkedCallerList;
#endif

        Vector<unsigned> m_jumpTargets;

        // Constant Pool
        Vector<Identifier> m_identifiers;
        Vector<Register> m_constantRegisters;
        Vector<RefPtr<FuncExprNode> > m_functionExpressions;

        SymbolTable m_symbolTable;

        struct ExceptionInfo : FastAllocBase {
            Vector<ExpressionRangeInfo> m_expressionInfo;
            Vector<LineInfo> m_lineInfo;
            Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;

#if ENABLE(JIT)
            Vector<CallReturnOffsetToBytecodeIndex> m_callReturnIndexVector;
#endif
        };
        OwnPtr<ExceptionInfo> m_exceptionInfo;

        struct RareData : FastAllocBase {
            Vector<HandlerInfo> m_exceptionHandlers;

            // Rare Constants
            Vector<RefPtr<FuncDeclNode> > m_functions;
            Vector<RefPtr<RegExp> > m_regexps;

            // Jump Tables
            Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
            Vector<SimpleJumpTable> m_characterSwitchJumpTables;
            Vector<StringJumpTable> m_stringSwitchJumpTables;

            EvalCodeCache m_evalCodeCache;

#if ENABLE(JIT)
            Vector<FunctionRegisterInfo> m_functionRegisterInfos;
#endif
        };
        OwnPtr<RareData> m_rareData;
    };

    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class ProgramCodeBlock : public CodeBlock {
    public:
        ProgramCodeBlock(ScopeNode* ownerNode, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : CodeBlock(ownerNode, codeType, sourceProvider, 0)
            , m_globalObject(globalObject)
        {
            m_globalObject->codeBlocks().add(this);
        }

        ~ProgramCodeBlock()
        {
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
    };

    class EvalCodeBlock : public ProgramCodeBlock {
    public:
        EvalCodeBlock(ScopeNode* ownerNode, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : ProgramCodeBlock(ownerNode, EvalCode, globalObject, sourceProvider)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

    private:
        int m_baseScopeDepth;
    };

    inline Register& ExecState::r(int index)
    {
        CodeBlock* codeBlock = this->codeBlock();
        if (codeBlock->isConstantRegisterIndex(index))
            return codeBlock->constantRegister(index);
        return this[index];
    }

} // namespace JSC

#endif // CodeBlock_h