source: webkit/trunk/JavaScriptCore/bytecode/CodeBlock.h@44711

Last change on this file since 44711 was 44711, checked in by [email protected], 16 years ago

2009-06-15 Gavin Barraclough <[email protected]>

Reviewed by Sam Weinig.

Having moved most of their functionality into the RepatchBuffer class,
we can simplify the CodeLocation* classes.

The CodeLocation* classes are currently a tangle of templatey and friendly
badness, buried in the middle of AbstractMacroAssembler. Having moved
the ability to repatch out into RepatchBuffer, they are now do-nothing wrappers
on CodePtr (MacroAssemblerCodePtr) that only exist to provide type-safety.

Simplify the code, and move them off into their own header.
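
For illustration only (editor's sketch, not part of this change): the resulting pattern
is a thin type-safe wrapper over MacroAssemblerCodePtr, shown here as a subclass for
brevity, with the constructors simplified relative to the real CodeLocation.h:

    class CodeLocationCommon : public MacroAssemblerCodePtr {
    protected:
        CodeLocationCommon() { }
        explicit CodeLocationCommon(MacroAssemblerCodePtr location) : MacroAssemblerCodePtr(location) { }
    };

    class CodeLocationLabel : public CodeLocationCommon {
    public:
        CodeLocationLabel() { }
        explicit CodeLocationLabel(MacroAssemblerCodePtr location) : CodeLocationCommon(location) { }
    };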

  • JavaScriptCore.xcodeproj/project.pbxproj:
  • assembler/AbstractMacroAssembler.h:
    (JSC::AbstractMacroAssembler::PatchBuffer::patch):
  • assembler/CodeLocation.h: Copied from assembler/AbstractMacroAssembler.h.
    (JSC::CodeLocationCommon::CodeLocationCommon):
    (JSC::CodeLocationInstruction::CodeLocationInstruction):
    (JSC::CodeLocationLabel::CodeLocationLabel):
    (JSC::CodeLocationJump::CodeLocationJump):
    (JSC::CodeLocationCall::CodeLocationCall):
    (JSC::CodeLocationNearCall::CodeLocationNearCall):
    (JSC::CodeLocationDataLabel32::CodeLocationDataLabel32):
    (JSC::CodeLocationDataLabelPtr::CodeLocationDataLabelPtr):
    (JSC::CodeLocationCommon::instructionAtOffset):
    (JSC::CodeLocationCommon::labelAtOffset):
    (JSC::CodeLocationCommon::jumpAtOffset):
    (JSC::CodeLocationCommon::callAtOffset):
    (JSC::CodeLocationCommon::nearCallAtOffset):
    (JSC::CodeLocationCommon::dataLabelPtrAtOffset):
    (JSC::CodeLocationCommon::dataLabel32AtOffset):
  • assembler/MacroAssemblerCodeRef.h:
    (JSC::MacroAssemblerCodePtr::operator!):
  • bytecode/CodeBlock.h:
    (JSC::getStructureStubInfoReturnLocation):
    (JSC::getCallLinkInfoReturnLocation):
    (JSC::getMethodCallLinkInfoReturnLocation):
  • bytecode/Instruction.h:
  • bytecode/JumpTable.h:
    (JSC::StringJumpTable::ctiForValue):
    (JSC::SimpleJumpTable::ctiForValue):
  • bytecode/StructureStubInfo.h:
  • bytecompiler/BytecodeGenerator.cpp:
    (JSC::BytecodeGenerator::emitCatch):
  • jit/JIT.cpp:
    (JSC::JIT::privateCompile):
  • jit/JITStubs.cpp:
    (JSC::JITStubs::DEFINE_STUB_FUNCTION):
    (JSC::JITStubs::getPolymorphicAccessStructureListSlot):
/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <[email protected]>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CodeBlock_h
#define CodeBlock_h

#include "EvalCodeCache.h"
#include "Instruction.h"
#include "JITCode.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "Nodes.h"
#include "RegExp.h"
#include "UString.h"
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>

#if ENABLE(JIT)
#include "StructureStubInfo.h"
#endif

namespace JSC {

    class ExecState;

    enum CodeType { GlobalCode, EvalCode, FunctionCode };

    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }

    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
#if ENABLE(JIT)
        CodeLocationLabel nativeCode;
#endif
    };

    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };

    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };

    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these instances of op_get_by_id need to reflect this.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31;
        bool isOpConstruct : 1;
    };

#if ENABLE(JIT)
    struct CallLinkInfo {
        CallLinkInfo()
            : callee(0)
        {
        }

        unsigned bytecodeIndex;
        CodeLocationNearCall callReturnLocation;
        CodeLocationDataLabelPtr hotPathBegin;
        CodeLocationNearCall hotPathOther;
        CodeBlock* callee;
        unsigned position;

        void setUnlinked() { callee = 0; }
        bool isLinked() { return callee; }
    };

    struct MethodCallLinkInfo {
        MethodCallLinkInfo()
            : cachedStructure(0)
        {
        }

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
    };

    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)
        {
        }

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    };

    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure;
        unsigned offset;
        unsigned bytecodeOffset;
    };

    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode index of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeIndex {
        CallReturnOffsetToBytecodeIndex(unsigned callReturnOffset, unsigned bytecodeIndex)
            : callReturnOffset(callReturnOffset)
            , bytecodeIndex(bytecodeIndex)
        {
        }

        unsigned callReturnOffset;
        unsigned bytecodeIndex;
    };

    // valueAtPosition helpers for the binaryChop algorithm below.

    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }

    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }

    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }

    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeIndex* pc)
    {
        return pc->callReturnOffset;
    }

    // Binary chop algorithm: calls valueAtPosition on pre-sorted elements in array,
    // and compares the result with key (KeyTypes should be comparable with '==', '<', '>').
    // Optimized for cases where the array contains the key, checked by assertions.
    template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
    inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
    {
        // The array must contain at least one element (precondition: the array does contain the key).
        // If the array only contains one element, no need to do the comparison.
        while (size > 1) {
            // Pick an element to check, half way through the array, and read the value.
            int pos = (size - 1) >> 1;
            KeyType val = valueAtPosition(&array[pos]);

            // If the key matches, success!
            if (val == key)
                return &array[pos];
            // The item we are looking for is smaller than the item being checked; reduce the value of 'size',
            // chopping off the right hand half of the array.
            else if (key < val)
                size = pos;
            // Discard all values in the left hand half of the array, up to and including the item at pos.
            else {
                size -= (pos + 1);
                array += (pos + 1);
            }

            // 'size' should never reach zero.
            ASSERT(size);
        }

        // If we reach this point we've chopped down to one element; no need to check that it matches.
        ASSERT(size == 1);
        ASSERT(key == valueAtPosition(&array[0]));
        return &array[0];
    }
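
    // Illustrative sketch (editor's addition, not part of the original header): a
    // typical instantiation of binaryChop, using the CallReturnOffsetToBytecodeIndex
    // records and the getCallReturnOffset accessor defined above. The helper name
    // exampleFindCallReturnEntry is hypothetical.
    inline CallReturnOffsetToBytecodeIndex* exampleFindCallReturnEntry(Vector<CallReturnOffsetToBytecodeIndex>& entries, unsigned callReturnOffset)
    {
        // 'entries' must be sorted by callReturnOffset and must contain the key,
        // matching the preconditions binaryChop asserts.
        return binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(entries.begin(), entries.size(), callReturnOffset);
    }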
#endif

    class CodeBlock {
        friend class JIT;
    public:
        CodeBlock(ScopeNode* ownerNode, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset);
        ~CodeBlock();

        void mark();
        void refStructures(Instruction* vPC) const;
        void derefStructures(Instruction* vPC) const;
#if ENABLE(JIT)
        void unlinkCallers();
#endif

        static void dumpStatistics();

#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*) const;
        void printStructures(const Instruction*) const;
        void printStructure(const char* name, const Instruction*, int operand) const;
#endif

        inline bool isKnownNotImmediate(int index)
        {
            if (index == m_thisRegister)
                return true;

            if (isConstantRegisterIndex(index))
                return getConstant(index).isCell();

            return false;
        }

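        // The register index space, as assumed by the three helpers below: indices
        // [0, m_numVars) are local variables, [m_numVars, m_numVars + m_numConstants)
        // are constant registers, and anything from m_numVars + m_numConstants up is
        // a temporary.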
        ALWAYS_INLINE bool isConstantRegisterIndex(int index)
        {
            return index >= m_numVars && index < m_numVars + m_numConstants;
        }

        ALWAYS_INLINE JSValue getConstant(int index)
        {
            return m_constantRegisters[index - m_numVars].jsValue();
        }

        ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
        {
            return index >= m_numVars + m_numConstants;
        }

        HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
        int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
        int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
        bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);

#if ENABLE(JIT)
        void addCaller(CallLinkInfo* caller)
        {
            caller->callee = this;
            caller->position = m_linkedCallerList.size();
            m_linkedCallerList.append(caller);
        }

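        // Unlinks a caller in O(1): the entry being removed is overwritten with the
        // last entry in m_linkedCallerList (updating that entry's recorded position),
        // and the list is shrunk by one.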
        void removeCaller(CallLinkInfo* caller)
        {
            unsigned pos = caller->position;
            unsigned lastPos = m_linkedCallerList.size() - 1;

            if (pos != lastPos) {
                m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
                m_linkedCallerList[pos]->position = pos;
            }
            m_linkedCallerList.shrink(lastPos);
        }

        StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value()));
        }

        CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value()));
        }

        MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value()));
        }

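        // Maps a JIT return address back to the bytecode index of the corresponding
        // call, regenerating exception info if necessary, then binary chopping the
        // sorted m_callReturnIndexVector by call return offset.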
        unsigned getBytecodeIndex(CallFrame* callFrame, ReturnAddressPtr returnAddress)
        {
            reparseForExceptionInfoIfNecessary(callFrame);
            return binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(m_exceptionInfo->m_callReturnIndexVector.begin(), m_exceptionInfo->m_callReturnIndexVector.size(), ownerNode()->generatedJITCode().offsetOf(returnAddress.value()))->bytecodeIndex;
        }

        bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
#endif

        void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
        bool isNumericCompareFunction() { return m_isNumericCompareFunction; }

        Vector<Instruction>& instructions() { return m_instructions; }
#ifndef NDEBUG
        void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
#endif

#if ENABLE(JIT)
        void setJITCode(JITCode);
        ExecutablePool* executablePool() { return ownerNode()->getExecutablePool(); }
#endif

        ScopeNode* ownerNode() const { return m_ownerNode; }

        void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }

        void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
        int thisRegister() const { return m_thisRegister; }

        void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
        bool needsFullScopeChain() const { return m_needsFullScopeChain; }
        void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
        bool usesEval() const { return m_usesEval; }
        void setUsesArguments(bool usesArguments) { m_usesArguments = usesArguments; }
        bool usesArguments() const { return m_usesArguments; }

        CodeType codeType() const { return m_codeType; }

        SourceProvider* source() const { return m_source.get(); }
        unsigned sourceOffset() const { return m_sourceOffset; }

        size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
        void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
        unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
        unsigned lastJumpTarget() const { return m_jumpTargets.last(); }

#if !ENABLE(JIT)
        void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
        void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
        bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
#else
        size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
        void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
        StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }

        void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
        GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
        bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);

        size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
        void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
        CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }

        void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); }
        MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }

        void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
#endif

        // Exception handling support

        size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
        void addExceptionHandler(const HandlerInfo& handler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(handler); }
        HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }

        bool hasExceptionInfo() const { return m_exceptionInfo; }
        void clearExceptionInfo() { m_exceptionInfo.clear(); }

        void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
        void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }

        size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
        void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
        LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }

#if ENABLE(JIT)
        Vector<CallReturnOffsetToBytecodeIndex>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; }
#endif

        // Constant Pool

        size_t numberOfIdentifiers() const { return m_identifiers.size(); }
        void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
        Identifier& identifier(int index) { return m_identifiers[index]; }

        size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
        void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
        Register& constantRegister(int index) { return m_constantRegisters[index]; }

        unsigned addFunctionExpression(FuncExprNode* n) { unsigned size = m_functionExpressions.size(); m_functionExpressions.append(n); return size; }
        FuncExprNode* functionExpression(int index) const { return m_functionExpressions[index].get(); }

        unsigned addFunction(FuncDeclNode* n) { createRareDataIfNecessary(); unsigned size = m_rareData->m_functions.size(); m_rareData->m_functions.append(n); return size; }
        FuncDeclNode* function(int index) const { ASSERT(m_rareData); return m_rareData->m_functions[index].get(); }

        bool hasFunctions() const { return m_functionExpressions.size() || (m_rareData && m_rareData->m_functions.size()); }

        unsigned addUnexpectedConstant(JSValue v) { createRareDataIfNecessary(); unsigned size = m_rareData->m_unexpectedConstants.size(); m_rareData->m_unexpectedConstants.append(v); return size; }
        JSValue unexpectedConstant(int index) const { ASSERT(m_rareData); return m_rareData->m_unexpectedConstants[index]; }

        unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
        RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }


        // Jump Tables

        size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
        SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }

        size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
        SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }

        size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
        StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
        StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }


        SymbolTable& symbolTable() { return m_symbolTable; }

        EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }

        void shrinkToFit();

        // FIXME: Make these remaining members private.

        int m_numCalleeRegisters;
        // NOTE: numConstants holds the number of constant registers allocated
        // by the code generator, not the number of constant registers used.
        // (Duplicate constants are uniqued during code generation, and spare
        // constant registers may be allocated.)
        int m_numConstants;
        int m_numVars;
        int m_numParameters;

    private:
#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;
#endif

        void reparseForExceptionInfoIfNecessary(CallFrame*);

        void createRareDataIfNecessary()
        {
            if (!m_rareData)
                m_rareData.set(new RareData);
        }

        ScopeNode* m_ownerNode;
        JSGlobalData* m_globalData;

        Vector<Instruction> m_instructions;
#ifndef NDEBUG
        unsigned m_instructionCount;
#endif

        int m_thisRegister;

        bool m_needsFullScopeChain;
        bool m_usesEval;
        bool m_usesArguments;
        bool m_isNumericCompareFunction;

        CodeType m_codeType;

        RefPtr<SourceProvider> m_source;
        unsigned m_sourceOffset;

#if !ENABLE(JIT)
        Vector<unsigned> m_propertyAccessInstructions;
        Vector<unsigned> m_globalResolveInstructions;
#else
        Vector<StructureStubInfo> m_structureStubInfos;
        Vector<GlobalResolveInfo> m_globalResolveInfos;
        Vector<CallLinkInfo> m_callLinkInfos;
        Vector<MethodCallLinkInfo> m_methodCallLinkInfos;
        Vector<CallLinkInfo*> m_linkedCallerList;
#endif

        Vector<unsigned> m_jumpTargets;

        // Constant Pool
        Vector<Identifier> m_identifiers;
        Vector<Register> m_constantRegisters;
        Vector<RefPtr<FuncExprNode> > m_functionExpressions;

        SymbolTable m_symbolTable;

        struct ExceptionInfo {
            Vector<ExpressionRangeInfo> m_expressionInfo;
            Vector<LineInfo> m_lineInfo;
            Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;

#if ENABLE(JIT)
            Vector<CallReturnOffsetToBytecodeIndex> m_callReturnIndexVector;
#endif
        };
        OwnPtr<ExceptionInfo> m_exceptionInfo;

        struct RareData {
            Vector<HandlerInfo> m_exceptionHandlers;

            // Rare Constants
            Vector<RefPtr<FuncDeclNode> > m_functions;
            Vector<JSValue> m_unexpectedConstants;
            Vector<RefPtr<RegExp> > m_regexps;

            // Jump Tables
            Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
            Vector<SimpleJumpTable> m_characterSwitchJumpTables;
            Vector<StringJumpTable> m_stringSwitchJumpTables;

            EvalCodeCache m_evalCodeCache;

#if ENABLE(JIT)
            Vector<FunctionRegisterInfo> m_functionRegisterInfos;
#endif
        };
        OwnPtr<RareData> m_rareData;
    };

    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class ProgramCodeBlock : public CodeBlock {
    public:
        ProgramCodeBlock(ScopeNode* ownerNode, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : CodeBlock(ownerNode, codeType, sourceProvider, 0)
            , m_globalObject(globalObject)
        {
            m_globalObject->codeBlocks().add(this);
        }

        ~ProgramCodeBlock()
        {
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
    };

    class EvalCodeBlock : public ProgramCodeBlock {
    public:
        EvalCodeBlock(ScopeNode* ownerNode, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : ProgramCodeBlock(ownerNode, EvalCode, globalObject, sourceProvider)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

    private:
        int m_baseScopeDepth;
    };

} // namespace JSC

#endif // CodeBlock_h