source: webkit/trunk/JavaScriptCore/bytecode/CodeBlock.h@ 45609

Last change on this file since 45609 was 45609, checked in by [email protected], 16 years ago

JavaScriptCore:

2009-07-07 Gavin Barraclough <[email protected]>

Reviewed by Sam Weinig.

Stop loading constants into the register file.

Instead, use high register values (highest bit bar the sign bit set) to indicate
constants in the instruction stream, and when we encounter such a value load it
directly from the CodeBlock.

Since constants are no longer copied into the register file, this patch renders
the 'unexpected constant' mechanism redundant, and removes it.

2% improvement, thanks to Sam Weinig.

  • bytecode/CodeBlock.cpp: (JSC::CodeBlock::dump): (JSC::CodeBlock::CodeBlock): (JSC::CodeBlock::mark): (JSC::CodeBlock::shrinkToFit):
  • bytecode/CodeBlock.h: (JSC::CodeBlock::isTemporaryRegisterIndex): (JSC::CodeBlock::constantRegister): (JSC::CodeBlock::isConstantRegisterIndex): (JSC::CodeBlock::getConstant): (JSC::ExecState::r):
  • bytecode/Opcode.h:
  • bytecompiler/BytecodeGenerator.cpp: (JSC::BytecodeGenerator::preserveLastVar): (JSC::BytecodeGenerator::BytecodeGenerator): (JSC::BytecodeGenerator::addConstantValue): (JSC::BytecodeGenerator::emitEqualityOp): (JSC::BytecodeGenerator::emitLoad): (JSC::BytecodeGenerator::emitResolveBase): (JSC::BytecodeGenerator::emitResolveWithBase): (JSC::BytecodeGenerator::emitNewError):
  • bytecompiler/BytecodeGenerator.h: (JSC::BytecodeGenerator::emitNode):
  • interpreter/CallFrame.h: (JSC::ExecState::noCaller): (JSC::ExecState::hasHostCallFrameFlag): (JSC::ExecState::addHostCallFrameFlag): (JSC::ExecState::removeHostCallFrameFlag):
  • interpreter/Interpreter.cpp: (JSC::Interpreter::resolve): (JSC::Interpreter::resolveSkip): (JSC::Interpreter::resolveGlobal): (JSC::Interpreter::resolveBase): (JSC::Interpreter::resolveBaseAndProperty): (JSC::Interpreter::resolveBaseAndFunc): (JSC::Interpreter::dumpRegisters): (JSC::Interpreter::throwException): (JSC::Interpreter::createExceptionScope): (JSC::Interpreter::privateExecute): (JSC::Interpreter::retrieveArguments):
  • jit/JIT.cpp: (JSC::JIT::privateCompileMainPass):
  • jit/JITInlineMethods.h: (JSC::JIT::emitLoadDouble): (JSC::JIT::emitLoadInt32ToDouble):
  • jit/JITOpcodes.cpp: (JSC::JIT::emit_op_new_error): (JSC::JIT::emit_op_enter): (JSC::JIT::emit_op_enter_with_activation):
  • parser/Nodes.cpp: (JSC::DeleteResolveNode::emitBytecode): (JSC::DeleteValueNode::emitBytecode): (JSC::PrefixResolveNode::emitBytecode):
  • runtime/JSActivation.cpp: (JSC::JSActivation::JSActivation):
  • wtf/Platform.h:

LayoutTests:

2009-07-07 Gavin Barraclough <[email protected]>

Reviewed by Geoff Garen.

fast/js/global-recursion-on-full-stack.html is a little finicky.

The test recurses down the JS stack to find the bottom (catching this with an exception),
then tries to call a host function (document.write), which writes new '<script>' code,
and expects this code to be run, then expects this code to try to call 'f();' again,
which it expects to fail, and it expects to catch that exception. However it is possible
that one of the earlier stages (the call to document.write, entering the interpreter to
run the new global code) will fail, depending on exactly how much stack space was free at
the point the last call to f() failed.

Tweak the test to make it work.

  • fast/js/global-recursion-on-full-stack.html:
File size: 23.5 KB
Line 
1/*
2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <[email protected]>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
15 * its contributors may be used to endorse or promote products derived
16 * from this software without specific prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30#ifndef CodeBlock_h
31#define CodeBlock_h
32
33#include "EvalCodeCache.h"
34#include "Instruction.h"
35#include "JITCode.h"
36#include "JSGlobalObject.h"
37#include "JumpTable.h"
38#include "Nodes.h"
39#include "RegExp.h"
40#include "UString.h"
41#include <wtf/FastAllocBase.h>
42#include <wtf/RefPtr.h>
43#include <wtf/Vector.h>
44
45#if ENABLE(JIT)
46#include "StructureStubInfo.h"
47#endif
48
// Register numbers used in bytecode operations have different meaning according to their ranges:
// 0x80000000-0xFFFFFFFF  Negative indices from the CallFrame pointer are entries in the call frame, see RegisterFile.h.
// 0x00000000-0x3FFFFFFF  Forwards indices from the CallFrame pointer are local vars and temporaries within the function's callframe.
// 0x40000000-0x7FFFFFFF  Positive indices from 0x40000000 specify entries in the constant pool on the CodeBlock.
static const int FirstConstantRegisterIndex = 0x40000000;
54
55namespace JSC {
56
57 class ExecState;
58
59 enum CodeType { GlobalCode, EvalCode, FunctionCode, NativeCode };
60
61 static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }
62
    // One entry in the exception-handler table: the [start, end) bytecode range
    // it covers, the bytecode offset of the handler to jump to, and the scope
    // depth to unwind to when the handler is entered.
    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
#if ENABLE(JIT)
        CodeLocationLabel nativeCode; // Entry point of the JIT-compiled handler.
#endif
    };
72
    // Maps a bytecode instruction back to the source expression it came from
    // (divot plus start/end offsets), used when building exception messages.
    // The Max* limits must agree with the bit-field widths below.
    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };
83
    // Associates a bytecode offset with the source line number it was generated from.
    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };
88
    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these uses of op_get_by_id need to reflect this.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31;
        bool isOpConstruct : 1; // true for op_construct, false for op_instanceof.
    };
96
97#if ENABLE(JIT)
    // Records the code locations the JIT patches for one linkable call site,
    // and which CodeBlock (if any) the site is currently linked to.
    struct CallLinkInfo {
        CallLinkInfo()
            : callee(0) // Starts unlinked.
        {
        }

        unsigned bytecodeIndex;
        CodeLocationNearCall callReturnLocation;
        CodeLocationDataLabelPtr hotPathBegin;
        CodeLocationNearCall hotPathOther;
        CodeBlock* callee; // Linked callee, or 0 when unlinked.
        unsigned position; // Index of this entry in the callee's m_linkedCallerList.

        void setUnlinked() { callee = 0; }
        bool isLinked() { return callee; }
    };
114
    // Records the code locations the JIT patches for a cached method-call
    // lookup; cachedStructure is 0 until a Structure has been cached.
    struct MethodCallLinkInfo {
        MethodCallLinkInfo()
            : cachedStructure(0)
        {
        }

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
    };
125
    // Associates a bytecode offset with a register index; presumably the
    // register holding the function value at that call site — confirm against
    // the consumers of m_functionRegisterInfos.
    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)
        {
        }

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    };
136
    // Per-site cache for a global-variable resolve. structure/offset start out
    // empty (0) and are filled in when the resolve is cached.
    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure;
        unsigned offset;
        unsigned bytecodeOffset;
    };
149
    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode index of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeIndex {
        CallReturnOffsetToBytecodeIndex(unsigned callReturnOffset, unsigned bytecodeIndex)
            : callReturnOffset(callReturnOffset)
            , bytecodeIndex(bytecodeIndex)
        {
        }

        unsigned callReturnOffset;
        unsigned bytecodeIndex;
    };
164
    // valueAtPosition helpers for the binaryChop algorithm below.

    // Key extractor: the machine-code return address of a structure stub.
    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }
171
    // Key extractor: the machine-code return address of a linkable call site.
    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }
176
    // Key extractor: the machine-code return address of a method-call cache site.
    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }
181
    // Key extractor: the byte offset (into JIT code) of a call's return point.
    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeIndex* pc)
    {
        return pc->callReturnOffset;
    }
186
187 // Binary chop algorithm, calls valueAtPosition on pre-sorted elements in array,
188 // compares result with key (KeyTypes should be comparable with '--', '<', '>').
189 // Optimized for cases where the array contains the key, checked by assertions.
190 template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
191 inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
192 {
193 // The array must contain at least one element (pre-condition, array does conatin key).
194 // If the array only contains one element, no need to do the comparison.
195 while (size > 1) {
196 // Pick an element to check, half way through the array, and read the value.
197 int pos = (size - 1) >> 1;
198 KeyType val = valueAtPosition(&array[pos]);
199
200 // If the key matches, success!
201 if (val == key)
202 return &array[pos];
203 // The item we are looking for is smaller than the item being check; reduce the value of 'size',
204 // chopping off the right hand half of the array.
205 else if (key < val)
206 size = pos;
207 // Discard all values in the left hand half of the array, up to and including the item at pos.
208 else {
209 size -= (pos + 1);
210 array += (pos + 1);
211 }
212
213 // 'size' should never reach zero.
214 ASSERT(size);
215 }
216
217 // If we reach this point we've chopped down to one element, no need to check it matches
218 ASSERT(size == 1);
219 ASSERT(key == valueAtPosition(&array[0]));
220 return &array[0];
221 }
222#endif
223
224 class CodeBlock : public WTF::FastAllocBase {
225 friend class JIT;
226 public:
227 CodeBlock(ScopeNode* ownerNode);
228 CodeBlock(ScopeNode* ownerNode, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset);
229 ~CodeBlock();
230
231 void mark();
232 void refStructures(Instruction* vPC) const;
233 void derefStructures(Instruction* vPC) const;
234#if ENABLE(JIT)
235 void unlinkCallers();
236#endif
237
238 static void dumpStatistics();
239
240#if !defined(NDEBUG) || ENABLE_OPCODE_SAMPLING
241 void dump(ExecState*) const;
242 void printStructures(const Instruction*) const;
243 void printStructure(const char* name, const Instruction*, int operand) const;
244#endif
245
246 inline bool isKnownNotImmediate(int index)
247 {
248 if (index == m_thisRegister)
249 return true;
250
251 if (isConstantRegisterIndex(index))
252 return getConstant(index).isCell();
253
254 return false;
255 }
256
257 ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
258 {
259 return index >= m_numVars;
260 }
261
262 HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
263 int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
264 int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
265 bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);
266
267#if ENABLE(JIT)
268 void addCaller(CallLinkInfo* caller)
269 {
270 caller->callee = this;
271 caller->position = m_linkedCallerList.size();
272 m_linkedCallerList.append(caller);
273 }
274
275 void removeCaller(CallLinkInfo* caller)
276 {
277 unsigned pos = caller->position;
278 unsigned lastPos = m_linkedCallerList.size() - 1;
279
280 if (pos != lastPos) {
281 m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
282 m_linkedCallerList[pos]->position = pos;
283 }
284 m_linkedCallerList.shrink(lastPos);
285 }
286
287 StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress)
288 {
289 return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value()));
290 }
291
292 CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress)
293 {
294 return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value()));
295 }
296
297 MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress)
298 {
299 return *(binaryChop<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value()));
300 }
301
302 unsigned getBytecodeIndex(CallFrame* callFrame, ReturnAddressPtr returnAddress)
303 {
304 reparseForExceptionInfoIfNecessary(callFrame);
305 return binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(m_exceptionInfo->m_callReturnIndexVector.begin(), m_exceptionInfo->m_callReturnIndexVector.size(), ownerNode()->generatedJITCode().offsetOf(returnAddress.value()))->bytecodeIndex;
306 }
307
308 bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
309#endif
310
311 void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
312 bool isNumericCompareFunction() { return m_isNumericCompareFunction; }
313
314 Vector<Instruction>& instructions() { return m_instructions; }
315#ifndef NDEBUG
316 void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
317#endif
318
319#if ENABLE(JIT)
320 void setJITCode(JITCode);
321 ExecutablePool* executablePool() { return ownerNode()->getExecutablePool(); }
322#endif
323
324 ScopeNode* ownerNode() const { return m_ownerNode; }
325
326 void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }
327
328 void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
329 int thisRegister() const { return m_thisRegister; }
330
331 void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
332 bool needsFullScopeChain() const { return m_needsFullScopeChain; }
333 void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
334 bool usesEval() const { return m_usesEval; }
335 void setUsesArguments(bool usesArguments) { m_usesArguments = usesArguments; }
336 bool usesArguments() const { return m_usesArguments; }
337
338 CodeType codeType() const { return m_codeType; }
339
340 SourceProvider* source() const { ASSERT(m_codeType != NativeCode); return m_source.get(); }
341 unsigned sourceOffset() const { ASSERT(m_codeType != NativeCode); return m_sourceOffset; }
342
343 size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
344 void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
345 unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
346 unsigned lastJumpTarget() const { return m_jumpTargets.last(); }
347
348#if !ENABLE(JIT)
349 void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
350 void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
351 bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
352#else
353 size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
354 void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
355 StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }
356
357 void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
358 GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
359 bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);
360
361 size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
362 void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
363 CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }
364
365 void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); }
366 MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }
367
368 void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
369#endif
370
371 // Exception handling support
372
373 size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
374 void addExceptionHandler(const HandlerInfo& hanler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(hanler); }
375 HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }
376
377 bool hasExceptionInfo() const { return m_exceptionInfo; }
378 void clearExceptionInfo() { m_exceptionInfo.clear(); }
379
380 void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
381 void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }
382
383 size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
384 void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
385 LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }
386
387#if ENABLE(JIT)
388 Vector<CallReturnOffsetToBytecodeIndex>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; }
389#endif
390
391 // Constant Pool
392
393 size_t numberOfIdentifiers() const { return m_identifiers.size(); }
394 void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
395 Identifier& identifier(int index) { return m_identifiers[index]; }
396
397 size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
398 void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
399 Register& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; }
400 ALWAYS_INLINE bool isConstantRegisterIndex(int index) { return index >= FirstConstantRegisterIndex; }
401 ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].jsValue(); }
402
403 unsigned addFunctionExpression(FuncExprNode* n) { unsigned size = m_functionExpressions.size(); m_functionExpressions.append(n); return size; }
404 FuncExprNode* functionExpression(int index) const { return m_functionExpressions[index].get(); }
405
406 unsigned addFunction(FuncDeclNode* n) { createRareDataIfNecessary(); unsigned size = m_rareData->m_functions.size(); m_rareData->m_functions.append(n); return size; }
407 FuncDeclNode* function(int index) const { ASSERT(m_rareData); return m_rareData->m_functions[index].get(); }
408
409 bool hasFunctions() const { return m_functionExpressions.size() || (m_rareData && m_rareData->m_functions.size()); }
410
411 unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
412 RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }
413
414
415 // Jump Tables
416
417 size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
418 SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
419 SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }
420
421 size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
422 SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
423 SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }
424
425 size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
426 StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
427 StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }
428
429
430 SymbolTable& symbolTable() { return m_symbolTable; }
431
432 EvalCodeCache& evalCodeCache() { ASSERT(m_codeType != NativeCode); createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }
433
434 void shrinkToFit();
435
436 // FIXME: Make these remaining members private.
437
438 int m_numCalleeRegisters;
439 int m_numVars;
440 int m_numParameters;
441
442 private:
443#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
444 void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;
445#endif
446
447 void reparseForExceptionInfoIfNecessary(CallFrame*);
448
449 void createRareDataIfNecessary()
450 {
451 ASSERT(m_codeType != NativeCode);
452 if (!m_rareData)
453 m_rareData.set(new RareData);
454 }
455
456 ScopeNode* m_ownerNode;
457 JSGlobalData* m_globalData;
458
459 Vector<Instruction> m_instructions;
460#ifndef NDEBUG
461 unsigned m_instructionCount;
462#endif
463
464 int m_thisRegister;
465
466 bool m_needsFullScopeChain;
467 bool m_usesEval;
468 bool m_usesArguments;
469 bool m_isNumericCompareFunction;
470
471 CodeType m_codeType;
472
473 RefPtr<SourceProvider> m_source;
474 unsigned m_sourceOffset;
475
476#if !ENABLE(JIT)
477 Vector<unsigned> m_propertyAccessInstructions;
478 Vector<unsigned> m_globalResolveInstructions;
479#else
480 Vector<StructureStubInfo> m_structureStubInfos;
481 Vector<GlobalResolveInfo> m_globalResolveInfos;
482 Vector<CallLinkInfo> m_callLinkInfos;
483 Vector<MethodCallLinkInfo> m_methodCallLinkInfos;
484 Vector<CallLinkInfo*> m_linkedCallerList;
485#endif
486
487 Vector<unsigned> m_jumpTargets;
488
489 // Constant Pool
490 Vector<Identifier> m_identifiers;
491 Vector<Register> m_constantRegisters;
492 Vector<RefPtr<FuncExprNode> > m_functionExpressions;
493
494 SymbolTable m_symbolTable;
495
496 struct ExceptionInfo {
497 Vector<ExpressionRangeInfo> m_expressionInfo;
498 Vector<LineInfo> m_lineInfo;
499 Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;
500
501#if ENABLE(JIT)
502 Vector<CallReturnOffsetToBytecodeIndex> m_callReturnIndexVector;
503#endif
504 };
505 OwnPtr<ExceptionInfo> m_exceptionInfo;
506
507 struct RareData {
508 Vector<HandlerInfo> m_exceptionHandlers;
509
510 // Rare Constants
511 Vector<RefPtr<FuncDeclNode> > m_functions;
512 Vector<RefPtr<RegExp> > m_regexps;
513
514 // Jump Tables
515 Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
516 Vector<SimpleJumpTable> m_characterSwitchJumpTables;
517 Vector<StringJumpTable> m_stringSwitchJumpTables;
518
519 EvalCodeCache m_evalCodeCache;
520
521#if ENABLE(JIT)
522 Vector<FunctionRegisterInfo> m_functionRegisterInfos;
523#endif
524 };
525 OwnPtr<RareData> m_rareData;
526 };
527
    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class ProgramCodeBlock : public CodeBlock {
    public:
        // Registers this block in the global object's code-block set so the
        // global object can mark it during GC.
        ProgramCodeBlock(ScopeNode* ownerNode, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : CodeBlock(ownerNode, codeType, sourceProvider, 0)
            , m_globalObject(globalObject)
        {
            m_globalObject->codeBlocks().add(this);
        }

        ~ProgramCodeBlock()
        {
            // m_globalObject is 0 if clearGlobalObject() ran, i.e. the global
            // object was destroyed before this code block.
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        // Called by the global object on destruction to sever the back-pointer.
        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
    };
551
    // Code block for eval code; additionally remembers the scope-chain depth
    // at which the eval was performed.
    class EvalCodeBlock : public ProgramCodeBlock {
    public:
        EvalCodeBlock(ScopeNode* ownerNode, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : ProgramCodeBlock(ownerNode, EvalCode, globalObject, sourceProvider)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

    private:
        int m_baseScopeDepth;
    };
565
    // Accesses register 'index' relative to this call frame. Constant
    // registers (index >= FirstConstantRegisterIndex) are not stored in the
    // register file, so they are fetched from the CodeBlock's constant pool.
    inline Register& ExecState::r(int index)
    {
        CodeBlock* codeBlock = this->codeBlock();
        if (codeBlock->isConstantRegisterIndex(index))
            return codeBlock->constantRegister(index);
        return this[index];
    }
573
574} // namespace JSC
575
576#endif // CodeBlock_h
Note: See TracBrowser for help on using the repository browser.