source: webkit/trunk/JavaScriptCore/bytecode/CodeBlock.h@40846

Last change on this file since 40846 was 40846, checked in by [email protected], 16 years ago

2009-02-10 Gavin Barraclough <[email protected]>

Reviewed by Oliver Hunt.

Reduce use of void* / reinterpret_cast in JIT repatching code,
add strong types for Calls and for the various types of pointers
we retain into the JIT generated instruction stream.

No performance impact.
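
As a minimal sketch of the idea (the name is modelled on the CodeLocation* types listed below; the body is a simplified assumption, not the patch's actual implementation): a strong type wraps the raw pointer into the generated instruction stream, so a label cannot be confused with a call site or a bare void*, and the casts stay confined to the wrapper.

    // Sketch only: a distinct type for a pointer to a label in JIT-generated code.
    class CodeLocationLabelSketch {
    public:
        CodeLocationLabelSketch()
            : m_location(0)
        {
        }

        explicit CodeLocationLabelSketch(void* location)
            : m_location(location)
        {
        }

        // Named accessors document what the raw address is being used for.
        void* addressForSwitch() const { return m_location; }
        void* addressForExceptionHandler() const { return m_location; }

    private:
        void* m_location;
    };

A call site would get its own wrapper (CodeLocationCall in the list below), so the two kinds of pointer cannot be interchanged accidentally.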

  • assembler/AbstractMacroAssembler.h: (JSC::AbstractMacroAssembler::ImmPtr::ImmPtr): (JSC::AbstractMacroAssembler::ImmPtr::asIntptr): (JSC::AbstractMacroAssembler::Imm32::Imm32): (JSC::AbstractMacroAssembler::Label::Label): (JSC::AbstractMacroAssembler::DataLabelPtr::DataLabelPtr): (JSC::AbstractMacroAssembler::Call::Call): (JSC::AbstractMacroAssembler::Call::link): (JSC::AbstractMacroAssembler::Call::linkTo): (JSC::AbstractMacroAssembler::Jump::Jump): (JSC::AbstractMacroAssembler::Jump::linkTo): (JSC::AbstractMacroAssembler::CodeLocationCommon::CodeLocationCommon): (JSC::AbstractMacroAssembler::CodeLocationCommon::operator bool): (JSC::AbstractMacroAssembler::CodeLocationCommon::reset): (JSC::AbstractMacroAssembler::CodeLocationLabel::CodeLocationLabel): (JSC::AbstractMacroAssembler::CodeLocationLabel::addressForSwitch): (JSC::AbstractMacroAssembler::CodeLocationLabel::addressForExceptionHandler): (JSC::AbstractMacroAssembler::CodeLocationLabel::addressForJSR): (JSC::AbstractMacroAssembler::CodeLocationLabel::getJumpDestination): (JSC::AbstractMacroAssembler::CodeLocationJump::CodeLocationJump): (JSC::AbstractMacroAssembler::CodeLocationJump::relink): (JSC::AbstractMacroAssembler::CodeLocationCall::CodeLocationCall): (JSC::AbstractMacroAssembler::CodeLocationCall::relink): (JSC::AbstractMacroAssembler::CodeLocationCall::calleeReturnAddressValue): (JSC::AbstractMacroAssembler::CodeLocationDataLabel32::CodeLocationDataLabel32): (JSC::AbstractMacroAssembler::CodeLocationDataLabel32::repatch): (JSC::AbstractMacroAssembler::CodeLocationDataLabelPtr::CodeLocationDataLabelPtr): (JSC::AbstractMacroAssembler::CodeLocationDataLabelPtr::repatch): (JSC::AbstractMacroAssembler::ProcessorReturnAddress::ProcessorReturnAddress): (JSC::AbstractMacroAssembler::ProcessorReturnAddress::relinkCallerToFunction): (JSC::AbstractMacroAssembler::ProcessorReturnAddress::operator void*): (JSC::AbstractMacroAssembler::PatchBuffer::entry): (JSC::AbstractMacroAssembler::PatchBuffer::trampolineAt): (JSC::AbstractMacroAssembler::PatchBuffer::link): (JSC::AbstractMacroAssembler::PatchBuffer::linkTailRecursive): (JSC::AbstractMacroAssembler::PatchBuffer::patch): (JSC::AbstractMacroAssembler::PatchBuffer::locationOf): (JSC::AbstractMacroAssembler::PatchBuffer::returnAddressOffset): (JSC::AbstractMacroAssembler::differenceBetween): (JSC::::CodeLocationCommon::labelAtOffset): (JSC::::CodeLocationCommon::jumpAtOffset): (JSC::::CodeLocationCommon::callAtOffset): (JSC::::CodeLocationCommon::dataLabelPtrAtOffset): (JSC::::CodeLocationCommon::dataLabel32AtOffset):
  • assembler/MacroAssemblerX86Common.h: (JSC::MacroAssemblerX86Common::call):
  • assembler/X86Assembler.h: (JSC::X86Assembler::getCallReturnOffset):
  • bytecode/CodeBlock.h: (JSC::CallLinkInfo::CallLinkInfo): (JSC::getStructureStubInfoReturnLocation): (JSC::getCallLinkInfoReturnLocation):
  • bytecode/Instruction.h: (JSC::PolymorphicAccessStructureList::PolymorphicStubInfo::set): (JSC::PolymorphicAccessStructureList::PolymorphicAccessStructureList):
  • bytecode/JumpTable.h: (JSC::StringJumpTable::ctiForValue): (JSC::SimpleJumpTable::ctiForValue):
  • bytecode/StructureStubInfo.h: (JSC::StructureStubInfo::StructureStubInfo):
  • bytecompiler/BytecodeGenerator.cpp: (JSC::BytecodeGenerator::emitCatch): (JSC::prepareJumpTableForStringSwitch):
  • interpreter/Interpreter.cpp: (JSC::Interpreter::cti_op_get_by_id_self_fail): (JSC::getPolymorphicAccessStructureListSlot): (JSC::Interpreter::cti_op_throw): (JSC::Interpreter::cti_op_switch_imm): (JSC::Interpreter::cti_op_switch_char): (JSC::Interpreter::cti_op_switch_string): (JSC::Interpreter::cti_vm_throw):
  • jit/JIT.cpp: (JSC::ctiSetReturnAddress): (JSC::ctiPatchCallByReturnAddress): (JSC::JIT::privateCompile): (JSC::JIT::privateCompileCTIMachineTrampolines):
  • jit/JIT.h: (JSC::CallRecord::CallRecord): (JSC::JIT::compileGetByIdSelf): (JSC::JIT::compileGetByIdProto): (JSC::JIT::compileGetByIdChain): (JSC::JIT::compilePutByIdReplace): (JSC::JIT::compilePutByIdTransition): (JSC::JIT::compilePatchGetArrayLength): (JSC::JIT::emitCTICall):
  • jit/JITCall.cpp: (JSC::JIT::unlinkCall): (JSC::JIT::linkCall):
  • jit/JITInlineMethods.h: (JSC::JIT::emitNakedCall): (JSC::JIT::emitCTICall_internal):
  • jit/JITPropertyAccess.cpp: (JSC::JIT::compileGetByIdSlowCase): (JSC::JIT::compilePutByIdSlowCase): (JSC::JIT::privateCompilePutByIdTransition): (JSC::JIT::patchGetByIdSelf): (JSC::JIT::patchPutByIdReplace): (JSC::JIT::privateCompilePatchGetArrayLength): (JSC::JIT::privateCompileGetByIdSelf): (JSC::JIT::privateCompileGetByIdProto): (JSC::JIT::privateCompileGetByIdSelfList): (JSC::JIT::privateCompileGetByIdProtoList): (JSC::JIT::privateCompileGetByIdChainList): (JSC::JIT::privateCompileGetByIdChain): (JSC::JIT::privateCompilePutByIdReplace):
File size: 22.7 KB
/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <[email protected]>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CodeBlock_h
#define CodeBlock_h

#include "EvalCodeCache.h"
#include "Instruction.h"
#include "JITCode.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "Nodes.h"
#include "RegExp.h"
#include "UString.h"
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>

#if ENABLE(JIT)
#include "StructureStubInfo.h"
#endif

namespace JSC {

    class ExecState;

    enum CodeType { GlobalCode, EvalCode, FunctionCode };

    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }

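    // Exception handler table entry: start and end delimit the bytecode range the
    // handler covers, target is the bytecode offset of the handler itself, and
    // scopeDepth is the scope chain depth to restore before entering it. When the
    // JIT is enabled, nativeCode is the corresponding machine-code entry point.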
    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
#if ENABLE(JIT)
        MacroAssembler::CodeLocationLabel nativeCode;
#endif
    };

#if ENABLE(JIT)
    // The code, and the associated pool from which it was allocated.
    struct JITCodeRef {
        JITCode code;
#ifndef NDEBUG
        unsigned codeSize;
#endif
        RefPtr<ExecutablePool> executablePool;

        JITCodeRef()
            : code(0)
#ifndef NDEBUG
            , codeSize(0)
#endif
        {
        }

        JITCodeRef(void* code, PassRefPtr<ExecutablePool> executablePool)
            : code(code)
#ifndef NDEBUG
            , codeSize(0)
#endif
            , executablePool(executablePool)
        {
        }
    };
#endif

    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };

    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };

    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these instances of op_get_by_id need to reflect this.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31;
        bool isOpConstruct : 1;
    };

#if ENABLE(JIT)
    struct CallLinkInfo {
        CallLinkInfo()
            : callee(0)
        {
        }

        unsigned bytecodeIndex;
        MacroAssembler::CodeLocationCall callReturnLocation;
        MacroAssembler::CodeLocationDataLabelPtr hotPathBegin;
        MacroAssembler::CodeLocationCall hotPathOther;
        MacroAssembler::CodeLocationLabel coldPathOther;
        CodeBlock* callee;
        unsigned position;

        void setUnlinked() { callee = 0; }
        bool isLinked() { return callee; }
    };

    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)
        {
        }

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    };

    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure;
        unsigned offset;
        unsigned bytecodeOffset;
    };

    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode index of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeIndex {
        CallReturnOffsetToBytecodeIndex(unsigned callReturnOffset, unsigned bytecodeIndex)
            : callReturnOffset(callReturnOffset)
            , bytecodeIndex(bytecodeIndex)
        {
        }

        unsigned callReturnOffset;
        unsigned bytecodeIndex;
    };

    // valueAtPosition helpers for the binaryChop algorithm below.

    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.calleeReturnAddressValue();
    }

    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.calleeReturnAddressValue();
    }

    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeIndex* pc)
    {
        return pc->callReturnOffset;
    }

    // Binary chop algorithm: calls valueAtPosition on pre-sorted elements in the array and
    // compares the result with key (KeyTypes should be comparable with '==', '<', '>').
    // Optimized for cases where the array contains the key, checked by assertions.
    template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
    inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
    {
        // The array must contain at least one element (precondition: the array does contain key).
        // If the array only contains one element, there is no need to do the comparison.
        while (size > 1) {
            // Pick an element to check, half way through the array, and read the value.
            int pos = (size - 1) >> 1;
            KeyType val = valueAtPosition(&array[pos]);

            // If the key matches, success!
            if (val == key)
                return &array[pos];
            // The item we are looking for is smaller than the item being checked; reduce the value of 'size',
            // chopping off the right hand half of the array.
            else if (key < val)
                size = pos;
            // Discard all values in the left hand half of the array, up to and including the item at pos.
            else {
                size -= (pos + 1);
                array += (pos + 1);
            }

            // 'size' should never reach zero.
            ASSERT(size);
        }

        // If we reach this point we've chopped down to one element; no need to check that it matches.
        ASSERT(size == 1);
        ASSERT(key == valueAtPosition(&array[0]));
        return &array[0];
    }
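
    // Illustrative sketch of how binaryChop is instantiated with one of the helpers
    // above (placeholder variable names); getBytecodeIndex() below does exactly this
    // to map a call return offset back to its bytecode index:
    //
    //     CallReturnOffsetToBytecodeIndex* entry =
    //         binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(
    //             callReturnIndexVector.begin(), callReturnIndexVector.size(), returnOffset);
    //     unsigned bytecodeIndex = entry->bytecodeIndex;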
#endif

    class CodeBlock {
        friend class JIT;
    public:
        CodeBlock(ScopeNode* ownerNode, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset);
        ~CodeBlock();

        void mark();
        void refStructures(Instruction* vPC) const;
        void derefStructures(Instruction* vPC) const;
#if ENABLE(JIT)
        void unlinkCallers();
#endif

        static void dumpStatistics();

#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*) const;
        void printStructures(const Instruction*) const;
        void printStructure(const char* name, const Instruction*, int operand) const;
#endif

        inline bool isKnownNotImmediate(int index)
        {
            if (index == m_thisRegister)
                return true;

            if (isConstantRegisterIndex(index))
                return getConstant(index).isCell();

            return false;
        }

        ALWAYS_INLINE bool isConstantRegisterIndex(int index)
        {
            return index >= m_numVars && index < m_numVars + m_numConstants;
        }

        ALWAYS_INLINE JSValuePtr getConstant(int index)
        {
            return m_constantRegisters[index - m_numVars].getJSValue();
        }

        ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
        {
            return index >= m_numVars + m_numConstants;
        }
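
        // Register index layout encoded by the accessors above: constant registers occupy
        // indices in [m_numVars, m_numVars + m_numConstants), and indices at or above that
        // range are temporaries; getConstant() subtracts m_numVars to index m_constantRegisters.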

        HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
        int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
        int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
        bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);

#if ENABLE(JIT)
        void addCaller(CallLinkInfo* caller)
        {
            caller->callee = this;
            caller->position = m_linkedCallerList.size();
            m_linkedCallerList.append(caller);
        }

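        // Unlink a caller in O(1): move the last entry into the vacated slot, fix up its
        // recorded position, and shrink the list by one.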
        void removeCaller(CallLinkInfo* caller)
        {
            unsigned pos = caller->position;
            unsigned lastPos = m_linkedCallerList.size() - 1;

            if (pos != lastPos) {
                m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
                m_linkedCallerList[pos]->position = pos;
            }
            m_linkedCallerList.shrink(lastPos);
        }

        StructureStubInfo& getStubInfo(void* returnAddress)
        {
            return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress));
        }

        CallLinkInfo& getCallLinkInfo(void* returnAddress)
        {
            return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress));
        }
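
        // Illustrative sketch of the expected callers (hypothetical variable names): a JIT
        // stub that knows its own return address can recover the metadata recorded for that
        // call site at compile time, e.g.
        //
        //     StructureStubInfo& stubInfo = callFrame->codeBlock()->getStubInfo(returnAddress);
        //     CallLinkInfo& linkInfo = callFrame->codeBlock()->getCallLinkInfo(returnAddress);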

        unsigned getBytecodeIndex(CallFrame* callFrame, void* nativePC)
        {
            reparseForExceptionInfoIfNecessary(callFrame);
            return binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(m_exceptionInfo->m_callReturnIndexVector.begin(), m_exceptionInfo->m_callReturnIndexVector.size(), m_jitCode.code.offsetOf(nativePC))->bytecodeIndex;
        }

        bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
#endif

        Vector<Instruction>& instructions() { return m_instructions; }
#ifndef NDEBUG
        void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
#endif

#if ENABLE(JIT)
        void setJITCode(JITCodeRef& jitCode);
        JITCode jitCode() { return m_jitCode.code; }
        ExecutablePool* executablePool() { return m_jitCode.executablePool.get(); }
#endif

        ScopeNode* ownerNode() const { return m_ownerNode; }

        void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }

        void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
        int thisRegister() const { return m_thisRegister; }

        void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
        bool needsFullScopeChain() const { return m_needsFullScopeChain; }
        void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
        bool usesEval() const { return m_usesEval; }
        void setUsesArguments(bool usesArguments) { m_usesArguments = usesArguments; }
        bool usesArguments() const { return m_usesArguments; }

        CodeType codeType() const { return m_codeType; }

        SourceProvider* source() const { return m_source.get(); }
        unsigned sourceOffset() const { return m_sourceOffset; }

        size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
        void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
        unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
        unsigned lastJumpTarget() const { return m_jumpTargets.last(); }

#if !ENABLE(JIT)
        void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
        void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
        bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
#else
        size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
        void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
        StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }

        void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
        GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
        bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);

        size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
        void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
        CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }

        void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
#endif

        // Exception handling support

        size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
        void addExceptionHandler(const HandlerInfo& handler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(handler); }
        HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }

        bool hasExceptionInfo() const { return m_exceptionInfo; }
        void clearExceptionInfo() { m_exceptionInfo.clear(); }

        void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
        void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }

        size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
        void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
        LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }

#if ENABLE(JIT)
        Vector<CallReturnOffsetToBytecodeIndex>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; }
#endif

        // Constant Pool

        size_t numberOfIdentifiers() const { return m_identifiers.size(); }
        void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
        Identifier& identifier(int index) { return m_identifiers[index]; }

        size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
        void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
        Register& constantRegister(int index) { return m_constantRegisters[index]; }

        unsigned addFunctionExpression(FuncExprNode* n) { unsigned size = m_functionExpressions.size(); m_functionExpressions.append(n); return size; }
        FuncExprNode* functionExpression(int index) const { return m_functionExpressions[index].get(); }

        unsigned addFunction(FuncDeclNode* n) { createRareDataIfNecessary(); unsigned size = m_rareData->m_functions.size(); m_rareData->m_functions.append(n); return size; }
        FuncDeclNode* function(int index) const { ASSERT(m_rareData); return m_rareData->m_functions[index].get(); }

        bool hasFunctions() const { return m_functionExpressions.size() || (m_rareData && m_rareData->m_functions.size()); }

        unsigned addUnexpectedConstant(JSValuePtr v) { createRareDataIfNecessary(); unsigned size = m_rareData->m_unexpectedConstants.size(); m_rareData->m_unexpectedConstants.append(v); return size; }
        JSValuePtr unexpectedConstant(int index) const { ASSERT(m_rareData); return m_rareData->m_unexpectedConstants[index]; }

        unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
        RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }


        // Jump Tables

        size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
        SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }

        size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
        SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }

        size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
        StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
        StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }


        SymbolTable& symbolTable() { return m_symbolTable; }

        EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }

        void shrinkToFit();

        // FIXME: Make these remaining members private.

        int m_numCalleeRegisters;
        // NOTE: numConstants holds the number of constant registers allocated
        // by the code generator, not the number of constant registers used.
        // (Duplicate constants are uniqued during code generation, and spare
        // constant registers may be allocated.)
        int m_numConstants;
        int m_numVars;
        int m_numParameters;

    private:
#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;
#endif

        void reparseForExceptionInfoIfNecessary(CallFrame*);

        void createRareDataIfNecessary()
        {
            if (!m_rareData)
                m_rareData.set(new RareData);
        }

        ScopeNode* m_ownerNode;
        JSGlobalData* m_globalData;

        Vector<Instruction> m_instructions;
#ifndef NDEBUG
        unsigned m_instructionCount;
#endif
#if ENABLE(JIT)
        JITCodeRef m_jitCode;
#endif

        int m_thisRegister;

        bool m_needsFullScopeChain;
        bool m_usesEval;
        bool m_usesArguments;

        CodeType m_codeType;

        RefPtr<SourceProvider> m_source;
        unsigned m_sourceOffset;

#if !ENABLE(JIT)
        Vector<unsigned> m_propertyAccessInstructions;
        Vector<unsigned> m_globalResolveInstructions;
#else
        Vector<StructureStubInfo> m_structureStubInfos;
        Vector<GlobalResolveInfo> m_globalResolveInfos;
        Vector<CallLinkInfo> m_callLinkInfos;
        Vector<CallLinkInfo*> m_linkedCallerList;
#endif

        Vector<unsigned> m_jumpTargets;

        // Constant Pool
        Vector<Identifier> m_identifiers;
        Vector<Register> m_constantRegisters;
        Vector<RefPtr<FuncExprNode> > m_functionExpressions;

        SymbolTable m_symbolTable;

        struct ExceptionInfo {
            Vector<ExpressionRangeInfo> m_expressionInfo;
            Vector<LineInfo> m_lineInfo;
            Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;

#if ENABLE(JIT)
            Vector<CallReturnOffsetToBytecodeIndex> m_callReturnIndexVector;
#endif
        };
        OwnPtr<ExceptionInfo> m_exceptionInfo;

        struct RareData {
            Vector<HandlerInfo> m_exceptionHandlers;

            // Rare Constants
            Vector<RefPtr<FuncDeclNode> > m_functions;
            Vector<JSValuePtr> m_unexpectedConstants;
            Vector<RefPtr<RegExp> > m_regexps;

            // Jump Tables
            Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
            Vector<SimpleJumpTable> m_characterSwitchJumpTables;
            Vector<StringJumpTable> m_stringSwitchJumpTables;

            EvalCodeCache m_evalCodeCache;

#if ENABLE(JIT)
            Vector<FunctionRegisterInfo> m_functionRegisterInfos;
#endif
        };
        OwnPtr<RareData> m_rareData;
    };

    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class ProgramCodeBlock : public CodeBlock {
    public:
        ProgramCodeBlock(ScopeNode* ownerNode, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : CodeBlock(ownerNode, codeType, sourceProvider, 0)
            , m_globalObject(globalObject)
        {
            m_globalObject->codeBlocks().add(this);
        }

        ~ProgramCodeBlock()
        {
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
    };

    class EvalCodeBlock : public ProgramCodeBlock {
    public:
        EvalCodeBlock(ScopeNode* ownerNode, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : ProgramCodeBlock(ownerNode, EvalCode, globalObject, sourceProvider)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

    private:
        int m_baseScopeDepth;
    };

} // namespace JSC

#endif // CodeBlock_h