source: webkit/trunk/JavaScriptCore/bytecode/CodeBlock.h@59777

Last change on this file since 59777 was 59777, checked in by [email protected], 15 years ago

Standardized naming: "bytecode offset" wins; "bytecode index" loses.

Reviewed by Gavin Barraclough.

Also renamed getBytecodeOffset to bytecodeOffset, to match the WebKit
naming style.

  • bytecode/CodeBlock.h:

(JSC::CallReturnOffsetToBytecodeOffset::CallReturnOffsetToBytecodeOffset):
(JSC::getCallReturnOffset):
(JSC::CodeBlock::bytecodeOffset):
(JSC::CodeBlock::callReturnIndexVector):

  • interpreter/Interpreter.cpp:

(JSC::bytecodeOffsetForPC):

  • jit/JIT.cpp:

(JSC::JIT::JIT):
(JSC::JIT::privateCompileMainPass):
(JSC::JIT::privateCompileLinkPass):
(JSC::JIT::privateCompileSlowCases):
(JSC::JIT::privateCompile):

  • jit/JIT.h:

(JSC::CallRecord::CallRecord):
(JSC::JumpTable::JumpTable):
(JSC::SwitchRecord::SwitchRecord):

  • jit/JITCall.cpp:

(JSC::JIT::compileOpCallVarargsSlowCase):
(JSC::JIT::compileOpCall):

  • jit/JITInlineMethods.h:

(JSC::JIT::emitNakedCall):
(JSC::JIT::addSlowCase):
(JSC::JIT::addJump):
(JSC::JIT::emitJumpSlowToHot):
(JSC::JIT::isLabeled):
(JSC::JIT::map):
(JSC::JIT::unmap):
(JSC::JIT::isMapped):
(JSC::JIT::getMappedPayload):
(JSC::JIT::getMappedTag):
(JSC::JIT::emitGetVirtualRegister):

  • jit/JITOpcodes.cpp:

(JSC::JIT::emit_op_switch_imm):
(JSC::JIT::emit_op_switch_char):
(JSC::JIT::emit_op_switch_string):
(JSC::JIT::emit_op_new_error):

  • jit/JITOpcodes32_64.cpp:

(JSC::JIT::emit_op_mov):
(JSC::JIT::emit_op_get_global_var):
(JSC::JIT::emit_op_put_global_var):
(JSC::JIT::emit_op_get_scoped_var):
(JSC::JIT::emit_op_put_scoped_var):
(JSC::JIT::emit_op_to_primitive):
(JSC::JIT::emit_op_resolve_global):
(JSC::JIT::emit_op_to_jsnumber):
(JSC::JIT::emit_op_catch):
(JSC::JIT::emit_op_switch_imm):
(JSC::JIT::emit_op_switch_char):
(JSC::JIT::emit_op_switch_string):
(JSC::JIT::emit_op_new_error):
(JSC::JIT::emit_op_convert_this):

  • jit/JITPropertyAccess.cpp:

(JSC::JIT::emit_op_method_check):
(JSC::JIT::emitSlow_op_method_check):

  • jit/JITPropertyAccess32_64.cpp:

(JSC::JIT::emit_op_method_check):
(JSC::JIT::emitSlow_op_method_check):
(JSC::JIT::emit_op_get_by_val):
(JSC::JIT::emit_op_get_by_id):
(JSC::JIT::emit_op_get_by_pname):

  • jit/JITStubCall.h:

(JSC::JITStubCall::call):

  • jit/JITStubs.cpp:

(JSC::DEFINE_STUB_FUNCTION):

/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <[email protected]>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CodeBlock_h
#define CodeBlock_h

#include "EvalCodeCache.h"
#include "Instruction.h"
#include "JITCode.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "Nodes.h"
#include "RegExp.h"
#include "UString.h"
#include <wtf/FastAllocBase.h>
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>

#if ENABLE(JIT)
#include "StructureStubInfo.h"
#endif

// Register numbers used in bytecode operations have different meanings according to their ranges:
//      0x80000000-0xFFFFFFFF  Negative indices from the CallFrame pointer are entries in the call frame, see RegisterFile.h.
//      0x00000000-0x3FFFFFFF  Forward indices from the CallFrame pointer are local vars and temporaries within the function's call frame.
//      0x40000000-0x7FFFFFFF  Positive indices from 0x40000000 specify entries in the constant pool on the CodeBlock.
static const int FirstConstantRegisterIndex = 0x40000000;

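// --- Illustrative sketch, not part of the original header: how the ranges documented above
// partition a raw operand index. 'OperandKind' and 'classifyOperand' are hypothetical names
// introduced here for illustration only; CodeBlock itself exposes isConstantRegisterIndex()
// and constantRegister() further below.
enum OperandKind { CallFrameHeaderEntry, LocalOrTemporary, ConstantPoolEntry };

static inline OperandKind classifyOperand(int index)
{
    if (index < 0)
        return CallFrameHeaderEntry; // negative offsets index the call frame header (see RegisterFile.h)
    if (index >= FirstConstantRegisterIndex)
        return ConstantPoolEntry;    // maps to m_constantRegisters[index - FirstConstantRegisterIndex]
    return LocalOrTemporary;         // locals and temporaries within the function's call frame
}
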
namespace JSC {

    enum HasSeenShouldRepatch {
        hasSeenShouldRepatch
    };

    class ExecState;

    enum CodeType { GlobalCode, EvalCode, FunctionCode };

    inline int unmodifiedArgumentsRegister(int argumentsRegister) { return argumentsRegister - 1; }

    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }

    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
#if ENABLE(JIT)
        CodeLocationLabel nativeCode;
#endif
    };

    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };
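
    // --- Illustrative sketch, not part of the original header: ExpressionRangeInfo is assumed to
    // describe a source range relative to its divot (the character position reported when the
    // expression fails), i.e. the expression is taken to span
    // [divotPoint - startOffset, divotPoint + endOffset]. 'expressionRangeBounds' is a
    // hypothetical helper name used here for illustration only.
    inline void expressionRangeBounds(const ExpressionRangeInfo& info, unsigned& expressionStart, unsigned& expressionEnd)
    {
        expressionStart = info.divotPoint - info.startOffset;
        expressionEnd = info.divotPoint + info.endOffset;
    }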

    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };

    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these instances of op_get_by_id need to reflect this.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31;
        bool isOpConstruct : 1;
    };

#if ENABLE(JIT)
    struct CallLinkInfo {
        CallLinkInfo()
            : callee(0)
            , position(0)
            , hasSeenShouldRepatch(0)
        {
        }

        unsigned bytecodeOffset;
        CodeLocationNearCall callReturnLocation;
        CodeLocationDataLabelPtr hotPathBegin;
        CodeLocationNearCall hotPathOther;
        CodeBlock* ownerCodeBlock;
        CodeBlock* callee;
        unsigned position : 31;
        unsigned hasSeenShouldRepatch : 1;

        void setUnlinked() { callee = 0; }
        bool isLinked() { return callee; }

        bool seenOnce()
        {
            return hasSeenShouldRepatch;
        }

        void setSeen()
        {
            hasSeenShouldRepatch = true;
        }
    };

    struct MethodCallLinkInfo {
        MethodCallLinkInfo()
            : cachedStructure(0)
            , cachedPrototypeStructure(0)
        {
        }

        bool seenOnce()
        {
            ASSERT(!cachedStructure);
            return cachedPrototypeStructure;
        }

        void setSeen()
        {
            ASSERT(!cachedStructure && !cachedPrototypeStructure);
            // We use the values of cachedStructure & cachedPrototypeStructure to indicate the
            // current state.
            // - In the initial state, both are null.
            // - Once this transition has been taken once, cachedStructure is
            //   null and cachedPrototypeStructure is set to a non-null value.
            // - Once the call is linked both structures are set to non-null values.
            cachedPrototypeStructure = (Structure*)1;
        }

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
        Structure* cachedPrototypeStructure;
    };
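
    // --- Illustrative sketch, not part of the original header: the three states described in
    // setSeen() above, expressed as a hypothetical helper. 'MethodCallLinkState' and
    // 'methodCallLinkState' are names introduced here for illustration only.
    enum MethodCallLinkState { MethodCallUnseen, MethodCallSeenOnce, MethodCallLinked };

    inline MethodCallLinkState methodCallLinkState(const MethodCallLinkInfo& info)
    {
        if (!info.cachedPrototypeStructure)
            return MethodCallUnseen;    // initial state: both structure pointers are null
        if (!info.cachedStructure)
            return MethodCallSeenOnce;  // cachedPrototypeStructure holds the (Structure*)1 sentinel
        return MethodCallLinked;        // both structures point at real Structures
    }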

    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)
        {
        }

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    };

    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure;
        unsigned offset;
        unsigned bytecodeOffset;
    };

    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode offset of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeOffset {
        CallReturnOffsetToBytecodeOffset(unsigned callReturnOffset, unsigned bytecodeOffset)
            : callReturnOffset(callReturnOffset)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        unsigned callReturnOffset;
        unsigned bytecodeOffset;
    };

    // valueAtPosition helpers for the binaryChop algorithm below.

    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }

    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }

    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }

    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeOffset* pc)
    {
        return pc->callReturnOffset;
    }

    // Binary chop algorithm, calls valueAtPosition on pre-sorted elements in array,
    // compares result with key (KeyTypes should be comparable with '==', '<', '>').
    // Optimized for cases where the array contains the key, checked by assertions.
    template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
    inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
    {
        // The array must contain at least one element (pre-condition: the array does contain key).
        // If the array only contains one element, no need to do the comparison.
        while (size > 1) {
            // Pick an element to check, half way through the array, and read the value.
            int pos = (size - 1) >> 1;
            KeyType val = valueAtPosition(&array[pos]);

            // If the key matches, success!
            if (val == key)
                return &array[pos];
            // The item we are looking for is smaller than the item being checked; reduce the value of 'size',
            // chopping off the right hand half of the array.
            else if (key < val)
                size = pos;
            // Discard all values in the left hand half of the array, up to and including the item at pos.
            else {
                size -= (pos + 1);
                array += (pos + 1);
            }

            // 'size' should never reach zero.
            ASSERT(size);
        }

        // If we reach this point we've chopped down to one element; no need to check that it matches.
        ASSERT(size == 1);
        ASSERT(key == valueAtPosition(&array[0]));
        return &array[0];
    }
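
    // --- Illustrative sketch, not part of the original header: how the table of
    // CallReturnOffsetToBytecodeOffset records is searched. This mirrors what
    // CodeBlock::bytecodeOffset() does below; 'bytecodeOffsetForCallReturnOffset' is a
    // hypothetical helper name introduced here for illustration only.
    inline unsigned bytecodeOffsetForCallReturnOffset(Vector<CallReturnOffsetToBytecodeOffset>& records, unsigned callReturnOffset)
    {
        // The records are appended in machine-code order, so binaryChop with getCallReturnOffset
        // as the key extractor finds the record for this call return offset.
        return binaryChop<CallReturnOffsetToBytecodeOffset, unsigned, getCallReturnOffset>(records.begin(), records.size(), callReturnOffset)->bytecodeOffset;
    }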
#endif

    struct ExceptionInfo : FastAllocBase {
        Vector<ExpressionRangeInfo> m_expressionInfo;
        Vector<LineInfo> m_lineInfo;
        Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;

#if ENABLE(JIT)
        Vector<CallReturnOffsetToBytecodeOffset> m_callReturnIndexVector;
#endif
    };

    class CodeBlock : public FastAllocBase {
        friend class JIT;
    protected:
        CodeBlock(ScriptExecutable* ownerExecutable, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset, SymbolTable* symbolTable, bool isConstructor);
    public:
        virtual ~CodeBlock();

        void markAggregate(MarkStack&);
        void refStructures(Instruction* vPC) const;
        void derefStructures(Instruction* vPC) const;
#if ENABLE(JIT_OPTIMIZE_CALL)
        void unlinkCallers();
#endif

        static void dumpStatistics();

#if !defined(NDEBUG) || ENABLE_OPCODE_SAMPLING
        void dump(ExecState*) const;
        void printStructures(const Instruction*) const;
        void printStructure(const char* name, const Instruction*, int operand) const;
#endif

        inline bool isKnownNotImmediate(int index)
        {
            if (index == m_thisRegister)
                return true;

            if (isConstantRegisterIndex(index))
                return getConstant(index).isCell();

            return false;
        }

        ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
        {
            return index >= m_numVars;
        }

        HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
        int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
        int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
        bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);

#if ENABLE(JIT)
        void addCaller(CallLinkInfo* caller)
        {
            caller->callee = this;
            caller->position = m_linkedCallerList.size();
            m_linkedCallerList.append(caller);
        }

        void removeCaller(CallLinkInfo* caller)
        {
            unsigned pos = caller->position;
            unsigned lastPos = m_linkedCallerList.size() - 1;

            if (pos != lastPos) {
                m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
                m_linkedCallerList[pos]->position = pos;
            }
            m_linkedCallerList.shrink(lastPos);
        }

        StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value()));
        }

        CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value()));
        }

        MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress)
        {
            return *(binaryChop<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value()));
        }

        unsigned bytecodeOffset(CallFrame* callFrame, ReturnAddressPtr returnAddress)
        {
            reparseForExceptionInfoIfNecessary(callFrame);
            return binaryChop<CallReturnOffsetToBytecodeOffset, unsigned, getCallReturnOffset>(callReturnIndexVector().begin(), callReturnIndexVector().size(), getJITCode().offsetOf(returnAddress.value()))->bytecodeOffset;
        }

        bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
#endif

        void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
        bool isNumericCompareFunction() { return m_isNumericCompareFunction; }

        Vector<Instruction>& instructions() { return m_instructions; }
        void discardBytecode() { m_instructions.clear(); }

#ifndef NDEBUG
        unsigned instructionCount() { return m_instructionCount; }
        void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
#endif

#if ENABLE(JIT)
        JITCode& getJITCode() { return m_isConstructor ? ownerExecutable()->generatedJITCodeForConstruct() : ownerExecutable()->generatedJITCodeForCall(); }
        ExecutablePool* executablePool() { return getJITCode().getExecutablePool(); }
#endif

        ScriptExecutable* ownerExecutable() const { return m_ownerExecutable; }

        void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }

        void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
        int thisRegister() const { return m_thisRegister; }

        void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
        bool needsFullScopeChain() const { return m_needsFullScopeChain; }
        void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
        bool usesEval() const { return m_usesEval; }

        void setArgumentsRegister(int argumentsRegister)
        {
            ASSERT(argumentsRegister != -1);
            m_argumentsRegister = argumentsRegister;
            ASSERT(usesArguments());
        }
        int argumentsRegister()
        {
            ASSERT(usesArguments());
            return m_argumentsRegister;
        }
        bool usesArguments() const { return m_argumentsRegister != -1; }

        CodeType codeType() const { return m_codeType; }

        SourceProvider* source() const { return m_source.get(); }
        unsigned sourceOffset() const { return m_sourceOffset; }

        size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
        void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
        unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
        unsigned lastJumpTarget() const { return m_jumpTargets.last(); }

#if !ENABLE(JIT)
        void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
        void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
        bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
#else
        size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
        void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
        StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }

        void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
        GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
        bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);

        size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
        void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
        CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }

        void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); }
        MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }

        void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
#endif

        // Exception handling support

        size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
        void addExceptionHandler(const HandlerInfo& handler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(handler); }
        HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }

        bool hasExceptionInfo() const { return m_exceptionInfo; }
        void clearExceptionInfo() { m_exceptionInfo.clear(); }
        ExceptionInfo* extractExceptionInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo.release(); }

        void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
        void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }

        size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
        void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
        LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }

#if ENABLE(JIT)
        Vector<CallReturnOffsetToBytecodeOffset>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; }
#endif

        // Constant Pool

        size_t numberOfIdentifiers() const { return m_identifiers.size(); }
        void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
        Identifier& identifier(int index) { return m_identifiers[index]; }

        size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
        void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
        Register& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; }
        ALWAYS_INLINE bool isConstantRegisterIndex(int index) const { return index >= FirstConstantRegisterIndex; }
        ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].jsValue(); }

        unsigned addFunctionDecl(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionDecls.size(); m_functionDecls.append(n); return size; }
        FunctionExecutable* functionDecl(int index) { return m_functionDecls[index].get(); }
        int numberOfFunctionDecls() { return m_functionDecls.size(); }
        unsigned addFunctionExpr(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionExprs.size(); m_functionExprs.append(n); return size; }
        FunctionExecutable* functionExpr(int index) { return m_functionExprs[index].get(); }

        unsigned addRegExp(RegExp* r) { createRareDataIfNecessary(); unsigned size = m_rareData->m_regexps.size(); m_rareData->m_regexps.append(r); return size; }
        RegExp* regexp(int index) const { ASSERT(m_rareData); return m_rareData->m_regexps[index].get(); }


        // Jump Tables

        size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
        SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }

        size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
        SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
        SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }

        size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
        StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
        StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }


        SymbolTable* symbolTable() { return m_symbolTable; }
        SharedSymbolTable* sharedSymbolTable() { ASSERT(m_codeType == FunctionCode); return static_cast<SharedSymbolTable*>(m_symbolTable); }

        EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }

        void shrinkToFit();

        // FIXME: Make these remaining members private.

        int m_numCalleeRegisters;
        int m_numVars;
        int m_numParameters;
        bool m_isConstructor;

    private:
#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
        void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;

        CString registerName(ExecState*, int r) const;
        void printUnaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printBinaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printConditionalJump(ExecState*, const Vector<Instruction>::const_iterator&, Vector<Instruction>::const_iterator&, int location, const char* op) const;
        void printGetByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
        void printPutByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
#endif

        void reparseForExceptionInfoIfNecessary(CallFrame*);

        void createRareDataIfNecessary()
        {
            if (!m_rareData)
                m_rareData.set(new RareData);
        }

        ScriptExecutable* m_ownerExecutable;
        JSGlobalData* m_globalData;

        Vector<Instruction> m_instructions;
#ifndef NDEBUG
        unsigned m_instructionCount;
#endif

        int m_thisRegister;
        int m_argumentsRegister;

        bool m_needsFullScopeChain;
        bool m_usesEval;
        bool m_isNumericCompareFunction;

        CodeType m_codeType;

        RefPtr<SourceProvider> m_source;
        unsigned m_sourceOffset;

#if !ENABLE(JIT)
        Vector<unsigned> m_propertyAccessInstructions;
        Vector<unsigned> m_globalResolveInstructions;
#else
        Vector<StructureStubInfo> m_structureStubInfos;
        Vector<GlobalResolveInfo> m_globalResolveInfos;
        Vector<CallLinkInfo> m_callLinkInfos;
        Vector<MethodCallLinkInfo> m_methodCallLinkInfos;
        Vector<CallLinkInfo*> m_linkedCallerList;
#endif

        Vector<unsigned> m_jumpTargets;

        // Constant Pool
        Vector<Identifier> m_identifiers;
        Vector<Register> m_constantRegisters;
        Vector<RefPtr<FunctionExecutable> > m_functionDecls;
        Vector<RefPtr<FunctionExecutable> > m_functionExprs;

        SymbolTable* m_symbolTable;

        OwnPtr<ExceptionInfo> m_exceptionInfo;

        struct RareData : FastAllocBase {
            Vector<HandlerInfo> m_exceptionHandlers;

            // Rare Constants
            Vector<RefPtr<RegExp> > m_regexps;

            // Jump Tables
            Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
            Vector<SimpleJumpTable> m_characterSwitchJumpTables;
            Vector<StringJumpTable> m_stringSwitchJumpTables;

            EvalCodeCache m_evalCodeCache;

#if ENABLE(JIT)
            Vector<FunctionRegisterInfo> m_functionRegisterInfos;
#endif
        };
        OwnPtr<RareData> m_rareData;
    };

    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class GlobalCodeBlock : public CodeBlock {
    public:
        GlobalCodeBlock(ScriptExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, JSGlobalObject* globalObject)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, &m_unsharedSymbolTable, false)
            , m_globalObject(globalObject)
        {
            m_globalObject->codeBlocks().add(this);
        }

        ~GlobalCodeBlock()
        {
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
        SymbolTable m_unsharedSymbolTable;
    };

    class ProgramCodeBlock : public GlobalCodeBlock {
    public:
        ProgramCodeBlock(ProgramExecutable* ownerExecutable, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : GlobalCodeBlock(ownerExecutable, codeType, sourceProvider, 0, globalObject)
        {
        }
    };

    class EvalCodeBlock : public GlobalCodeBlock {
    public:
        EvalCodeBlock(EvalExecutable* ownerExecutable, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : GlobalCodeBlock(ownerExecutable, EvalCode, sourceProvider, 0, globalObject)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

        const Identifier& variable(unsigned index) { return m_variables[index]; }
        unsigned numVariables() { return m_variables.size(); }
        void adoptVariables(Vector<Identifier>& variables)
        {
            ASSERT(m_variables.isEmpty());
            m_variables.swap(variables);
        }

    private:
        int m_baseScopeDepth;
        Vector<Identifier> m_variables;
    };

    class FunctionCodeBlock : public CodeBlock {
    public:
        // Rather than using the usual RefCounted::create idiom for SharedSymbolTable we just use new,
        // because we need to initialise the CodeBlock before we can initialise any RefPtr to hold the shared
        // symbol table, so we just pass it in as a raw pointer with a ref count of 1. We then manually deref
        // in the destructor.
        FunctionCodeBlock(FunctionExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, bool isConstructor)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, new SharedSymbolTable, isConstructor)
        {
        }
        ~FunctionCodeBlock()
        {
            sharedSymbolTable()->deref();
        }
    };

    inline Register& ExecState::r(int index)
    {
        CodeBlock* codeBlock = this->codeBlock();
        if (codeBlock->isConstantRegisterIndex(index))
            return codeBlock->constantRegister(index);
        return this[index];
    }

} // namespace JSC

#endif // CodeBlock_h