source: webkit/trunk/JavaScriptCore/bytecode/CodeBlock.h@ 57955

Last change on this file since 57955 was 57955, checked in by [email protected], 15 years ago

2010-04-20 Oliver Hunt <[email protected]>

Reviewed by Maciej Stachowiak.

[ES5] RegExp literals are constants that should be persistent across multiple function calls.
https://bugs.webkit.org/show_bug.cgi?id=37908

Dump the separate RegExp constant pool, and just use the standard JS constant pool
in codeblock. This allows us to drop op_new_regexp and all associated code as well.

  • bytecode/CodeBlock.cpp: (JSC::CodeBlock::dump): (JSC::CodeBlock::shrinkToFit):
  • bytecode/CodeBlock.h:
  • bytecode/Opcode.h:
  • bytecompiler/BytecodeGenerator.cpp: (JSC::BytecodeGenerator::emitLoad):
  • bytecompiler/BytecodeGenerator.h:
  • bytecompiler/NodesCodegen.cpp: (JSC::RegExpNode::emitBytecode):
  • interpreter/Interpreter.cpp: (JSC::Interpreter::privateExecute):
  • jit/JIT.cpp: (JSC::JIT::privateCompileMainPass):
  • jit/JIT.h:
  • jit/JITOpcodes.cpp:
  • jit/JITStubs.cpp:
  • jit/JITStubs.h: (JSC::):

2010-04-20 Oliver Hunt <[email protected]>

Reviewed by Maciej Stachowiak.

[ES5] RegExp literals are constants that should be persistent across multiple function calls.
https://bugs.webkit.org/show_bug.cgi?id=37908

Add tests to ensure correct persistence of RegExp literals, and correctly avoid
sharing "identical" regexps used in different places.

  • fast/js/regexp-literals-are-constants-expected.txt: Added.
  • fast/js/regexp-literals-are-constants.html: Added.
  • fast/js/script-tests/regexp-literals-are-constants.js: Added. (test1): (returnRegExpLiteral): (returnConditionalRegExpLiteral):
File size: 26.8 KB
Line 
1/*
2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <[email protected]>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
15 * its contributors may be used to endorse or promote products derived
16 * from this software without specific prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30#ifndef CodeBlock_h
31#define CodeBlock_h
32
33#include "EvalCodeCache.h"
34#include "Instruction.h"
35#include "JITCode.h"
36#include "JSGlobalObject.h"
37#include "JumpTable.h"
38#include "Nodes.h"
39#include "RegExp.h"
40#include "UString.h"
41#include <wtf/FastAllocBase.h>
42#include <wtf/RefPtr.h>
43#include <wtf/Vector.h>
44
45#if ENABLE(JIT)
46#include "StructureStubInfo.h"
47#endif
48
// Register numbers used in bytecode operations have different meanings according to their ranges:
// 0x80000000-0xFFFFFFFF  Negative indices from the CallFrame pointer are entries in the call frame, see RegisterFile.h.
// 0x00000000-0x3FFFFFFF  Forward indices from the CallFrame pointer are local vars and temporaries within the function's call frame.
// 0x40000000-0x7FFFFFFF  Positive indices from 0x40000000 specify entries in the constant pool on the CodeBlock.
53static const int FirstConstantRegisterIndex = 0x40000000;
54
55namespace JSC {
56
    // Tag type used to mark call sites whose slow path has been taken once
    // already (see CallLinkInfo::seenOnce()/setSeen() below).
    enum HasSeenShouldRepatch {
        hasSeenShouldRepatch
    };

    class ExecState;

    // The three kinds of code a CodeBlock can represent: whole-program code,
    // eval code, and function body code.
    enum CodeType { GlobalCode, EvalCode, FunctionCode };

    // Sentinel register index meaning "no explicit 'this' object was provided".
    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }
66
    // Describes one exception handler: the bytecode offsets it covers
    // (start/end), the bytecode offset of the handler code itself (target),
    // and the scope depth recorded for the handler site (used when unwinding).
    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
#if ENABLE(JIT)
        CodeLocationLabel nativeCode; // Address of the handler in JIT-generated code.
#endif
    };
76
    // Maps a bytecode offset back to a source-expression range for error
    // reporting. The divot is the expression's focal point; startOffset and
    // endOffset are presumably distances from the divot to the expression's
    // ends (TODO confirm against BytecodeGenerator). The bitfield widths
    // pack the whole record into 64 bits; MaxOffset/MaxDivot are the largest
    // values representable in the 7- and 25-bit fields respectively.
    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };
87
    // Associates a bytecode offset with the source line number it came from.
    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };
92
    // Both op_construct and op_instanceof require a use of op_get_by_id to get
    // the prototype property from an object. The exception messages for exceptions
    // thrown by these uses of op_get_by_id need to reflect this, so we record
    // which of the two operations a given get_by_id originated from.
    struct GetByIdExceptionInfo {
        unsigned bytecodeOffset : 31;
        bool isOpConstruct : 1; // true for op_construct, false for op_instanceof.
    };
100
101#if ENABLE(JIT)
102 struct CallLinkInfo {
103 CallLinkInfo()
104 : callee(0)
105 , position(0)
106 , hasSeenShouldRepatch(0)
107 {
108 }
109
110 unsigned bytecodeIndex;
111 CodeLocationNearCall callReturnLocation;
112 CodeLocationDataLabelPtr hotPathBegin;
113 CodeLocationNearCall hotPathOther;
114 CodeBlock* ownerCodeBlock;
115 CodeBlock* callee;
116 unsigned position : 31;
117 unsigned hasSeenShouldRepatch : 1;
118
119 void setUnlinked() { callee = 0; }
120 bool isLinked() { return callee; }
121
122 bool seenOnce()
123 {
124 return hasSeenShouldRepatch;
125 }
126
127 void setSeen()
128 {
129 hasSeenShouldRepatch = true;
130 }
131 };
132
    // Per-site record for the JIT's method-call caching. The pair
    // (cachedStructure, cachedPrototypeStructure) encodes a three-state
    // machine; see the comment in setSeen() below. The JIT patches these
    // fields in the generated code path, so the layout must not change.
    struct MethodCallLinkInfo {
        MethodCallLinkInfo()
            : cachedStructure(0)
            , cachedPrototypeStructure(0)
        {
        }

        // True after setSeen() has been called but before the call is linked.
        bool seenOnce()
        {
            ASSERT(!cachedStructure);
            return cachedPrototypeStructure;
        }

        void setSeen()
        {
            ASSERT(!cachedStructure && !cachedPrototypeStructure);
            // We use the values of cachedStructure & cachedPrototypeStructure to indicate the
            // current state.
            // - In the initial state, both are null.
            // - Once this transition has been taken once, cachedStructure is
            //   null and cachedPrototypeStructure is set to a non-null value.
            // - Once the call is linked both structures are set to non-null values.
            cachedPrototypeStructure = (Structure*)1;
        }

        CodeLocationCall callReturnLocation;
        CodeLocationDataLabelPtr structureLabel;
        Structure* cachedStructure;
        Structure* cachedPrototypeStructure;
    };
163
    // Associates a bytecode offset with the register that held the callee
    // function at that point; looked up through
    // CodeBlock::functionRegisterForBytecodeOffset().
    struct FunctionRegisterInfo {
        FunctionRegisterInfo(unsigned bytecodeOffset, int functionRegisterIndex)
            : bytecodeOffset(bytecodeOffset)
            , functionRegisterIndex(functionRegisterIndex)
        {
        }

        unsigned bytecodeOffset;
        int functionRegisterIndex;
    };
174
    // Cache slot for a global-variable resolve site. structure and offset
    // start out empty and are presumably filled in when the resolve is first
    // cached so later executions can take a fast path (see the interpreter /
    // JIT stubs for the fill-in side).
    struct GlobalResolveInfo {
        GlobalResolveInfo(unsigned bytecodeOffset)
            : structure(0)
            , offset(0)
            , bytecodeOffset(bytecodeOffset)
        {
        }

        Structure* structure;
        unsigned offset;
        unsigned bytecodeOffset;
    };
187
    // This structure is used to map from a call return location
    // (given as an offset in bytes into the JIT code) back to
    // the bytecode index of the corresponding bytecode operation.
    // This is then used to look up the corresponding handler.
    struct CallReturnOffsetToBytecodeIndex {
        CallReturnOffsetToBytecodeIndex(unsigned callReturnOffset, unsigned bytecodeIndex)
            : callReturnOffset(callReturnOffset)
            , bytecodeIndex(bytecodeIndex)
        {
        }

        unsigned callReturnOffset;
        unsigned bytecodeIndex;
    };
202
    // valueAtPosition helpers for the binaryChop algorithm below: each one
    // extracts the sort key (the native-code return address) from one record
    // type, so records can be binary-searched by return address.

    inline void* getStructureStubInfoReturnLocation(StructureStubInfo* structureStubInfo)
    {
        return structureStubInfo->callReturnLocation.executableAddress();
    }

    inline void* getCallLinkInfoReturnLocation(CallLinkInfo* callLinkInfo)
    {
        return callLinkInfo->callReturnLocation.executableAddress();
    }

    inline void* getMethodCallLinkInfoReturnLocation(MethodCallLinkInfo* methodCallLinkInfo)
    {
        return methodCallLinkInfo->callReturnLocation.executableAddress();
    }

    inline unsigned getCallReturnOffset(CallReturnOffsetToBytecodeIndex* pc)
    {
        return pc->callReturnOffset;
    }
224
225 // Binary chop algorithm, calls valueAtPosition on pre-sorted elements in array,
226 // compares result with key (KeyTypes should be comparable with '--', '<', '>').
227 // Optimized for cases where the array contains the key, checked by assertions.
228 template<typename ArrayType, typename KeyType, KeyType(*valueAtPosition)(ArrayType*)>
229 inline ArrayType* binaryChop(ArrayType* array, size_t size, KeyType key)
230 {
231 // The array must contain at least one element (pre-condition, array does conatin key).
232 // If the array only contains one element, no need to do the comparison.
233 while (size > 1) {
234 // Pick an element to check, half way through the array, and read the value.
235 int pos = (size - 1) >> 1;
236 KeyType val = valueAtPosition(&array[pos]);
237
238 // If the key matches, success!
239 if (val == key)
240 return &array[pos];
241 // The item we are looking for is smaller than the item being check; reduce the value of 'size',
242 // chopping off the right hand half of the array.
243 else if (key < val)
244 size = pos;
245 // Discard all values in the left hand half of the array, up to and including the item at pos.
246 else {
247 size -= (pos + 1);
248 array += (pos + 1);
249 }
250
251 // 'size' should never reach zero.
252 ASSERT(size);
253 }
254
255 // If we reach this point we've chopped down to one element, no need to check it matches
256 ASSERT(size == 1);
257 ASSERT(key == valueAtPosition(&array[0]));
258 return &array[0];
259 }
260#endif
261
    // Side table of data only needed when an exception is thrown: source
    // expression ranges, line numbers, and get_by_id provenance for error
    // messages. Held separately (via OwnPtr in CodeBlock) so it can be
    // discarded and regenerated on demand — see clearExceptionInfo(),
    // extractExceptionInfo() and reparseForExceptionInfoIfNecessary().
    struct ExceptionInfo : FastAllocBase {
        Vector<ExpressionRangeInfo> m_expressionInfo;
        Vector<LineInfo> m_lineInfo;
        Vector<GetByIdExceptionInfo> m_getByIdExceptionInfo;

#if ENABLE(JIT)
        // Maps JIT call return offsets back to bytecode indices (see
        // CallReturnOffsetToBytecodeIndex above).
        Vector<CallReturnOffsetToBytecodeIndex> m_callReturnIndexVector;
#endif
    };
271
272 class CodeBlock : public FastAllocBase {
273 friend class JIT;
274 protected:
275 CodeBlock(ScriptExecutable* ownerExecutable, CodeType, PassRefPtr<SourceProvider>, unsigned sourceOffset, SymbolTable* symbolTable);
276 public:
277 virtual ~CodeBlock();
278
279 void markAggregate(MarkStack&);
280 void refStructures(Instruction* vPC) const;
281 void derefStructures(Instruction* vPC) const;
282#if ENABLE(JIT_OPTIMIZE_CALL)
283 void unlinkCallers();
284#endif
285
286 static void dumpStatistics();
287
288#if !defined(NDEBUG) || ENABLE_OPCODE_SAMPLING
289 void dump(ExecState*) const;
290 void printStructures(const Instruction*) const;
291 void printStructure(const char* name, const Instruction*, int operand) const;
292#endif
293
294 inline bool isKnownNotImmediate(int index)
295 {
296 if (index == m_thisRegister)
297 return true;
298
299 if (isConstantRegisterIndex(index))
300 return getConstant(index).isCell();
301
302 return false;
303 }
304
305 ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
306 {
307 return index >= m_numVars;
308 }
309
310 HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset);
311 int lineNumberForBytecodeOffset(CallFrame*, unsigned bytecodeOffset);
312 int expressionRangeForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, int& divot, int& startOffset, int& endOffset);
313 bool getByIdExceptionInfoForBytecodeOffset(CallFrame*, unsigned bytecodeOffset, OpcodeID&);
314
315#if ENABLE(JIT)
316 void addCaller(CallLinkInfo* caller)
317 {
318 caller->callee = this;
319 caller->position = m_linkedCallerList.size();
320 m_linkedCallerList.append(caller);
321 }
322
323 void removeCaller(CallLinkInfo* caller)
324 {
325 unsigned pos = caller->position;
326 unsigned lastPos = m_linkedCallerList.size() - 1;
327
328 if (pos != lastPos) {
329 m_linkedCallerList[pos] = m_linkedCallerList[lastPos];
330 m_linkedCallerList[pos]->position = pos;
331 }
332 m_linkedCallerList.shrink(lastPos);
333 }
334
335 StructureStubInfo& getStubInfo(ReturnAddressPtr returnAddress)
336 {
337 return *(binaryChop<StructureStubInfo, void*, getStructureStubInfoReturnLocation>(m_structureStubInfos.begin(), m_structureStubInfos.size(), returnAddress.value()));
338 }
339
340 CallLinkInfo& getCallLinkInfo(ReturnAddressPtr returnAddress)
341 {
342 return *(binaryChop<CallLinkInfo, void*, getCallLinkInfoReturnLocation>(m_callLinkInfos.begin(), m_callLinkInfos.size(), returnAddress.value()));
343 }
344
345 MethodCallLinkInfo& getMethodCallLinkInfo(ReturnAddressPtr returnAddress)
346 {
347 return *(binaryChop<MethodCallLinkInfo, void*, getMethodCallLinkInfoReturnLocation>(m_methodCallLinkInfos.begin(), m_methodCallLinkInfos.size(), returnAddress.value()));
348 }
349
350 unsigned getBytecodeIndex(CallFrame* callFrame, ReturnAddressPtr returnAddress)
351 {
352 reparseForExceptionInfoIfNecessary(callFrame);
353 return binaryChop<CallReturnOffsetToBytecodeIndex, unsigned, getCallReturnOffset>(callReturnIndexVector().begin(), callReturnIndexVector().size(), ownerExecutable()->generatedJITCode().offsetOf(returnAddress.value()))->bytecodeIndex;
354 }
355
356 bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
357#endif
358
359 void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
360 bool isNumericCompareFunction() { return m_isNumericCompareFunction; }
361
362 Vector<Instruction>& instructions() { return m_instructions; }
363 void discardBytecode() { m_instructions.clear(); }
364
365#ifndef NDEBUG
366 unsigned instructionCount() { return m_instructionCount; }
367 void setInstructionCount(unsigned instructionCount) { m_instructionCount = instructionCount; }
368#endif
369
370#if ENABLE(JIT)
371 JITCode& getJITCode() { return ownerExecutable()->generatedJITCode(); }
372 ExecutablePool* executablePool() { return ownerExecutable()->getExecutablePool(); }
373#endif
374
375 ScriptExecutable* ownerExecutable() const { return m_ownerExecutable; }
376
377 void setGlobalData(JSGlobalData* globalData) { m_globalData = globalData; }
378
379 void setThisRegister(int thisRegister) { m_thisRegister = thisRegister; }
380 int thisRegister() const { return m_thisRegister; }
381
382 void setNeedsFullScopeChain(bool needsFullScopeChain) { m_needsFullScopeChain = needsFullScopeChain; }
383 bool needsFullScopeChain() const { return m_needsFullScopeChain; }
384 void setUsesEval(bool usesEval) { m_usesEval = usesEval; }
385 bool usesEval() const { return m_usesEval; }
386 void setUsesArguments(bool usesArguments) { m_usesArguments = usesArguments; }
387 bool usesArguments() const { return m_usesArguments; }
388
389 CodeType codeType() const { return m_codeType; }
390
391 SourceProvider* source() const { return m_source.get(); }
392 unsigned sourceOffset() const { return m_sourceOffset; }
393
394 size_t numberOfJumpTargets() const { return m_jumpTargets.size(); }
395 void addJumpTarget(unsigned jumpTarget) { m_jumpTargets.append(jumpTarget); }
396 unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
397 unsigned lastJumpTarget() const { return m_jumpTargets.last(); }
398
399#if !ENABLE(JIT)
400 void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
401 void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
402 bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
403#else
404 size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
405 void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
406 StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }
407
408 void addGlobalResolveInfo(unsigned globalResolveInstruction) { m_globalResolveInfos.append(GlobalResolveInfo(globalResolveInstruction)); }
409 GlobalResolveInfo& globalResolveInfo(int index) { return m_globalResolveInfos[index]; }
410 bool hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset);
411
412 size_t numberOfCallLinkInfos() const { return m_callLinkInfos.size(); }
413 void addCallLinkInfo() { m_callLinkInfos.append(CallLinkInfo()); }
414 CallLinkInfo& callLinkInfo(int index) { return m_callLinkInfos[index]; }
415
416 void addMethodCallLinkInfos(unsigned n) { m_methodCallLinkInfos.grow(n); }
417 MethodCallLinkInfo& methodCallLinkInfo(int index) { return m_methodCallLinkInfos[index]; }
418
419 void addFunctionRegisterInfo(unsigned bytecodeOffset, int functionIndex) { createRareDataIfNecessary(); m_rareData->m_functionRegisterInfos.append(FunctionRegisterInfo(bytecodeOffset, functionIndex)); }
420#endif
421
422 // Exception handling support
423
424 size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
425 void addExceptionHandler(const HandlerInfo& hanler) { createRareDataIfNecessary(); return m_rareData->m_exceptionHandlers.append(hanler); }
426 HandlerInfo& exceptionHandler(int index) { ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }
427
428 bool hasExceptionInfo() const { return m_exceptionInfo; }
429 void clearExceptionInfo() { m_exceptionInfo.clear(); }
430 ExceptionInfo* extractExceptionInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo.release(); }
431
432 void addExpressionInfo(const ExpressionRangeInfo& expressionInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_expressionInfo.append(expressionInfo); }
433 void addGetByIdExceptionInfo(const GetByIdExceptionInfo& info) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_getByIdExceptionInfo.append(info); }
434
435 size_t numberOfLineInfos() const { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.size(); }
436 void addLineInfo(const LineInfo& lineInfo) { ASSERT(m_exceptionInfo); m_exceptionInfo->m_lineInfo.append(lineInfo); }
437 LineInfo& lastLineInfo() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_lineInfo.last(); }
438
439#if ENABLE(JIT)
440 Vector<CallReturnOffsetToBytecodeIndex>& callReturnIndexVector() { ASSERT(m_exceptionInfo); return m_exceptionInfo->m_callReturnIndexVector; }
441#endif
442
443 // Constant Pool
444
445 size_t numberOfIdentifiers() const { return m_identifiers.size(); }
446 void addIdentifier(const Identifier& i) { return m_identifiers.append(i); }
447 Identifier& identifier(int index) { return m_identifiers[index]; }
448
449 size_t numberOfConstantRegisters() const { return m_constantRegisters.size(); }
450 void addConstantRegister(const Register& r) { return m_constantRegisters.append(r); }
451 Register& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; }
452 ALWAYS_INLINE bool isConstantRegisterIndex(int index) const { return index >= FirstConstantRegisterIndex; }
453 ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].jsValue(); }
454
455 unsigned addFunctionDecl(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionDecls.size(); m_functionDecls.append(n); return size; }
456 FunctionExecutable* functionDecl(int index) { return m_functionDecls[index].get(); }
457 int numberOfFunctionDecls() { return m_functionDecls.size(); }
458 unsigned addFunctionExpr(NonNullPassRefPtr<FunctionExecutable> n) { unsigned size = m_functionExprs.size(); m_functionExprs.append(n); return size; }
459 FunctionExecutable* functionExpr(int index) { return m_functionExprs[index].get(); }
460
461 // Jump Tables
462
463 size_t numberOfImmediateSwitchJumpTables() const { return m_rareData ? m_rareData->m_immediateSwitchJumpTables.size() : 0; }
464 SimpleJumpTable& addImmediateSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_immediateSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_immediateSwitchJumpTables.last(); }
465 SimpleJumpTable& immediateSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_immediateSwitchJumpTables[tableIndex]; }
466
467 size_t numberOfCharacterSwitchJumpTables() const { return m_rareData ? m_rareData->m_characterSwitchJumpTables.size() : 0; }
468 SimpleJumpTable& addCharacterSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_characterSwitchJumpTables.append(SimpleJumpTable()); return m_rareData->m_characterSwitchJumpTables.last(); }
469 SimpleJumpTable& characterSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_characterSwitchJumpTables[tableIndex]; }
470
471 size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
472 StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
473 StringJumpTable& stringSwitchJumpTable(int tableIndex) { ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }
474
475
476 SymbolTable* symbolTable() { return m_symbolTable; }
477 SharedSymbolTable* sharedSymbolTable() { ASSERT(m_codeType == FunctionCode); return static_cast<SharedSymbolTable*>(m_symbolTable); }
478
479 EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }
480
481 void shrinkToFit();
482
483 // FIXME: Make these remaining members private.
484
485 int m_numCalleeRegisters;
486 int m_numVars;
487 int m_numParameters;
488
489 private:
490#if !defined(NDEBUG) || ENABLE(OPCODE_SAMPLING)
491 void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;
492
493 CString registerName(ExecState*, int r) const;
494 void printUnaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
495 void printBinaryOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
496 void printConditionalJump(ExecState*, const Vector<Instruction>::const_iterator&, Vector<Instruction>::const_iterator&, int location, const char* op) const;
497 void printGetByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
498 void printPutByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
499#endif
500
501 void reparseForExceptionInfoIfNecessary(CallFrame*);
502
503 void createRareDataIfNecessary()
504 {
505 if (!m_rareData)
506 m_rareData.set(new RareData);
507 }
508
509 ScriptExecutable* m_ownerExecutable;
510 JSGlobalData* m_globalData;
511
512 Vector<Instruction> m_instructions;
513#ifndef NDEBUG
514 unsigned m_instructionCount;
515#endif
516
517 int m_thisRegister;
518
519 bool m_needsFullScopeChain;
520 bool m_usesEval;
521 bool m_usesArguments;
522 bool m_isNumericCompareFunction;
523
524 CodeType m_codeType;
525
526 RefPtr<SourceProvider> m_source;
527 unsigned m_sourceOffset;
528
529#if !ENABLE(JIT)
530 Vector<unsigned> m_propertyAccessInstructions;
531 Vector<unsigned> m_globalResolveInstructions;
532#else
533 Vector<StructureStubInfo> m_structureStubInfos;
534 Vector<GlobalResolveInfo> m_globalResolveInfos;
535 Vector<CallLinkInfo> m_callLinkInfos;
536 Vector<MethodCallLinkInfo> m_methodCallLinkInfos;
537 Vector<CallLinkInfo*> m_linkedCallerList;
538#endif
539
540 Vector<unsigned> m_jumpTargets;
541
542 // Constant Pool
543 Vector<Identifier> m_identifiers;
544 Vector<Register> m_constantRegisters;
545 Vector<RefPtr<FunctionExecutable> > m_functionDecls;
546 Vector<RefPtr<FunctionExecutable> > m_functionExprs;
547
548 SymbolTable* m_symbolTable;
549
550 OwnPtr<ExceptionInfo> m_exceptionInfo;
551
552 struct RareData : FastAllocBase {
553 Vector<HandlerInfo> m_exceptionHandlers;
554
555 // Jump Tables
556 Vector<SimpleJumpTable> m_immediateSwitchJumpTables;
557 Vector<SimpleJumpTable> m_characterSwitchJumpTables;
558 Vector<StringJumpTable> m_stringSwitchJumpTables;
559
560 EvalCodeCache m_evalCodeCache;
561
562#if ENABLE(JIT)
563 Vector<FunctionRegisterInfo> m_functionRegisterInfos;
564#endif
565 };
566 OwnPtr<RareData> m_rareData;
567 };
568
    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    class GlobalCodeBlock : public CodeBlock {
    public:
        GlobalCodeBlock(ScriptExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, JSGlobalObject* globalObject)
            // Note: &m_unsharedSymbolTable is taken before the member is
            // constructed; this is well-defined only because the base class
            // just stores the pointer (it never dereferences it during
            // construction — TODO confirm against CodeBlock's constructor).
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, &m_unsharedSymbolTable)
            , m_globalObject(globalObject)
        {
            // Register with the global object so it marks us (see comment above).
            m_globalObject->codeBlocks().add(this);
        }

        ~GlobalCodeBlock()
        {
            // m_globalObject may already be null if clearGlobalObject() ran first.
            if (m_globalObject)
                m_globalObject->codeBlocks().remove(this);
        }

        // Severs the back-pointer (presumably called when the global object
        // is destroyed before this code block).
        void clearGlobalObject() { m_globalObject = 0; }

    private:
        JSGlobalObject* m_globalObject; // For program and eval nodes, the global object that marks the constant pool.
        SymbolTable m_unsharedSymbolTable;
    };
593
    // CodeBlock for top-level program code; a thin wrapper that forwards to
    // GlobalCodeBlock with a source offset of 0.
    class ProgramCodeBlock : public GlobalCodeBlock {
    public:
        ProgramCodeBlock(ProgramExecutable* ownerExecutable, CodeType codeType, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider)
            : GlobalCodeBlock(ownerExecutable, codeType, sourceProvider, 0, globalObject)
        {
        }
    };
601
    // CodeBlock for eval code. Additionally records the scope depth at which
    // the eval was created and the list of variables the eval code declares.
    class EvalCodeBlock : public GlobalCodeBlock {
    public:
        EvalCodeBlock(EvalExecutable* ownerExecutable, JSGlobalObject* globalObject, PassRefPtr<SourceProvider> sourceProvider, int baseScopeDepth)
            : GlobalCodeBlock(ownerExecutable, EvalCode, sourceProvider, 0, globalObject)
            , m_baseScopeDepth(baseScopeDepth)
        {
        }

        int baseScopeDepth() const { return m_baseScopeDepth; }

        const Identifier& variable(unsigned index) { return m_variables[index]; }
        unsigned numVariables() { return m_variables.size(); }
        // Takes the variable list by swap (caller's vector is left empty);
        // may only be called once, while m_variables is still empty.
        void adoptVariables(Vector<Identifier>& variables)
        {
            ASSERT(m_variables.isEmpty());
            m_variables.swap(variables);
        }

    private:
        int m_baseScopeDepth;
        Vector<Identifier> m_variables;
    };
624
    // CodeBlock for a function body; owns a SharedSymbolTable that can be
    // shared across compilations of the same function.
    class FunctionCodeBlock : public CodeBlock {
    public:
        // Rather than using the usual RefCounted::create idiom for SharedSymbolTable we just use new
        // as we need to initialise the CodeBlock before we could initialise any RefPtr to hold the shared
        // symbol table, so we just pass it as a raw pointer with a ref count of 1. We then manually deref
        // in the destructor.
        FunctionCodeBlock(FunctionExecutable* ownerExecutable, CodeType codeType, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset)
            : CodeBlock(ownerExecutable, codeType, sourceProvider, sourceOffset, new SharedSymbolTable)
        {
        }
        ~FunctionCodeBlock()
        {
            // Balances the implicit ref from 'new SharedSymbolTable' above.
            sharedSymbolTable()->deref();
        }
    };
640
641 inline Register& ExecState::r(int index)
642 {
643 CodeBlock* codeBlock = this->codeBlock();
644 if (codeBlock->isConstantRegisterIndex(index))
645 return codeBlock->constantRegister(index);
646 return this[index];
647 }
648
649} // namespace JSC
650
651#endif // CodeBlock_h
Note: See TracBrowser for help on using the repository browser.