source: webkit/trunk/JavaScriptCore/VM/CodeBlock.h@ 37670

Last change on this file since 37670 was 37670, checked in by [email protected], 17 years ago

2008-10-17 Gavin Barraclough <[email protected]>

Optimize op_call by allowing call sites to be directly linked to callees.

For the hot path of op_call, CTI now generates a check (initially for an impossible
value), and the first time the call is executed we attempt to link the call directly
to the callee. WWe can currently only do so if the arity of the caller and callee
match. The (optimized) setup for the call on the hot path is linked directly to
the ctiCode for the callee, without indirection.
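
The shape of the hot path can be pictured with a small C++ sketch (illustrative only;
the real check is emitted as inline machine code by CTI::compileOpCall, and the names
LinkedCallSite and dispatch below are hypothetical):

    #include <cstdio>

    struct CompiledCode { const char* name; };

    struct LinkedCallSite {
        void* expectedCallee;     // initially an impossible value, patched when the call is linked
        CompiledCode* linkedCode; // the callee's ctiCode once linked
    };

    // Hot path: one comparison, then a direct jump to the linked code, no indirection.
    CompiledCode* dispatch(LinkedCallSite& site, void* callee, CompiledCode* slowCase)
    {
        if (callee == site.expectedCallee)
            return site.linkedCode;
        return slowCase;
    }

    int main()
    {
        int callee;                                     // stands in for a JSFunction
        CompiledCode calleeCode = { "callee ctiCode" };
        CompiledCode slowCase = { "slow case" };

        LinkedCallSite site = { 0, 0 };                 // unlinked: the check can never match
        std::printf("%s\n", dispatch(site, &callee, &slowCase)->name); // "slow case"

        site.expectedCallee = &callee;                  // first execution links the call
        site.linkedCode = &calleeCode;
        std::printf("%s\n", dispatch(site, &callee, &slowCase)->name); // "callee ctiCode"
        return 0;
    }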


Two forms of the slow case of the call are generated; the first is executed the
first time the call is reached. As well as attempting to link the call to a callee,
this path also relinks the slow case to a second slow case, which will not continue
to attempt relinking the call. (This policy could be changed in the future, but for now
it is intended to prevent thrashing.)
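
A hedged sketch of that policy (hypothetical names; the real entry points are the
cti_op_call_* and cti_vm_lazyLinkCall functions in Machine.cpp):

    enum SlowCaseKind { TriesToLink, NeverRelinks };

    struct CallSiteState {
        SlowCaseKind slowCase;
        bool linked;
    };

    // First slow case: try to link once, then repatch the slow path so later misses
    // stop attempting to relink (the anti-thrashing policy described above).
    void firstSlowCase(CallSiteState& site, bool arityMatches)
    {
        if (arityMatches)
            site.linked = true;        // patch the hot-path check and call target
        site.slowCase = NeverRelinks;  // second slow case: plain call, no relink attempts
    }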

If a callee that the caller has been linked to is garbage collected, then the link
in the caller's JIT code will be reset back to a value that cannot match, to prevent
any false positive matches.
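
A rough sketch of that unlink step (CodeBlock::unlinkCallers does the real work,
driven from JSFunction's destructor; resetCheck below is a hypothetical stand-in for
repatching the caller's inline check):

    #include <vector>

    struct LinkedCaller { bool linked; };

    void resetCheck(LinkedCaller& caller) { caller.linked = false; } // restore the impossible value

    void unlinkCallers(std::vector<LinkedCaller*>& linkedCallerList)
    {
        // Every call site that was linked to this callee goes back to the unlinked
        // state, so a later object at the same address cannot produce a false match.
        for (size_t i = 0; i < linkedCallerList.size(); ++i)
            resetCheck(*linkedCallerList[i]);
        linkedCallerList.clear();
    }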

~20% progression on deltablue & richards, >12% overall reduction in v8-tests
runtime, one or two percent progression on sunspider.

Reviewed by Oliver Hunt.

  • VM/CTI.cpp: (JSC::): (JSC::CTI::emitNakedCall): (JSC::unreachable): (JSC::CTI::compileOpCallInitializeCallFrame): (JSC::CTI::compileOpCallSetupArgs): (JSC::CTI::compileOpCall): (JSC::CTI::privateCompileMainPass): (JSC::CTI::privateCompileSlowCases): (JSC::CTI::privateCompile): (JSC::CTI::unlinkCall): (JSC::CTI::linkCall):
  • VM/CTI.h:
  • VM/CodeBlock.cpp: (JSC::CodeBlock::~CodeBlock): (JSC::CodeBlock::unlinkCallers): (JSC::CodeBlock::derefStructureIDs):
  • VM/CodeBlock.h: (JSC::StructureStubInfo::StructureStubInfo): (JSC::CallLinkInfo::CallLinkInfo): (JSC::CodeBlock::addCaller): (JSC::CodeBlock::removeCaller): (JSC::CodeBlock::getStubInfo):
  • VM/CodeGenerator.cpp: (JSC::CodeGenerator::emitCall): (JSC::CodeGenerator::emitConstruct):
  • VM/Machine.cpp: (JSC::Machine::cti_op_call_profiler): (JSC::Machine::cti_op_call_JSFunction): (JSC::Machine::cti_vm_lazyLinkCall): (JSC::Machine::cti_op_construct_JSConstructFast): (JSC::Machine::cti_op_construct_JSConstruct): (JSC::Machine::cti_op_construct_NotJSConstruct):
  • VM/Machine.h:
  • kjs/JSFunction.cpp: (JSC::JSFunction::~JSFunction):
  • kjs/JSFunction.h:
  • kjs/nodes.h: (JSC::FunctionBodyNode::):
  • masm/X86Assembler.h: (JSC::X86Assembler::getDifferenceBetweenLabels):
/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <[email protected]>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CodeBlock_h
#define CodeBlock_h

#include "Instruction.h"
#include "JSGlobalObject.h"
#include "nodes.h"
#include "Parser.h"
#include "SourceRange.h"
#include "ustring.h"
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>

namespace JSC {

    class ExecState;

    enum CodeType { GlobalCode, EvalCode, FunctionCode };

    static ALWAYS_INLINE int missingThisObjectMarker() { return std::numeric_limits<int>::max(); }

    struct HandlerInfo {
        uint32_t start;
        uint32_t end;
        uint32_t target;
        uint32_t scopeDepth;
        void* nativeCode;
    };

    struct ExpressionRangeInfo {
        enum {
            MaxOffset = (1 << 7) - 1,
            MaxDivot = (1 << 25) - 1
        };
        uint32_t instructionOffset : 25;
        uint32_t divotPoint : 25;
        uint32_t startOffset : 7;
        uint32_t endOffset : 7;
    };

    struct LineInfo {
        uint32_t instructionOffset;
        int32_t lineNumber;
    };

    struct OffsetLocation {
        int32_t branchOffset;
#if ENABLE(CTI)
        void* ctiOffset;
#endif
    };

    struct CallLinkInfo;

    struct StructureStubInfo {
        StructureStubInfo(unsigned opcodeIndex)
            : opcodeIndex(opcodeIndex)
            , stubRoutine(0)
            , callReturnLocation(0)
            , hotPathBegin(0)
            , hotPathOther(0)
            , coldPathOther(0)
            , linkInfoPtr(0)
        {
        }

        unsigned opcodeIndex;
        void* stubRoutine;
        void* callReturnLocation;
        void* hotPathBegin;
        void* hotPathOther;
        void* coldPathOther;
        CallLinkInfo* linkInfoPtr;
    };

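    // A CallLinkInfo records one caller call site (via its StructureStubInfo)
    // that has been linked directly to a callee CodeBlock; 'position' is its
    // index in the callee's linkedCallerList, allowing O(1) removal when the
    // link is torn down.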
    struct CallLinkInfo {
        CodeBlock* callee;
        StructureStubInfo* callerStructureStubInfo;
        unsigned position;

        CallLinkInfo(CodeBlock* c, StructureStubInfo* css)
        {
            callee = c;
            callerStructureStubInfo = css;
        }
    };

    struct StringJumpTable {
        typedef HashMap<RefPtr<UString::Rep>, OffsetLocation> StringOffsetTable;
        StringOffsetTable offsetTable;
#if ENABLE(CTI)
        void* ctiDefault; // FIXME: it should not be necessary to store this.
#endif

        inline int32_t offsetForValue(UString::Rep* value, int32_t defaultOffset)
        {
            StringOffsetTable::const_iterator end = offsetTable.end();
            StringOffsetTable::const_iterator loc = offsetTable.find(value);
            if (loc == end)
                return defaultOffset;
            return loc->second.branchOffset;
        }

#if ENABLE(CTI)
        inline void* ctiForValue(UString::Rep* value)
        {
            StringOffsetTable::const_iterator end = offsetTable.end();
            StringOffsetTable::const_iterator loc = offsetTable.find(value);
            if (loc == end)
                return ctiDefault;
            return loc->second.ctiOffset;
        }
#endif
    };

    struct SimpleJumpTable {
        // FIXME: The two Vectors can be combined into one Vector<OffsetLocation>
        Vector<int32_t> branchOffsets;
        int32_t min;
#if ENABLE(CTI)
        Vector<void*> ctiOffsets;
        void* ctiDefault;
#endif

        int32_t offsetForValue(int32_t value, int32_t defaultOffset);
        void add(int32_t key, int32_t offset)
        {
            if (!branchOffsets[key])
                branchOffsets[key] = offset;
        }

#if ENABLE(CTI)
        inline void* ctiForValue(int32_t value)
        {
            if (value >= min && static_cast<uint32_t>(value - min) < ctiOffsets.size())
                return ctiOffsets[value - min];
            return ctiDefault;
        }
#endif
    };

    class EvalCodeCache {
    public:
        PassRefPtr<EvalNode> get(ExecState* exec, const UString& evalSource, ScopeChainNode* scopeChain, JSValue*& exceptionValue)
        {
            RefPtr<EvalNode> evalNode;

            if (evalSource.size() < maxCacheableSourceLength && (*scopeChain->begin())->isVariableObject())
                evalNode = cacheMap.get(evalSource.rep());

            if (!evalNode) {
                int errLine;
                UString errMsg;

                SourceCode source = makeSource(evalSource);
                evalNode = exec->globalData().parser->parse<EvalNode>(exec, exec->dynamicGlobalObject()->debugger(), source, &errLine, &errMsg);
                if (evalNode) {
                    if (evalSource.size() < maxCacheableSourceLength && (*scopeChain->begin())->isVariableObject() && cacheMap.size() < maxCacheEntries)
                        cacheMap.set(evalSource.rep(), evalNode);
                } else {
                    exceptionValue = Error::create(exec, SyntaxError, errMsg, errLine, source.provider()->asID(), NULL);
                    return 0;
                }
            }

            return evalNode.release();
        }

    private:
        static const int maxCacheableSourceLength = 256;
        static const int maxCacheEntries = 64;

        HashMap<RefPtr<UString::Rep>, RefPtr<EvalNode> > cacheMap;
    };

    struct CodeBlock {
        CodeBlock(ScopeNode* ownerNode_, CodeType codeType_, PassRefPtr<SourceProvider> source_, unsigned sourceOffset_)
            : ownerNode(ownerNode_)
            , globalData(0)
#if ENABLE(CTI)
            , ctiCode(0)
#endif
            , numCalleeRegisters(0)
            , numConstants(0)
            , numVars(0)
            , numParameters(0)
            , needsFullScopeChain(ownerNode_->needsActivation())
            , usesEval(ownerNode_->usesEval())
            , codeType(codeType_)
            , source(source_)
            , sourceOffset(sourceOffset_)
        {
            ASSERT(source);
        }

        ~CodeBlock();

#if ENABLE(CTI)
        void unlinkCallers();
#endif

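        // Book-keeping for directly linked call sites: addCaller registers a
        // caller's stub when its call is linked to this block, removeCaller
        // drops it again (swapping with the last entry for O(1) removal), and
        // unlinkCallers() resets every linked caller when this block goes away.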
        void addCaller(StructureStubInfo* caller)
        {
            CallLinkInfo* callLinkInfo = new CallLinkInfo(this, caller);
            caller->linkInfoPtr = callLinkInfo;
            callLinkInfo->position = linkedCallerList.size();
            linkedCallerList.append(callLinkInfo);
        }

        void removeCaller(CallLinkInfo* caller)
        {
            unsigned pos = caller->position;
            unsigned lastPos = linkedCallerList.size() - 1;

            if (pos != lastPos) {
                linkedCallerList[pos] = linkedCallerList[lastPos];
                linkedCallerList[pos]->position = pos;
            }
            linkedCallerList.shrink(lastPos);
        }

#if !defined(NDEBUG) || ENABLE_SAMPLING_TOOL
        void dump(ExecState*) const;
        void printStructureIDs(const Instruction*) const;
        void printStructureID(const char* name, const Instruction*, int operand) const;
#endif
        int expressionRangeForVPC(const Instruction*, int& divot, int& startOffset, int& endOffset);
        int lineNumberForVPC(const Instruction* vPC);
        bool getHandlerForVPC(const Instruction* vPC, Instruction*& target, int& scopeDepth);
        void* nativeExceptionCodeForHandlerVPC(const Instruction* handlerVPC);

        void mark();
        void refStructureIDs(Instruction* vPC) const;
        void derefStructureIDs(Instruction* vPC) const;

        StructureStubInfo& getStubInfo(void* returnAddress)
        {
            // FIXME: would a binary chop be faster here?
            for (unsigned i = 0; ; ++i) {
                ASSERT(i < structureIDInstructions.size());
                if (structureIDInstructions[i].callReturnLocation == returnAddress)
                    return structureIDInstructions[i];
            }
        }

        ScopeNode* ownerNode;
        JSGlobalData* globalData;
#if ENABLE(CTI)
        void* ctiCode;
#endif

        int numCalleeRegisters;

        // NOTE: numConstants holds the number of constant registers allocated
        // by the code generator, not the number of constant registers used.
        // (Duplicate constants are uniqued during code generation, and spare
        // constant registers may be allocated.)
        int numConstants;
        int numVars;
        int numParameters;
        int thisRegister;
        bool needsFullScopeChain;
        bool usesEval;
        bool usesArguments;
        CodeType codeType;
        RefPtr<SourceProvider> source;
        unsigned sourceOffset;

        Vector<Instruction> instructions;
        Vector<StructureStubInfo> structureIDInstructions;
        Vector<CallLinkInfo*> linkedCallerList;

        // Constant pool
        Vector<Identifier> identifiers;
        Vector<RefPtr<FuncDeclNode> > functions;
        Vector<RefPtr<FuncExprNode> > functionExpressions;
        Vector<Register> constantRegisters;
        Vector<JSValue*> unexpectedConstants;
        Vector<RefPtr<RegExp> > regexps;
        Vector<HandlerInfo> exceptionHandlers;
        Vector<ExpressionRangeInfo> expressionInfo;
        Vector<LineInfo> lineInfo;

        Vector<SimpleJumpTable> immediateSwitchJumpTables;
        Vector<SimpleJumpTable> characterSwitchJumpTables;
        Vector<StringJumpTable> stringSwitchJumpTables;

        HashSet<unsigned, DefaultHash<unsigned>::Hash, WTF::UnsignedWithZeroKeyHashTraits<unsigned> > labels;

#if ENABLE(CTI)
        HashMap<void*, unsigned> ctiReturnAddressVPCMap;
#endif

        EvalCodeCache evalCodeCache;

    private:
#if !defined(NDEBUG) || ENABLE(SAMPLING_TOOL)
        void dump(ExecState*, const Vector<Instruction>::const_iterator& begin, Vector<Instruction>::const_iterator&) const;
#endif

    };

    // Program code is not marked by any function, so we make the global object
    // responsible for marking it.

    struct ProgramCodeBlock : public CodeBlock {
        ProgramCodeBlock(ScopeNode* ownerNode_, CodeType codeType_, JSGlobalObject* globalObject_, PassRefPtr<SourceProvider> source_)
            : CodeBlock(ownerNode_, codeType_, source_, 0)
            , globalObject(globalObject_)
        {
            globalObject->codeBlocks().add(this);
        }

        ~ProgramCodeBlock()
        {
            if (globalObject)
                globalObject->codeBlocks().remove(this);
        }

        JSGlobalObject* globalObject; // For program and eval nodes, the global object that marks the constant pool.
    };

    struct EvalCodeBlock : public ProgramCodeBlock {
        EvalCodeBlock(ScopeNode* ownerNode_, JSGlobalObject* globalObject_, PassRefPtr<SourceProvider> source_)
            : ProgramCodeBlock(ownerNode_, EvalCode, globalObject_, source_)
        {
        }
    };

} // namespace JSC

#endif // CodeBlock_h