source: webkit/trunk/JavaScriptCore/jit/JIT.cpp@ 39577

Last change on this file since 39577 was 39577, checked in by [email protected], 16 years ago

2009-01-03 Sam Weinig <[email protected]>

Reviewed by Oliver Hunt.

Change the pcVector from storing native code pointers to storing offsets
from the base pointer. This will allow us to generate the pcVector on demand
for exceptions.

  • bytecode/CodeBlock.h: (JSC::PC::PC): (JSC::getNativePCOffset): (JSC::CodeBlock::getBytecodeIndex):
  • jit/JIT.cpp: (JSC::JIT::privateCompile):
File size: 80.0 KB
Line 
1/*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "JIT.h"
28
29#if ENABLE(JIT)
30
31#include "CodeBlock.h"
32#include "JITInlineMethods.h"
33#include "JSArray.h"
34#include "JSFunction.h"
35#include "Interpreter.h"
36#include "ResultType.h"
37#include "SamplingTool.h"
38
39#ifndef NDEBUG
40#include <stdio.h>
41#endif
42
43using namespace std;
44
45namespace JSC {
46
47COMPILE_ASSERT(STUB_ARGS_code == 0xC, STUB_ARGS_code_is_C);
48COMPILE_ASSERT(STUB_ARGS_callFrame == 0xE, STUB_ARGS_callFrame_is_E);
49
50#if COMPILER(GCC) && PLATFORM(X86)
51
52#if PLATFORM(DARWIN)
53#define SYMBOL_STRING(name) "_" #name
54#else
55#define SYMBOL_STRING(name) #name
56#endif
57
// JIT entry trampoline (GCC, x86). Sets up the frame that JIT-generated code
// expects: saves the callee-saved registers the JIT pins, reserves scratch /
// stub-argument stack space, seeds the pinned registers, then jumps into the
// generated code whose address was passed as the STUB_ARGS_code argument.
asm(
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
    "pushl %ebp" "\n"
    "movl %esp, %ebp" "\n"
    "pushl %esi" "\n"
    "pushl %edi" "\n"
    "pushl %ebx" "\n"
    "subl $0x1c, %esp" "\n"
    "movl $512, %esi" "\n" // countdown seed — NOTE(review): presumably timeoutCheckRegister (see emitSlowScriptCheck); confirm
    "movl 0x38(%esp), %edi" "\n" // 0x38 = 0x0E * 4, 0x0E = STUB_ARGS_callFrame (see assertion above)
    "call *0x30(%esp)" "\n" // 0x30 = 0x0C * 4, 0x0C = STUB_ARGS_code (see assertion above)
    "addl $0x1c, %esp" "\n"
    "popl %ebx" "\n"
    "popl %edi" "\n"
    "popl %esi" "\n"
    "popl %ebp" "\n"
    "ret" "\n"
);
77
// Thrown-exception entry point (GCC, x86): hand the stub-argument pointer to
// Interpreter::cti_vm_throw — the strings below are its mangled names (the
// va_list overload vs. the pointer overload, selected by the stub-argument
// passing scheme) — then unwind using the same epilogue as ctiTrampoline
// above, so we return to ctiTrampoline's caller.
asm(
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPvz) "\n"
#else
#if USE(JIT_STUB_ARGUMENT_REGISTER)
    "movl %esp, %ecx" "\n"
#else // JIT_STUB_ARGUMENT_STACK
    "movl %esp, 0(%esp)" "\n"
#endif
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPPv) "\n"
#endif
    "addl $0x1c, %esp" "\n"
    "popl %ebx" "\n"
    "popl %edi" "\n"
    "popl %esi" "\n"
    "popl %ebp" "\n"
    "ret" "\n"
);
98
99#elif COMPILER(GCC) && PLATFORM(X86_64)
100
101#if PLATFORM(DARWIN)
102#define SYMBOL_STRING(name) "_" #name
103#else
104#define SYMBOL_STRING(name) #name
105#endif
106
// JIT entry trampoline (GCC, x86-64). Same shape as the x86 version above,
// with r12/r13 taking the roles of esi/edi and 8-byte stack slots.
asm(
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
    "pushq %rbp" "\n"
    "movq %rsp, %rbp" "\n"
    "pushq %r12" "\n"
    "pushq %r13" "\n"
    "pushq %rbx" "\n"
    "subq $0x38, %rsp" "\n"
    "movq $512, %r12" "\n" // countdown seed — NOTE(review): presumably timeoutCheckRegister (see emitSlowScriptCheck); confirm
    "movq 0x70(%rsp), %r13" "\n" // 0x70 = 0x0E * 8, 0x0E = STUB_ARGS_callFrame (see assertion above)
    "call *0x60(%rsp)" "\n" // 0x60 = 0x0C * 8, 0x0C = STUB_ARGS_code (see assertion above)
    "addq $0x38, %rsp" "\n"
    "popq %rbx" "\n"
    "popq %r13" "\n"
    "popq %r12" "\n"
    "popq %rbp" "\n"
    "ret" "\n"
);
126
// Thrown-exception entry point (GCC, x86-64): pass the stub-argument pointer
// in rdi (the first integer argument register) to Interpreter::cti_vm_throw
// (mangled name below), then unwind with ctiTrampoline's epilogue. Only the
// register-passing stub-argument scheme is supported on this target.
asm(
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
#if USE(JIT_STUB_ARGUMENT_REGISTER)
    "movq %rsp, %rdi" "\n"
    "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPPv) "\n"
#else // JIT_STUB_ARGUMENT_VA_LIST or JIT_STUB_ARGUMENT_STACK
#error "JIT_STUB_ARGUMENT configuration not supported."
#endif
    "addq $0x38, %rsp" "\n"
    "popq %rbx" "\n"
    "popq %r13" "\n"
    "popq %r12" "\n"
    "popq %rbp" "\n"
    "ret" "\n"
);
143
144#elif COMPILER(MSVC)
145
146extern "C" {
147
    // JIT entry trampoline (MSVC, x86). Mirrors the GCC x86 version above:
    // save callee-saved registers, reserve stub-argument space, seed the
    // pinned registers, then call into the generated code.
    __declspec(naked) JSValue* ctiTrampoline(void* code, RegisterFile*, CallFrame*, JSValue** exception, Profiler**, JSGlobalData*)
    {
        __asm {
            push ebp;
            mov ebp, esp;
            push esi;
            push edi;
            push ebx;
            sub esp, 0x1c;
            mov esi, 512; // countdown seed — NOTE(review): presumably timeoutCheckRegister; confirm
            mov ecx, esp; // stub-argument pointer (JIT_STUB_ARGUMENT_REGISTER passes args in ecx; see throw trampoline below)
            mov edi, [esp + 0x38]; // 0x38 = 0x0E * 4, 0x0E = STUB_ARGS_callFrame (see assertion above)
            call [esp + 0x30]; // 0x30 = 0x0C * 4, 0x0C = STUB_ARGS_code (see assertion above)
            add esp, 0x1c;
            pop ebx;
            pop edi;
            pop esi;
            pop ebp;
            ret;
        }
    }
169
    // Thrown-exception entry point (MSVC, x86): pass the stub-argument
    // pointer (in ecx) to Interpreter::cti_vm_throw, then unwind with the
    // same epilogue as ctiTrampoline above.
    __declspec(naked) void ctiVMThrowTrampoline()
    {
        __asm {
#if USE(JIT_STUB_ARGUMENT_REGISTER)
            mov ecx, esp;
#else // JIT_STUB_ARGUMENT_VA_LIST or JIT_STUB_ARGUMENT_STACK
#error "JIT_STUB_ARGUMENT configuration not supported."
#endif
            call JSC::Interpreter::cti_vm_throw;
            add esp, 0x1c;
            pop ebx;
            pop edi;
            pop esi;
            pop ebp;
            ret;
        }
    }
187
188}
189
190#endif
191
// Overwrite the return-address slot at `where` with `what`, so the next
// `ret` through that slot transfers control to `what`.
void ctiSetReturnAddress(void** where, void* what)
{
    // A plain store is sufficient: the slot is ordinary writable memory.
    *where = what;
}
196
// Repatch the call instruction whose return address is `where` so that it
// targets `what` instead; delegates to the macro assembler's jump patching.
void ctiPatchCallByReturnAddress(void* where, void* what)
{
    MacroAssembler::Jump::patch(where, what);
}
201
// Construct a JIT for the given code block. Pre-sizes the per-bytecode label
// table and the property-access / call-link compilation-info vectors from the
// CodeBlock's counts. codeBlock may be null (the guards below allow it —
// presumably for compilations that don't target a CodeBlock; confirm).
JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
    : m_interpreter(globalData->interpreter)
    , m_globalData(globalData)
    , m_codeBlock(codeBlock)
    , m_labels(codeBlock ? codeBlock->instructions().size() : 0)
    , m_propertyAccessCompilationInfo(codeBlock ? codeBlock->numberOfStructureStubInfos() : 0)
    , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
    , m_lastResultBytecodeRegister(std::numeric_limits<int>::max()) // sentinel: no register's value is cached in eax yet
    , m_jumpTargetsPosition(0)
{
}
213
// Emit the fast path for op_stricteq / op_nstricteq: dst = (src1 === src2),
// negated for OpNStrictEq. Handles only immediate operands inline; cell
// operands paired with a zero immediate, and cell/cell pairs, are deferred
// to the slow case via addSlowCase.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    bool negated = (type == OpNStrictEq);

    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, X86::eax, src2, X86::edx);

    // Check that both are immediates, if so check if they're equal
    Jump firstNotImmediate = emitJumpIfJSCell(X86::eax);
    Jump secondNotImmediate = emitJumpIfJSCell(X86::edx);
    Jump bothWereImmediatesButNotEqual = jne32(X86::edx, X86::eax);

    // They are equal - set the result to true. (Or false, if negated).
    move(ImmPtr(jsBoolean(!negated)), X86::eax);
    Jump bothWereImmediatesAndEqual = jump();

    // eax was not an immediate, we haven't yet checked edx.
    // If edx is also a JSCell, or is 0, then jump to a slow case,
    // otherwise these values are not equal.
    firstNotImmediate.link(this);
    emitJumpSlowCaseIfJSCell(X86::edx);
    addSlowCase(jePtr(X86::edx, ImmPtr(JSImmediate::zeroImmediate())));
    Jump firstWasNotImmediate = jump();

    // eax was an immediate, but edx wasn't.
    // If eax is 0 jump to a slow case, otherwise these values are not equal.
    secondNotImmediate.link(this);
    addSlowCase(jePtr(X86::eax, ImmPtr(JSImmediate::zeroImmediate())));

    // We get here if the two values are different immediates, or one is 0 and the other is a JSCell.
    // Values are not equal, set the result to false.
    bothWereImmediatesButNotEqual.link(this);
    firstWasNotImmediate.link(this);
    move(ImmPtr(jsBoolean(negated)), X86::eax);

    bothWereImmediatesAndEqual.link(this);
    emitPutVirtualRegister(dst);
}
255
// Emit the slow-script (timeout) check: decrement the countdown register
// and, when it hits zero, call out to cti_timeout_check, which returns a
// fresh countdown value in eax. Because the CTI call clobbers registers,
// the cached last-result register must be invalidated.
void JIT::emitSlowScriptCheck()
{
    Jump skipTimeout = jnzSub32(Imm32(1), timeoutCheckRegister);
    emitCTICall(Interpreter::cti_timeout_check);
    move(X86::eax, timeoutCheckRegister); // reload the countdown from the stub's return value
    skipTimeout.link(this);

    killLastResultRegister();
}
265
266
// Advance m_bytecodeIndex over the opcode just compiled and leave the
// dispatch switch. `name` must match the opcode being handled so that
// OPCODE_LENGTH yields the correct instruction width.
#define NEXT_OPCODE(name) \
    m_bytecodeIndex += OPCODE_LENGTH(name); \
    break;

// Compile a binary-operator opcode as a plain call to its C++ stub
// (Interpreter::cti_<name>): marshal both operands as stub arguments,
// call out, and store the returned value into the destination register.
#define CTI_COMPILE_BINARY_OP(name) \
    case name: { \
        emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx); \
        emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx); \
        emitCTICall(Interpreter::cti_##name); \
        emitPutVirtualRegister(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

// Unary variant of the above: one operand, one stub call, one store.
#define CTI_COMPILE_UNARY_OP(name) \
    case name: { \
        emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx); \
        emitCTICall(Interpreter::cti_##name); \
        emitPutVirtualRegister(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }
287
288void JIT::privateCompileMainPass()
289{
290 Instruction* instructionsBegin = m_codeBlock->instructions().begin();
291 unsigned instructionCount = m_codeBlock->instructions().size();
292 unsigned propertyAccessInstructionIndex = 0;
293 unsigned globalResolveInfoIndex = 0;
294 unsigned callLinkInfoIndex = 0;
295
296 for (m_bytecodeIndex = 0; m_bytecodeIndex < instructionCount; ) {
297 Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;
298 ASSERT_WITH_MESSAGE(m_interpreter->isOpcode(currentInstruction->u.opcode), "privateCompileMainPass gone bad @ %d", m_bytecodeIndex);
299
300#if ENABLE(OPCODE_SAMPLING)
301 if (m_bytecodeIndex > 0) // Avoid the overhead of sampling op_enter twice.
302 store32(m_interpreter->sampler()->encodeSample(currentInstruction), m_interpreter->sampler()->sampleSlot());
303#endif
304
305 m_labels[m_bytecodeIndex] = label();
306 OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode);
307
308 switch (opcodeID) {
309 case op_mov: {
310 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
311 emitPutVirtualRegister(currentInstruction[1].u.operand);
312 NEXT_OPCODE(op_mov);
313 }
314 case op_add: {
315 compileFastArith_op_add(currentInstruction);
316 NEXT_OPCODE(op_add);
317 }
318 case op_end: {
319 if (m_codeBlock->needsFullScopeChain())
320 emitCTICall(Interpreter::cti_op_end);
321 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
322 push(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
323 ret();
324 NEXT_OPCODE(op_end);
325 }
326 case op_jmp: {
327 unsigned target = currentInstruction[1].u.operand;
328 addJump(jump(), target + 1);
329 NEXT_OPCODE(op_jmp);
330 }
331 case op_pre_inc: {
332 compileFastArith_op_pre_inc(currentInstruction[1].u.operand);
333 NEXT_OPCODE(op_pre_inc);
334 }
335 case op_loop: {
336 emitSlowScriptCheck();
337
338 unsigned target = currentInstruction[1].u.operand;
339 addJump(jump(), target + 1);
340 NEXT_OPCODE(op_end);
341 }
342 case op_loop_if_less: {
343 emitSlowScriptCheck();
344
345 unsigned op1 = currentInstruction[1].u.operand;
346 unsigned op2 = currentInstruction[2].u.operand;
347 unsigned target = currentInstruction[3].u.operand;
348 if (isOperandConstantImmediateInt(op2)) {
349 emitGetVirtualRegister(op1, X86::eax);
350 emitJumpSlowCaseIfNotImmNum(X86::eax);
351 addJump(jlPtr(X86::eax, ImmPtr(getConstantOperand(op2))), target + 3);
352 } else {
353 emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
354 emitJumpSlowCaseIfNotImmNum(X86::eax);
355 emitJumpSlowCaseIfNotImmNum(X86::edx);
356 addJump(jlPtr(X86::eax, X86::edx), target + 3);
357 }
358 NEXT_OPCODE(op_loop_if_less);
359 }
360 case op_loop_if_lesseq: {
361 emitSlowScriptCheck();
362
363 unsigned op1 = currentInstruction[1].u.operand;
364 unsigned op2 = currentInstruction[2].u.operand;
365 unsigned target = currentInstruction[3].u.operand;
366 if (isOperandConstantImmediateInt(op2)) {
367 emitGetVirtualRegister(op1, X86::eax);
368 emitJumpSlowCaseIfNotImmNum(X86::eax);
369 addJump(jlePtr(X86::eax, ImmPtr(getConstantOperand(op2))), target + 3);
370 } else {
371 emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
372 emitJumpSlowCaseIfNotImmNum(X86::eax);
373 emitJumpSlowCaseIfNotImmNum(X86::edx);
374 addJump(jlePtr(X86::eax, X86::edx), target + 3);
375 }
376 NEXT_OPCODE(op_loop_if_less);
377 }
378 case op_new_object: {
379 emitCTICall(Interpreter::cti_op_new_object);
380 emitPutVirtualRegister(currentInstruction[1].u.operand);
381 NEXT_OPCODE(op_new_object);
382 }
383 case op_put_by_id: {
384 compilePutByIdHotPath(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, propertyAccessInstructionIndex++);
385 NEXT_OPCODE(op_put_by_id);
386 }
387 case op_get_by_id: {
388 compileGetByIdHotPath(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), propertyAccessInstructionIndex++);
389 NEXT_OPCODE(op_get_by_id);
390 }
391 case op_instanceof: {
392 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax); // value
393 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx); // baseVal
394 emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // proto
395
396 // check if any are immediates
397 move(X86::eax, X86::ebx);
398 or32(X86::ecx, X86::ebx);
399 or32(X86::edx, X86::ebx);
400 emitJumpSlowCaseIfNotJSCell(X86::ebx);
401
402 // check that all are object type - this is a bit of a bithack to avoid excess branching;
403 // we check that the sum of the three type codes from Structures is exactly 3 * ObjectType,
404 // this works because NumberType and StringType are smaller
405 move(Imm32(3 * ObjectType), X86::ebx);
406 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::eax);
407 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
408 loadPtr(Address(X86::edx, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
409 sub32(Address(X86::eax, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
410 sub32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
411 addSlowCase(jne32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx));
412
413 // check that baseVal's flags include ImplementsHasInstance but not OverridesHasInstance
414 load32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), X86::ecx);
415 and32(Imm32(ImplementsHasInstance | OverridesHasInstance), X86::ecx);
416 addSlowCase(jne32(X86::ecx, Imm32(ImplementsHasInstance)));
417
418 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::ecx); // reload value
419 emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // reload proto
420
421 // optimistically load true result
422 move(ImmPtr(jsBoolean(true)), X86::eax);
423
424 Label loop(this);
425
426 // load value's prototype
427 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
428 loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);
429
430 Jump exit = jePtr(X86::ecx, X86::edx);
431
432 jnePtr(X86::ecx, ImmPtr(jsNull()), loop);
433
434 move(ImmPtr(jsBoolean(false)), X86::eax);
435
436 exit.link(this);
437
438 emitPutVirtualRegister(currentInstruction[1].u.operand);
439
440 NEXT_OPCODE(op_instanceof);
441 }
442 case op_del_by_id: {
443 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
444 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
445 emitPutJITStubArgConstant(ident, 2);
446 emitCTICall(Interpreter::cti_op_del_by_id);
447 emitPutVirtualRegister(currentInstruction[1].u.operand);
448 NEXT_OPCODE(op_del_by_id);
449 }
450 case op_mul: {
451 compileFastArith_op_mul(currentInstruction);
452 NEXT_OPCODE(op_mul);
453 }
454 case op_new_func: {
455 FuncDeclNode* func = m_codeBlock->function(currentInstruction[2].u.operand);
456 emitPutJITStubArgConstant(func, 1);
457 emitCTICall(Interpreter::cti_op_new_func);
458 emitPutVirtualRegister(currentInstruction[1].u.operand);
459 NEXT_OPCODE(op_new_func);
460 }
461 case op_call: {
462 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
463 NEXT_OPCODE(op_call);
464 }
465 case op_call_eval: {
466 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
467 NEXT_OPCODE(op_call_eval);
468 }
469 case op_construct: {
470 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
471 NEXT_OPCODE(op_construct);
472 }
473 case op_get_global_var: {
474 JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
475 move(ImmPtr(globalObject), X86::eax);
476 emitGetVariableObjectRegister(X86::eax, currentInstruction[3].u.operand, X86::eax);
477 emitPutVirtualRegister(currentInstruction[1].u.operand);
478 NEXT_OPCODE(op_get_global_var);
479 }
480 case op_put_global_var: {
481 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::edx);
482 JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
483 move(ImmPtr(globalObject), X86::eax);
484 emitPutVariableObjectRegister(X86::edx, X86::eax, currentInstruction[2].u.operand);
485 NEXT_OPCODE(op_put_global_var);
486 }
487 case op_get_scoped_var: {
488 int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
489
490 emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::eax);
491 while (skip--)
492 loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, next)), X86::eax);
493
494 loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, object)), X86::eax);
495 emitGetVariableObjectRegister(X86::eax, currentInstruction[2].u.operand, X86::eax);
496 emitPutVirtualRegister(currentInstruction[1].u.operand);
497 NEXT_OPCODE(op_get_scoped_var);
498 }
499 case op_put_scoped_var: {
500 int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
501
502 emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::edx);
503 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
504 while (skip--)
505 loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, next)), X86::edx);
506
507 loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, object)), X86::edx);
508 emitPutVariableObjectRegister(X86::eax, X86::edx, currentInstruction[1].u.operand);
509 NEXT_OPCODE(op_put_scoped_var);
510 }
511 case op_tear_off_activation: {
512 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
513 emitCTICall(Interpreter::cti_op_tear_off_activation);
514 NEXT_OPCODE(op_tear_off_activation);
515 }
516 case op_tear_off_arguments: {
517 emitCTICall(Interpreter::cti_op_tear_off_arguments);
518 NEXT_OPCODE(op_tear_off_arguments);
519 }
520 case op_ret: {
521 // We could JIT generate the deref, only calling out to C when the refcount hits zero.
522 if (m_codeBlock->needsFullScopeChain())
523 emitCTICall(Interpreter::cti_op_ret_scopeChain);
524
525 // Return the result in %eax.
526 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
527
528 // Grab the return address.
529 emitGetFromCallFrameHeader(RegisterFile::ReturnPC, X86::edx);
530
531 // Restore our caller's "r".
532 emitGetFromCallFrameHeader(RegisterFile::CallerFrame, callFrameRegister);
533
534 // Return.
535 push(X86::edx);
536 ret();
537
538 NEXT_OPCODE(op_ret);
539 }
540 case op_new_array: {
541 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
542 emitPutJITStubArgConstant(currentInstruction[3].u.operand, 2);
543 emitCTICall(Interpreter::cti_op_new_array);
544 emitPutVirtualRegister(currentInstruction[1].u.operand);
545 NEXT_OPCODE(op_new_array);
546 }
547 case op_resolve: {
548 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
549 emitPutJITStubArgConstant(ident, 1);
550 emitCTICall(Interpreter::cti_op_resolve);
551 emitPutVirtualRegister(currentInstruction[1].u.operand);
552 NEXT_OPCODE(op_resolve);
553 }
554 case op_construct_verify: {
555 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
556
557 emitJumpSlowCaseIfNotJSCell(X86::eax);
558 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
559 addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));
560
561 NEXT_OPCODE(op_construct_verify);
562 }
563 case op_get_by_val: {
564 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
565 emitJumpSlowCaseIfNotImmNum(X86::edx);
566 emitFastArithImmToInt(X86::edx);
567 emitJumpSlowCaseIfNotJSCell(X86::eax);
568 addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
569
570 // This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
571 loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
572 addSlowCase(jae32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff))));
573
574 // Get the value from the vector
575 loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::eax);
576 emitPutVirtualRegister(currentInstruction[1].u.operand);
577 NEXT_OPCODE(op_get_by_val);
578 }
579 case op_resolve_func: {
580 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
581 emitPutJITStubArgConstant(ident, 1);
582 emitCTICall(Interpreter::cti_op_resolve_func);
583 emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
584 emitPutVirtualRegister(currentInstruction[1].u.operand);
585 NEXT_OPCODE(op_resolve_func);
586 }
587 case op_sub: {
588 compileBinaryArithOp(op_sub, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
589 NEXT_OPCODE(op_sub);
590 }
591 case op_put_by_val: {
592 emitGetVirtualRegisters(currentInstruction[1].u.operand, X86::eax, currentInstruction[2].u.operand, X86::edx);
593 emitJumpSlowCaseIfNotImmNum(X86::edx);
594 emitFastArithImmToInt(X86::edx);
595 emitJumpSlowCaseIfNotJSCell(X86::eax);
596 addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
597
598 // This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
599 loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
600 Jump inFastVector = jb32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff)));
601 // No; oh well, check if the access if within the vector - if so, we may still be okay.
602 addSlowCase(jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength))));
603
604 // This is a write to the slow part of the vector; first, we have to check if this would be the first write to this location.
605 // FIXME: should be able to handle initial write to array; increment the the number of items in the array, and potentially update fast access cutoff.
606 addSlowCase(jzPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0]))));
607
608 // All good - put the value into the array.
609 inFastVector.link(this);
610 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
611 storePtr(X86::eax, BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])));
612 NEXT_OPCODE(op_put_by_val);
613 }
614 CTI_COMPILE_BINARY_OP(op_lesseq)
615 case op_loop_if_true: {
616 emitSlowScriptCheck();
617
618 unsigned target = currentInstruction[2].u.operand;
619 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
620
621 Jump isZero = jePtr(X86::eax, ImmPtr(JSImmediate::zeroImmediate()));
622 addJump(jnz32(X86::eax, Imm32(JSImmediate::TagBitTypeInteger)), target + 2);
623
624 addJump(jePtr(X86::eax, ImmPtr(jsBoolean(true))), target + 2);
625 addSlowCase(jnePtr(X86::eax, ImmPtr(jsBoolean(false))));
626
627 isZero.link(this);
628 NEXT_OPCODE(op_loop_if_true);
629 };
630 case op_resolve_base: {
631 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
632 emitPutJITStubArgConstant(ident, 1);
633 emitCTICall(Interpreter::cti_op_resolve_base);
634 emitPutVirtualRegister(currentInstruction[1].u.operand);
635 NEXT_OPCODE(op_resolve_base);
636 }
637 case op_negate: {
638 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
639 emitCTICall(Interpreter::cti_op_negate);
640 emitPutVirtualRegister(currentInstruction[1].u.operand);
641 NEXT_OPCODE(op_negate);
642 }
643 case op_resolve_skip: {
644 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
645 emitPutJITStubArgConstant(ident, 1);
646 emitPutJITStubArgConstant(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain(), 2);
647 emitCTICall(Interpreter::cti_op_resolve_skip);
648 emitPutVirtualRegister(currentInstruction[1].u.operand);
649 NEXT_OPCODE(op_resolve_skip);
650 }
651 case op_resolve_global: {
652 // Fast case
653 void* globalObject = currentInstruction[2].u.jsCell;
654 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
655
656 unsigned currentIndex = globalResolveInfoIndex++;
657 void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
658 void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
659
660 // Check Structure of global object
661 move(ImmPtr(globalObject), X86::eax);
662 loadPtr(structureAddress, X86::edx);
663 Jump noMatch = jnePtr(X86::edx, Address(X86::eax, FIELD_OFFSET(JSCell, m_structure))); // Structures don't match
664
665 // Load cached property
666 loadPtr(Address(X86::eax, FIELD_OFFSET(JSGlobalObject, m_propertyStorage)), X86::eax);
667 load32(offsetAddr, X86::edx);
668 loadPtr(BaseIndex(X86::eax, X86::edx, ScalePtr), X86::eax);
669 emitPutVirtualRegister(currentInstruction[1].u.operand);
670 Jump end = jump();
671
672 // Slow case
673 noMatch.link(this);
674 emitPutJITStubArgConstant(globalObject, 1);
675 emitPutJITStubArgConstant(ident, 2);
676 emitPutJITStubArgConstant(currentIndex, 3);
677 emitCTICall(Interpreter::cti_op_resolve_global);
678 emitPutVirtualRegister(currentInstruction[1].u.operand);
679 end.link(this);
680 NEXT_OPCODE(op_resolve_global);
681 }
682 CTI_COMPILE_BINARY_OP(op_div)
683 case op_pre_dec: {
684 compileFastArith_op_pre_dec(currentInstruction[1].u.operand);
685 NEXT_OPCODE(op_pre_dec);
686 }
687 case op_jnless: {
688 unsigned op1 = currentInstruction[1].u.operand;
689 unsigned op2 = currentInstruction[2].u.operand;
690 unsigned target = currentInstruction[3].u.operand;
691 if (isOperandConstantImmediateInt(op2)) {
692 emitGetVirtualRegister(op1, X86::eax);
693 emitJumpSlowCaseIfNotImmNum(X86::eax);
694 addJump(jgePtr(X86::eax, ImmPtr(getConstantOperand(op2))), target + 3);
695 } else {
696 emitGetVirtualRegisters(op1, X86::eax, op2, X86::edx);
697 emitJumpSlowCaseIfNotImmNum(X86::eax);
698 emitJumpSlowCaseIfNotImmNum(X86::edx);
699 addJump(jgePtr(X86::eax, X86::edx), target + 3);
700 }
701 NEXT_OPCODE(op_jnless);
702 }
703 case op_not: {
704 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
705 xorPtr(Imm32(JSImmediate::FullTagTypeBool), X86::eax);
706 addSlowCase(jnz32(X86::eax, Imm32(JSImmediate::FullTagTypeMask)));
707 xorPtr(Imm32(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue), X86::eax);
708 emitPutVirtualRegister(currentInstruction[1].u.operand);
709 NEXT_OPCODE(op_not);
710 }
711 case op_jfalse: {
712 unsigned target = currentInstruction[2].u.operand;
713 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
714
715 addJump(jePtr(X86::eax, ImmPtr(JSImmediate::zeroImmediate())), target + 2);
716 Jump isNonZero = jnz32(X86::eax, Imm32(JSImmediate::TagBitTypeInteger));
717
718 addJump(jePtr(X86::eax, ImmPtr(jsBoolean(false))), target + 2);
719 addSlowCase(jnePtr(X86::eax, ImmPtr(jsBoolean(true))));
720
721 isNonZero.link(this);
722 NEXT_OPCODE(op_jfalse);
723 };
724 case op_jeq_null: {
725 unsigned src = currentInstruction[1].u.operand;
726 unsigned target = currentInstruction[2].u.operand;
727
728 emitGetVirtualRegister(src, X86::eax);
729 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
730
731 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
732 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
733 addJump(jnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
734 Jump wasNotImmediate = jump();
735
736 // Now handle the immediate cases - undefined & null
737 isImmediate.link(this);
738 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
739 addJump(jePtr(X86::eax, ImmPtr(jsNull())), target + 2);
740
741 wasNotImmediate.link(this);
742 NEXT_OPCODE(op_jeq_null);
743 };
744 case op_jneq_null: {
745 unsigned src = currentInstruction[1].u.operand;
746 unsigned target = currentInstruction[2].u.operand;
747
748 emitGetVirtualRegister(src, X86::eax);
749 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
750
751 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
752 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
753 addJump(jz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
754 Jump wasNotImmediate = jump();
755
756 // Now handle the immediate cases - undefined & null
757 isImmediate.link(this);
758 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
759 addJump(jnePtr(X86::eax, ImmPtr(jsNull())), target + 2);
760
761 wasNotImmediate.link(this);
762 NEXT_OPCODE(op_jneq_null);
763 }
764 case op_post_inc: {
765 compileFastArith_op_post_inc(currentInstruction[1].u.operand, currentInstruction[2].u.operand);
766 NEXT_OPCODE(op_post_inc);
767 }
768 case op_unexpected_load: {
769 JSValue* v = m_codeBlock->unexpectedConstant(currentInstruction[2].u.operand);
770 move(ImmPtr(v), X86::eax);
771 emitPutVirtualRegister(currentInstruction[1].u.operand);
772 NEXT_OPCODE(op_unexpected_load);
773 }
774 case op_jsr: {
775 int retAddrDst = currentInstruction[1].u.operand;
776 int target = currentInstruction[2].u.operand;
777 DataLabelPtr storeLocation = storePtrWithPatch(Address(callFrameRegister, sizeof(Register) * retAddrDst));
778 addJump(jump(), target + 2);
779 m_jsrSites.append(JSRInfo(storeLocation, label()));
780 NEXT_OPCODE(op_jsr);
781 }
782 case op_sret: {
783 jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
784 NEXT_OPCODE(op_sret);
785 }
786 case op_eq: {
787 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
788 emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
789 sete32(X86::edx, X86::eax);
790 emitTagAsBoolImmediate(X86::eax);
791 emitPutVirtualRegister(currentInstruction[1].u.operand);
792 NEXT_OPCODE(op_eq);
793 }
794 case op_lshift: {
795 compileFastArith_op_lshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
796 NEXT_OPCODE(op_lshift);
797 }
798 case op_bitand: {
799 compileFastArith_op_bitand(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
800 NEXT_OPCODE(op_bitand);
801 }
802 case op_rshift: {
803 compileFastArith_op_rshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
804 NEXT_OPCODE(op_rshift);
805 }
806 case op_bitnot: {
807 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
808 emitJumpSlowCaseIfNotImmNum(X86::eax);
809 xorPtr(Imm32(~JSImmediate::TagBitTypeInteger), X86::eax);
810 emitPutVirtualRegister(currentInstruction[1].u.operand);
811 NEXT_OPCODE(op_bitnot);
812 }
813 case op_resolve_with_base: {
814 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
815 emitPutJITStubArgConstant(ident, 1);
816 emitCTICall(Interpreter::cti_op_resolve_with_base);
817 emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
818 emitPutVirtualRegister(currentInstruction[1].u.operand);
819 NEXT_OPCODE(op_resolve_with_base);
820 }
821 case op_new_func_exp: {
822 FuncExprNode* func = m_codeBlock->functionExpression(currentInstruction[2].u.operand);
823 emitPutJITStubArgConstant(func, 1);
824 emitCTICall(Interpreter::cti_op_new_func_exp);
825 emitPutVirtualRegister(currentInstruction[1].u.operand);
826 NEXT_OPCODE(op_new_func_exp);
827 }
828 case op_mod: {
829 compileFastArith_op_mod(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand);
830 NEXT_OPCODE(op_mod);
831 }
832 case op_jtrue: {
833 unsigned target = currentInstruction[2].u.operand;
834 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
835
836 Jump isZero = jePtr(X86::eax, ImmPtr(JSImmediate::zeroImmediate()));
837 addJump(jnz32(X86::eax, Imm32(JSImmediate::TagBitTypeInteger)), target + 2);
838
839 addJump(jePtr(X86::eax, ImmPtr(jsBoolean(true))), target + 2);
840 addSlowCase(jnePtr(X86::eax, ImmPtr(jsBoolean(false))));
841
842 isZero.link(this);
843 NEXT_OPCODE(op_jtrue);
844 }
845 CTI_COMPILE_BINARY_OP(op_less)
846 case op_neq: {
847 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
848 emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
849 setne32(X86::edx, X86::eax);
850 emitTagAsBoolImmediate(X86::eax);
851
852 emitPutVirtualRegister(currentInstruction[1].u.operand);
853
854 NEXT_OPCODE(op_neq);
855 }
856 case op_post_dec: {
857 compileFastArith_op_post_dec(currentInstruction[1].u.operand, currentInstruction[2].u.operand);
858 NEXT_OPCODE(op_post_dec);
859 }
860 CTI_COMPILE_BINARY_OP(op_urshift)
861 case op_bitxor: {
862 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
863 emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
864 xorPtr(X86::edx, X86::eax);
865 emitFastArithReTagImmediate(X86::eax);
866 emitPutVirtualRegister(currentInstruction[1].u.operand);
867 NEXT_OPCODE(op_bitxor);
868 }
869 case op_new_regexp: {
870 RegExp* regExp = m_codeBlock->regexp(currentInstruction[2].u.operand);
871 emitPutJITStubArgConstant(regExp, 1);
872 emitCTICall(Interpreter::cti_op_new_regexp);
873 emitPutVirtualRegister(currentInstruction[1].u.operand);
874 NEXT_OPCODE(op_new_regexp);
875 }
876 case op_bitor: {
877 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
878 emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
879 orPtr(X86::edx, X86::eax);
880 emitPutVirtualRegister(currentInstruction[1].u.operand);
881 NEXT_OPCODE(op_bitor);
882 }
883 case op_throw: {
884 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
885 emitCTICall(Interpreter::cti_op_throw);
886#if PLATFORM(X86_64)
887 addPtr(Imm32(0x38), X86::esp);
888 pop(X86::ebx);
889 pop(X86::r13);
890 pop(X86::r12);
891 pop(X86::ebp);
892 ret();
893#else
894 addPtr(Imm32(0x1c), X86::esp);
895 pop(X86::ebx);
896 pop(X86::edi);
897 pop(X86::esi);
898 pop(X86::ebp);
899 ret();
900#endif
901 NEXT_OPCODE(op_throw);
902 }
903 case op_get_pnames: {
904 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
905 emitCTICall(Interpreter::cti_op_get_pnames);
906 emitPutVirtualRegister(currentInstruction[1].u.operand);
907 NEXT_OPCODE(op_get_pnames);
908 }
909 case op_next_pname: {
910 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
911 unsigned target = currentInstruction[3].u.operand;
912 emitCTICall(Interpreter::cti_op_next_pname);
913 Jump endOfIter = jzPtr(X86::eax);
914 emitPutVirtualRegister(currentInstruction[1].u.operand);
915 addJump(jump(), target + 3);
916 endOfIter.link(this);
917 NEXT_OPCODE(op_next_pname);
918 }
919 case op_push_scope: {
920 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
921 emitCTICall(Interpreter::cti_op_push_scope);
922 emitPutVirtualRegister(currentInstruction[1].u.operand);
923 NEXT_OPCODE(op_push_scope);
924 }
925 case op_pop_scope: {
926 emitCTICall(Interpreter::cti_op_pop_scope);
927 NEXT_OPCODE(op_pop_scope);
928 }
929 CTI_COMPILE_UNARY_OP(op_typeof)
930 CTI_COMPILE_UNARY_OP(op_is_undefined)
931 CTI_COMPILE_UNARY_OP(op_is_boolean)
932 CTI_COMPILE_UNARY_OP(op_is_number)
933 CTI_COMPILE_UNARY_OP(op_is_string)
934 CTI_COMPILE_UNARY_OP(op_is_object)
935 CTI_COMPILE_UNARY_OP(op_is_function)
936 case op_stricteq: {
937 compileOpStrictEq(currentInstruction, OpStrictEq);
938 NEXT_OPCODE(op_stricteq);
939 }
940 case op_nstricteq: {
941 compileOpStrictEq(currentInstruction, OpNStrictEq);
942 NEXT_OPCODE(op_nstricteq);
943 }
944 case op_to_jsnumber: {
945 int srcVReg = currentInstruction[2].u.operand;
946 emitGetVirtualRegister(srcVReg, X86::eax);
947
948 Jump wasImmediate = jnz32(X86::eax, Imm32(JSImmediate::TagBitTypeInteger));
949
950 emitJumpSlowCaseIfNotJSCell(X86::eax, srcVReg);
951 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
952 addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
953
954 wasImmediate.link(this);
955
956 emitPutVirtualRegister(currentInstruction[1].u.operand);
957 NEXT_OPCODE(op_to_jsnumber);
958 }
959 CTI_COMPILE_BINARY_OP(op_in)
960 case op_push_new_scope: {
961 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
962 emitPutJITStubArgConstant(ident, 1);
963 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
964 emitCTICall(Interpreter::cti_op_push_new_scope);
965 emitPutVirtualRegister(currentInstruction[1].u.operand);
966 NEXT_OPCODE(op_push_new_scope);
967 }
968 case op_catch: {
969 emitGetCTIParam(STUB_ARGS_callFrame, callFrameRegister);
970 emitPutVirtualRegister(currentInstruction[1].u.operand);
971 NEXT_OPCODE(op_catch);
972 }
973 case op_jmp_scopes: {
974 unsigned count = currentInstruction[1].u.operand;
975 emitPutJITStubArgConstant(count, 1);
976 emitCTICall(Interpreter::cti_op_jmp_scopes);
977 unsigned target = currentInstruction[2].u.operand;
978 addJump(jump(), target + 2);
979 NEXT_OPCODE(op_jmp_scopes);
980 }
981 case op_put_by_index: {
982 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
983 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
984 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
985 emitCTICall(Interpreter::cti_op_put_by_index);
986 NEXT_OPCODE(op_put_by_index);
987 }
988 case op_switch_imm: {
989 unsigned tableIndex = currentInstruction[1].u.operand;
990 unsigned defaultOffset = currentInstruction[2].u.operand;
991 unsigned scrutinee = currentInstruction[3].u.operand;
992
993 // create jump table for switch destinations, track this switch statement.
994 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
995 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
996 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
997
998 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
999 emitPutJITStubArgConstant(tableIndex, 2);
1000 emitCTICall(Interpreter::cti_op_switch_imm);
1001 jump(X86::eax);
1002 NEXT_OPCODE(op_switch_imm);
1003 }
1004 case op_switch_char: {
1005 unsigned tableIndex = currentInstruction[1].u.operand;
1006 unsigned defaultOffset = currentInstruction[2].u.operand;
1007 unsigned scrutinee = currentInstruction[3].u.operand;
1008
1009 // create jump table for switch destinations, track this switch statement.
1010 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1011 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
1012 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1013
1014 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1015 emitPutJITStubArgConstant(tableIndex, 2);
1016 emitCTICall(Interpreter::cti_op_switch_char);
1017 jump(X86::eax);
1018 NEXT_OPCODE(op_switch_char);
1019 }
1020 case op_switch_string: {
1021 unsigned tableIndex = currentInstruction[1].u.operand;
1022 unsigned defaultOffset = currentInstruction[2].u.operand;
1023 unsigned scrutinee = currentInstruction[3].u.operand;
1024
1025 // create jump table for switch destinations, track this switch statement.
1026 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1027 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
1028
1029 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1030 emitPutJITStubArgConstant(tableIndex, 2);
1031 emitCTICall(Interpreter::cti_op_switch_string);
1032 jump(X86::eax);
1033 NEXT_OPCODE(op_switch_string);
1034 }
1035 case op_del_by_val: {
1036 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1037 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1038 emitCTICall(Interpreter::cti_op_del_by_val);
1039 emitPutVirtualRegister(currentInstruction[1].u.operand);
1040 NEXT_OPCODE(op_del_by_val);
1041 }
1042 case op_put_getter: {
1043 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1044 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1045 emitPutJITStubArgConstant(ident, 2);
1046 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1047 emitCTICall(Interpreter::cti_op_put_getter);
1048 NEXT_OPCODE(op_put_getter);
1049 }
1050 case op_put_setter: {
1051 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1052 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1053 emitPutJITStubArgConstant(ident, 2);
1054 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1055 emitCTICall(Interpreter::cti_op_put_setter);
1056 NEXT_OPCODE(op_put_setter);
1057 }
1058 case op_new_error: {
1059 JSValue* message = m_codeBlock->unexpectedConstant(currentInstruction[3].u.operand);
1060 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
1061 emitPutJITStubArgConstant(message, 2);
1062 emitPutJITStubArgConstant(m_codeBlock->lineNumberForBytecodeOffset(m_bytecodeIndex), 3);
1063 emitCTICall(Interpreter::cti_op_new_error);
1064 emitPutVirtualRegister(currentInstruction[1].u.operand);
1065 NEXT_OPCODE(op_new_error);
1066 }
1067 case op_debug: {
1068 emitPutJITStubArgConstant(currentInstruction[1].u.operand, 1);
1069 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1070 emitPutJITStubArgConstant(currentInstruction[3].u.operand, 3);
1071 emitCTICall(Interpreter::cti_op_debug);
1072 NEXT_OPCODE(op_debug);
1073 }
1074 case op_eq_null: {
1075 unsigned dst = currentInstruction[1].u.operand;
1076 unsigned src1 = currentInstruction[2].u.operand;
1077
1078 emitGetVirtualRegister(src1, X86::eax);
1079 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1080
1081 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1082 setnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1083
1084 Jump wasNotImmediate = jump();
1085
1086 isImmediate.link(this);
1087
1088 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1089 sete32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1090
1091 wasNotImmediate.link(this);
1092
1093 emitTagAsBoolImmediate(X86::eax);
1094 emitPutVirtualRegister(dst);
1095
1096 NEXT_OPCODE(op_eq_null);
1097 }
1098 case op_neq_null: {
1099 unsigned dst = currentInstruction[1].u.operand;
1100 unsigned src1 = currentInstruction[2].u.operand;
1101
1102 emitGetVirtualRegister(src1, X86::eax);
1103 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1104
1105 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1106 setz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1107
1108 Jump wasNotImmediate = jump();
1109
1110 isImmediate.link(this);
1111
1112 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1113 setne32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1114
1115 wasNotImmediate.link(this);
1116
1117 emitTagAsBoolImmediate(X86::eax);
1118 emitPutVirtualRegister(dst);
1119
1120 NEXT_OPCODE(op_neq_null);
1121 }
1122 case op_enter: {
1123 // Even though CTI doesn't use them, we initialize our constant
1124 // registers to zap stale pointers, to avoid unnecessarily prolonging
1125 // object lifetime and increasing GC pressure.
1126 size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1127 for (size_t j = 0; j < count; ++j)
1128 emitInitRegister(j);
1129
1130 NEXT_OPCODE(op_enter);
1131 }
1132 case op_enter_with_activation: {
1133 // Even though CTI doesn't use them, we initialize our constant
1134 // registers to zap stale pointers, to avoid unnecessarily prolonging
1135 // object lifetime and increasing GC pressure.
1136 size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1137 for (size_t j = 0; j < count; ++j)
1138 emitInitRegister(j);
1139
1140 emitCTICall(Interpreter::cti_op_push_activation);
1141 emitPutVirtualRegister(currentInstruction[1].u.operand);
1142
1143 NEXT_OPCODE(op_enter_with_activation);
1144 }
1145 case op_create_arguments: {
1146 if (m_codeBlock->m_numParameters == 1)
1147 emitCTICall(Interpreter::cti_op_create_arguments_no_params);
1148 else
1149 emitCTICall(Interpreter::cti_op_create_arguments);
1150 NEXT_OPCODE(op_create_arguments);
1151 }
1152 case op_convert_this: {
1153 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
1154
1155 emitJumpSlowCaseIfNotJSCell(X86::eax);
1156 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
1157 addSlowCase(jnz32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
1158
1159 NEXT_OPCODE(op_convert_this);
1160 }
1161 case op_profile_will_call: {
1162 emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1163 Jump noProfiler = jzPtr(Address(X86::eax));
1164 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1165 emitCTICall(Interpreter::cti_op_profile_will_call);
1166 noProfiler.link(this);
1167
1168 NEXT_OPCODE(op_profile_will_call);
1169 }
1170 case op_profile_did_call: {
1171 emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1172 Jump noProfiler = jzPtr(Address(X86::eax));
1173 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1174 emitCTICall(Interpreter::cti_op_profile_did_call);
1175 noProfiler.link(this);
1176
1177 NEXT_OPCODE(op_profile_did_call);
1178 }
1179 case op_get_array_length:
1180 case op_get_by_id_chain:
1181 case op_get_by_id_generic:
1182 case op_get_by_id_proto:
1183 case op_get_by_id_proto_list:
1184 case op_get_by_id_self:
1185 case op_get_by_id_self_list:
1186 case op_get_string_length:
1187 case op_put_by_id_generic:
1188 case op_put_by_id_replace:
1189 case op_put_by_id_transition:
1190 ASSERT_NOT_REACHED();
1191 }
1192 }
1193
1194 ASSERT(propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
1195 ASSERT(callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());
1196
1197#ifndef NDEBUG
1198 // reset this, in order to guard it's use with asserts
1199 m_bytecodeIndex = (unsigned)-1;
1200#endif
1201}
1202
1203
1204void JIT::privateCompileLinkPass()
1205{
1206 unsigned jmpTableCount = m_jmpTable.size();
1207 for (unsigned i = 0; i < jmpTableCount; ++i)
1208 m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeIndex], this);
1209 m_jmpTable.clear();
1210}
1211
// Second code-generation pass: emit the out-of-line slow paths for every
// bytecode instruction that registered slow-case jumps during the main pass.
// m_slowCases was appended to in bytecode order, so iterating it visits the
// instructions in the order they were compiled. Each case below must consume
// exactly the slow-case jumps its fast path recorded — no more, no fewer —
// which the two ASSERT_WITH_MESSAGEs after the switch enforce in debug builds.
void JIT::privateCompileSlowCases()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
    unsigned propertyAccessInstructionIndex = 0; // walks the code block's structure stub infos, in emission order
    unsigned callLinkInfoIndex = 0; // walks the code block's call link infos, in emission order

    // Note: the for statement itself does not advance 'iter'; every
    // linkSlowCase(iter) / getSlowCase(iter) call consumes one SlowCaseEntry
    // and moves the iterator forward.
    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
        // FIXME: enable peephole optimizations for slow cases when applicable
        killLastResultRegister();

        m_bytecodeIndex = iter->to;
#ifndef NDEBUG
        // Remember which bytecode index this group of slow cases belongs to,
        // so we can assert below that the case consumed all of (and only) its
        // own entries.
        unsigned firstTo = m_bytecodeIndex;
#endif
        Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;

        switch (OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        case op_convert_this: {
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_convert_this);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_convert_this);
        }
        // Arithmetic slow paths are factored out into the
        // compileFastArithSlow_* helpers (see JITArithmetic).
        case op_add: {
            compileFastArithSlow_op_add(currentInstruction, iter);
            NEXT_OPCODE(op_add);
        }
        case op_construct_verify: {
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
            emitPutVirtualRegister(currentInstruction[1].u.operand);

            NEXT_OPCODE(op_construct_verify);
        }
        case op_get_by_val: {
            // The slow case that handles accesses to arrays (below) may jump back up to here.
            Label beginGetByValSlow(this);

            Jump notImm = getSlowCase(iter);
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitFastArithIntToImmNoCheck(X86::edx);
            notImm.link(this);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_get_by_val);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));

            // This is slow case that handles accesses to arrays above the fast cut-off.
            // First, check if this is an access to the vector
            linkSlowCase(iter);
            jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength)), beginGetByValSlow);

            // okay, missed the fast region, but it is still in the vector. Get the value.
            loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::ecx);
            // Check whether the value loaded is zero; if so we need to return undefined.
            jzPtr(X86::ecx, beginGetByValSlow);
            move(X86::ecx, X86::eax);
            emitPutVirtualRegister(currentInstruction[1].u.operand, X86::eax);

            NEXT_OPCODE(op_get_by_val);
        }
        case op_sub: {
            compileBinaryArithOpSlowCase(op_sub, iter, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
            NEXT_OPCODE(op_sub);
        }
        case op_rshift: {
            compileFastArithSlow_op_rshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
            NEXT_OPCODE(op_rshift);
        }
        case op_lshift: {
            compileFastArithSlow_op_lshift(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
            NEXT_OPCODE(op_lshift);
        }
        case op_loop_if_less: {
            unsigned target = currentInstruction[3].u.operand;
            // When the second operand is a constant immediate number the fast
            // path emitted only one slow-case jump; otherwise it emitted two.
            JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
            if (src2imm) {
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
                emitCTICall(Interpreter::cti_op_loop_if_less);
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
            } else {
                linkSlowCase(iter);
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArg(X86::edx, 2);
                emitCTICall(Interpreter::cti_op_loop_if_less);
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
            }
            NEXT_OPCODE(op_loop_if_less);
        }
        case op_put_by_id: {
            compilePutByIdSlowCase(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, iter, propertyAccessInstructionIndex++);
            NEXT_OPCODE(op_put_by_id);
        }
        case op_get_by_id: {
            compileGetByIdSlowCase(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), iter, propertyAccessInstructionIndex++);
            NEXT_OPCODE(op_get_by_id);
        }
        case op_loop_if_lesseq: {
            unsigned target = currentInstruction[3].u.operand;
            // Same one-vs-two slow-case split as op_loop_if_less above.
            JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
            if (src2imm) {
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
                emitCTICall(Interpreter::cti_op_loop_if_lesseq);
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
            } else {
                linkSlowCase(iter);
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArg(X86::edx, 2);
                emitCTICall(Interpreter::cti_op_loop_if_lesseq);
                emitJumpSlowToHot(jnz32(X86::eax), target + 3);
            }
            NEXT_OPCODE(op_loop_if_lesseq);
        }
        case op_pre_inc: {
            compileFastArithSlow_op_pre_inc(currentInstruction[1].u.operand, iter);
            NEXT_OPCODE(op_pre_inc);
        }
        case op_put_by_val: {
            // Normal slow cases - either is not an immediate imm, or is an array.
            Jump notImm = getSlowCase(iter);
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitFastArithIntToImmNoCheck(X86::edx);
            notImm.link(this);
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitPutJITStubArg(X86::ecx, 3);
            emitCTICall(Interpreter::cti_op_put_by_val);
            emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_put_by_val));

            // slow cases for immediate int accesses to arrays
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitPutJITStubArg(X86::ecx, 3);
            emitCTICall(Interpreter::cti_op_put_by_val_array);

            NEXT_OPCODE(op_put_by_val);
        }
        case op_loop_if_true: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_jtrue);
            unsigned target = currentInstruction[2].u.operand;
            emitJumpSlowToHot(jnz32(X86::eax), target + 2);
            NEXT_OPCODE(op_loop_if_true);
        }
        case op_pre_dec: {
            compileFastArithSlow_op_pre_dec(currentInstruction[1].u.operand, iter);
            NEXT_OPCODE(op_pre_dec);
        }
        case op_jnless: {
            unsigned target = currentInstruction[3].u.operand;
            // Same one-vs-two slow-case split as op_loop_if_less above; note
            // the stub computes 'less', so the branch taken here is on zero
            // (i.e. 'not less') to implement jnless.
            JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
            if (src2imm) {
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
                emitCTICall(Interpreter::cti_op_jless);
                emitJumpSlowToHot(jz32(X86::eax), target + 3);
            } else {
                linkSlowCase(iter);
                linkSlowCase(iter);
                emitPutJITStubArg(X86::eax, 1);
                emitPutJITStubArg(X86::edx, 2);
                emitCTICall(Interpreter::cti_op_jless);
                emitJumpSlowToHot(jz32(X86::eax), target + 3);
            }
            NEXT_OPCODE(op_jnless);
        }
        case op_not: {
            linkSlowCase(iter);
            // NOTE(review): the xor presumably undoes a tag manipulation the
            // fast path applied to eax before it bailed out — confirm against
            // the op_not case in the main pass.
            xorPtr(Imm32(JSImmediate::FullTagTypeBool), X86::eax);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_not);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_not);
        }
        case op_jfalse: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_jtrue);
            unsigned target = currentInstruction[2].u.operand;
            emitJumpSlowToHot(jz32(X86::eax), target + 2); // inverted!
            NEXT_OPCODE(op_jfalse);
        }
        case op_post_inc: {
            compileFastArithSlow_op_post_inc(currentInstruction[1].u.operand, currentInstruction[2].u.operand, iter);
            NEXT_OPCODE(op_post_inc);
        }
        case op_bitnot: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_bitnot);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_bitnot);
        }
        case op_bitand: {
            compileFastArithSlow_op_bitand(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
            NEXT_OPCODE(op_bitand);
        }
        case op_jtrue: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_jtrue);
            unsigned target = currentInstruction[2].u.operand;
            emitJumpSlowToHot(jnz32(X86::eax), target + 2);
            NEXT_OPCODE(op_jtrue);
        }
        case op_post_dec: {
            compileFastArithSlow_op_post_dec(currentInstruction[1].u.operand, currentInstruction[2].u.operand, iter);
            NEXT_OPCODE(op_post_dec);
        }
        case op_bitxor: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_bitxor);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_bitxor);
        }
        case op_bitor: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_bitor);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_bitor);
        }
        case op_eq: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_eq);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_eq);
        }
        case op_neq: {
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_neq);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_neq);
        }
        case op_stricteq: {
            // Three slow-case jumps were recorded by the fast path for strict
            // equality; all funnel into one stub call.
            linkSlowCase(iter);
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_stricteq);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_stricteq);
        }
        case op_nstricteq: {
            linkSlowCase(iter);
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitPutJITStubArg(X86::eax, 1);
            emitPutJITStubArg(X86::edx, 2);
            emitCTICall(Interpreter::cti_op_nstricteq);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_nstricteq);
        }
        case op_instanceof: {
            linkSlowCase(iter);
            linkSlowCase(iter);
            linkSlowCase(iter);
            emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
            emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
            emitPutJITStubArgFromVirtualRegister(currentInstruction[4].u.operand, 3, X86::ecx);
            emitCTICall(Interpreter::cti_op_instanceof);
            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_instanceof);
        }
        case op_mod: {
            compileFastArithSlow_op_mod(currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, iter);
            NEXT_OPCODE(op_mod);
        }
        case op_mul: {
            compileFastArithSlow_op_mul(currentInstruction, iter);
            NEXT_OPCODE(op_mul);
        }

        // All three call forms share one slow-case compiler; each consumes
        // the next CallLinkInfo slot.
        case op_call: {
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
            NEXT_OPCODE(op_call);
        }
        case op_call_eval: {
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
            NEXT_OPCODE(op_call_eval);
        }
        case op_construct: {
            compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
            NEXT_OPCODE(op_construct);
        }
        case op_to_jsnumber: {
            // The fast path records the not-a-cell jump only when the source
            // register can hold a cell, so use the conditional link helper.
            linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
            linkSlowCase(iter);

            emitPutJITStubArg(X86::eax, 1);
            emitCTICall(Interpreter::cti_op_to_jsnumber);

            emitPutVirtualRegister(currentInstruction[1].u.operand);
            NEXT_OPCODE(op_to_jsnumber);
        }

        default:
            // Every opcode that registers slow cases must be handled above.
            ASSERT_NOT_REACHED();
        }

        // Verify this case consumed exactly its own slow-case entries:
        // if the next entry still points at firstTo we linked too few jumps;
        // if the previous entry doesn't, we linked too many.
        ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo != iter->to,"Not enough jumps linked in slow case codegen.");
        ASSERT_WITH_MESSAGE(firstTo == (iter - 1)->to, "Too many jumps linked in slow case codegen.");

        // Rejoin the main-line code; NEXT_OPCODE has already advanced
        // m_bytecodeIndex past the instruction just handled, so a relative
        // offset of 0 targets the following instruction's fast path.
        emitJumpSlowToHot(jump(), 0);
    }

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    ASSERT(propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
#endif
    ASSERT(callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // reset this, in order to guard it's use with asserts
    m_bytecodeIndex = (unsigned)-1;
#endif
}
1554
1555void JIT::privateCompile()
1556{
1557#if ENABLE(CODEBLOCK_SAMPLING)
1558 storePtr(ImmPtr(m_codeBlock), m_interpreter->sampler()->codeBlockSlot());
1559#endif
1560#if ENABLE(OPCODE_SAMPLING)
1561 store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin())), m_interpreter->sampler()->sampleSlot());
1562#endif
1563
1564 // Could use a pop_m, but would need to offset the following instruction if so.
1565 pop(X86::ecx);
1566 emitPutToCallFrameHeader(X86::ecx, RegisterFile::ReturnPC);
1567
1568 Jump slowRegisterFileCheck;
1569 Label afterRegisterFileCheck;
1570 if (m_codeBlock->codeType() == FunctionCode) {
1571 // In the case of a fast linked call, we do not set this up in the caller.
1572 emitPutImmediateToCallFrameHeader(m_codeBlock, RegisterFile::CodeBlock);
1573
1574 emitGetCTIParam(STUB_ARGS_registerFile, X86::eax);
1575 addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, X86::edx);
1576
1577 slowRegisterFileCheck = jg32(X86::edx, Address(X86::eax, FIELD_OFFSET(RegisterFile, m_end)));
1578 afterRegisterFileCheck = label();
1579 }
1580
1581 privateCompileMainPass();
1582 privateCompileLinkPass();
1583 privateCompileSlowCases();
1584
1585 if (m_codeBlock->codeType() == FunctionCode) {
1586 slowRegisterFileCheck.link(this);
1587 m_bytecodeIndex = 0; // emitCTICall will add to the map, but doesn't actually need this...
1588 emitCTICall(Interpreter::cti_register_file_check);
1589#ifndef NDEBUG
1590 // reset this, in order to guard it's use with asserts
1591 m_bytecodeIndex = (unsigned)-1;
1592#endif
1593 jump(afterRegisterFileCheck);
1594 }
1595
1596 ASSERT(m_jmpTable.isEmpty());
1597
1598 RefPtr<ExecutablePool> allocator = m_globalData->poolForSize(m_assembler.size());
1599 void* code = m_assembler.executableCopy(allocator.get());
1600 JITCodeRef codeRef(code, allocator);
1601
1602 PatchBuffer patchBuffer(code);
1603
1604 // Translate vPC offsets into addresses in JIT generated code, for switch tables.
1605 for (unsigned i = 0; i < m_switches.size(); ++i) {
1606 SwitchRecord record = m_switches[i];
1607 unsigned bytecodeIndex = record.bytecodeIndex;
1608
1609 if (record.type != SwitchRecord::String) {
1610 ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
1611 ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());
1612
1613 record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
1614
1615 for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
1616 unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
1617 record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
1618 }
1619 } else {
1620 ASSERT(record.type == SwitchRecord::String);
1621
1622 record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
1623
1624 StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
1625 for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
1626 unsigned offset = it->second.branchOffset;
1627 it->second.ctiOffset = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
1628 }
1629 }
1630 }
1631
1632 for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
1633 HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
1634 handler.nativeCode = patchBuffer.addressOf(m_labels[handler.target]);
1635 }
1636
1637 m_codeBlock->pcVector().reserveCapacity(m_calls.size());
1638 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
1639 if (iter->to)
1640 patchBuffer.link(iter->from, iter->to);
1641 m_codeBlock->pcVector().append(PC(reinterpret_cast<void**>(patchBuffer.addressOf(iter->from)) - reinterpret_cast<void**>(code), iter->bytecodeIndex));
1642 }
1643
1644 // Link absolute addresses for jsr
1645 for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
1646 patchBuffer.setPtr(iter->storeLocation, patchBuffer.addressOf(iter->target));
1647
1648 for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
1649 StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
1650#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1651 info.callReturnLocation = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
1652 info.hotPathBegin = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
1653#else
1654 info.callReturnLocation = 0;
1655 info.hotPathBegin = 0;
1656#endif
1657 }
1658 for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
1659 CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
1660#if ENABLE(JIT_OPTIMIZE_CALL)
1661 info.callReturnLocation = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].callReturnLocation);
1662 info.hotPathBegin = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
1663 info.hotPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathOther);
1664 info.coldPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].coldPathOther);
1665#else
1666 info.callReturnLocation = 0;
1667 info.hotPathBegin = 0;
1668 info.hotPathOther = 0;
1669 info.coldPathOther = 0;
1670#endif
1671 }
1672
1673 m_codeBlock->setJITCode(codeRef);
1674}
1675
// Generates the process-wide CTI trampolines shared by all compiled code:
// two fast-path property-access stubs (array length, string length) and
// three virtual-call stubs (pre-link, link, and plain virtual call). The
// generated code is copied into an executable pool owned by the Interpreter,
// the embedded calls are linked to their cti_* stub functions, and the entry
// points are recorded on the Interpreter object.
void JIT::privateCompileCTIMachineTrampolines()
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) The first function provides fast property access for array length
    Label arrayLengthBegin = align();

    // Check eax is an array
    Jump array_failureCases1 = emitJumpIfNotJSCell(X86::eax);
    Jump array_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::eax);
    load32(Address(X86::eax, FIELD_OFFSET(ArrayStorage, m_length)), X86::eax);

    // Lengths above maxImmediateInt cannot be represented as an immediate int; bail to the slow case.
    Jump array_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));

    // X86::eax contains a 64 bit value (is signed, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(X86::eax);

    ret();

    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(X86::eax);
    Jump string_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsStringVptr));

    // Checks out okay! - get the length from the Ustring.
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSString, m_value) + FIELD_OFFSET(UString, m_rep)), X86::eax);
    load32(Address(X86::eax, FIELD_OFFSET(UString::Rep, len)), X86::eax);

    // As above: bail out if the length does not fit in an immediate int.
    Jump string_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));

    // X86::eax contains a 64 bit value (is signed, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(X86::eax);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    //
    // All three stubs below share a common shape: the callee JSFunction is expected
    // in ecx and the arg count in edx. Around each call to a cti_* stub the value on
    // top of the stack (presumably the return address — pop/push pairs below; confirm
    // against the stub calling convention) is moved into ebx and restored afterwards.

    Label virtualCallPreLinkBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
    Jump hasCodeBlock1 = jnzPtr(X86::eax);
    // No CodeBlock yet: call out to cti_op_call_JSFunction to compile, then
    // re-fetch the callee (arg 1) and arg count (arg 3) clobbered by the call.
    pop(X86::ebx);
    restoreArgumentReference();
    Jump callJSFunction1 = call();
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    hasCodeBlock1.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay1 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    emitPutJITStubArg(X86::eax, 4);
    restoreArgumentReference();
    Jump callArityCheck1 = call();
    // The arity-check stub returns the (possibly relocated) call frame in edx.
    move(X86::edx, callFrameRegister);
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    arityCheckOkay1.link(this);

    compileOpCallInitializeCallFrame();

    // Pre-link path: call cti_vm_dontLazyLinkCall, then jump to the code it returns in eax.
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    restoreArgumentReference();
    Jump callDontLazyLinkCall = call();
    push(X86::ebx);

    jump(X86::eax);

    Label virtualCallLinkBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
    Jump hasCodeBlock2 = jnzPtr(X86::eax);
    pop(X86::ebx);
    restoreArgumentReference();
    Jump callJSFunction2 = call();
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    emitPutJITStubArg(X86::eax, 4);
    restoreArgumentReference();
    Jump callArityCheck2 = call();
    move(X86::edx, callFrameRegister);
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    arityCheckOkay2.link(this);

    compileOpCallInitializeCallFrame();

    // Link path: call cti_vm_lazyLinkCall (which patches the call site), then
    // jump to the code it returns in eax.
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    restoreArgumentReference();
    Jump callLazyLinkCall = call();
    push(X86::ebx);

    jump(X86::eax);

    Label virtualCallBegin = align();

    // Load the callee CodeBlock* into eax
    loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
    loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
    Jump hasCodeBlock3 = jnzPtr(X86::eax);
    pop(X86::ebx);
    restoreArgumentReference();
    Jump callJSFunction3 = call();
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
    pop(X86::ebx);
    emitPutJITStubArg(X86::ebx, 2);
    emitPutJITStubArg(X86::eax, 4);
    restoreArgumentReference();
    Jump callArityCheck3 = call();
    move(X86::edx, callFrameRegister);
    emitGetJITStubArg(1, X86::ecx);
    emitGetJITStubArg(3, X86::edx);
    push(X86::ebx);
    arityCheckOkay3.link(this);

    compileOpCallInitializeCallFrame();

    // load ctiCode from the new codeBlock.
    loadPtr(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_jitCode)), X86::eax);

    jump(X86::eax);

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    m_interpreter->m_executablePool = m_globalData->poolForSize(m_assembler.size());
    void* code = m_assembler.executableCopy(m_interpreter->m_executablePool.get());
    PatchBuffer patchBuffer(code);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // All failure paths of the two property-access stubs fall back to the generic cti stubs.
    patchBuffer.link(array_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
    patchBuffer.link(array_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
    patchBuffer.link(array_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
    patchBuffer.link(string_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));

    m_interpreter->m_ctiArrayLengthTrampoline = patchBuffer.addressOf(arrayLengthBegin);
    m_interpreter->m_ctiStringLengthTrampoline = patchBuffer.addressOf(stringLengthBegin);
#endif
    // Link the call sites emitted above to their C++ stub functions.
    patchBuffer.link(callArityCheck1, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
    patchBuffer.link(callArityCheck2, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
    patchBuffer.link(callArityCheck3, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
    patchBuffer.link(callJSFunction2, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
    patchBuffer.link(callJSFunction3, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
    patchBuffer.link(callDontLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_dontLazyLinkCall));
    patchBuffer.link(callLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_lazyLinkCall));

    // Publish the trampoline entry points on the Interpreter.
    m_interpreter->m_ctiVirtualCallPreLink = patchBuffer.addressOf(virtualCallPreLinkBegin);
    m_interpreter->m_ctiVirtualCallLink = patchBuffer.addressOf(virtualCallLinkBegin);
    m_interpreter->m_ctiVirtualCall = patchBuffer.addressOf(virtualCallBegin);
}
1855
// Emits code to read the Register at 'index' out of a JSVariableObject's
// register array into 'dst'. Two levels of indirection are followed
// (variableObject->d->registers); 'dst' doubles as the scratch register
// for the intermediate pointers, so 'variableObject' is left untouched.
void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
{
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), dst);
    loadPtr(Address(dst, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), dst);
    loadPtr(Address(dst, index * sizeof(Register)), dst);
}
1862
// Emits code to store 'src' into the Register at 'index' of a
// JSVariableObject's register array (variableObject->d->registers[index]).
// Note: 'variableObject' is used as the scratch register for the pointer
// chasing, so its original value is clobbered by the emitted code.
void JIT::emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index)
{
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), variableObject);
    loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), variableObject);
    storePtr(src, Address(variableObject, index * sizeof(Register)));
}
1869
1870} // namespace JSC
1871
1872#endif // ENABLE(JIT)
Note: See TracBrowser for help on using the repository browser.